mirror of https://github.com/makeplane/plane
synced 2025-08-07 19:59:33 +00:00

Compare commits: fix-live-s ... fix/extern (13 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 2475097b4e | |
| | 1a2186cca4 | |
| | ac39bb9b1d | |
| | b8b58d3acd | |
| | 75ca932682 | |
| | be9c0be886 | |
| | ee08feb59b | |
| | 22b0bb4615 | |
| | 1e464d8613 | |
| | 3224dc4fe6 | |
| | 7010448c34 | |
| | 1cc18a0915 | |
| | a04ad4c4e2 | |
.gitignore (vendored): 3 changes
```diff
@@ -80,3 +80,6 @@ tmp/
 ## packages
 dist
 .temp/
+
+# logs
+combined.log
```
```diff
@@ -1,3 +1,3 @@
 web: gunicorn -w 4 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:$PORT --max-requests 10000 --max-requests-jitter 1000 --access-logfile -
-worker: celery -A plane worker -l info
+worker: celery -A plane worker -l info -Q internal_tasks,external_tasks
 beat: celery -A plane beat -l INFO
```
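The new `-Q internal_tasks,external_tasks` flag only tells the worker which queues to consume; the queues themselves, and the `routing_key='external'` binding used throughout the commits below, have to be declared on the Celery app. A minimal sketch of that configuration, assuming a typical `celery.py`; the broker URL and app name are illustrative, not taken from this diff:

```python
from celery import Celery
from kombu import Queue

app = Celery("plane", broker="redis://localhost:6379/0")  # illustrative broker

app.conf.task_default_queue = "internal_tasks"
app.conf.task_queues = (
    # queue for in-house work (exports, emails, analytics)
    Queue("internal_tasks", routing_key="internal"),
    # queue bound so that apply_async(routing_key="external") lands here
    Queue("external_tasks", routing_key="external"),
)
```

With that in place, the single Procfile worker consumes both queues; splitting into separate worker processes later only requires changing the `-Q` lists.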
```diff
@@ -281,20 +281,22 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
         )
         cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)

-        issue_activity.delay(
-            type="cycle.activity.deleted",
-            requested_data=json.dumps(
-                {
+        issue_activity.apply_async(
+            args=[],
+            kwargs={
+                'type': "cycle.activity.deleted",
+                'requested_data': json.dumps({
                     "cycle_id": str(pk),
                     "cycle_name": str(cycle.name),
                     "issues": [str(issue_id) for issue_id in cycle_issues],
-                }
-            ),
-            actor_id=str(request.user.id),
-            issue_id=None,
-            project_id=str(project_id),
-            current_instance=None,
-            epoch=int(timezone.now().timestamp()),
+                }),
+                'actor_id': str(request.user.id),
+                'issue_id': None,
+                'project_id': str(project_id),
+                'current_instance': None,
+                'epoch': int(timezone.now().timestamp()),
+            },
+            routing_key='external',
         )
         # Delete the cycle
         cycle.delete()
```
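The change above repeats mechanically across the endpoints below: `delay(*args, **kwargs)` is shorthand for `apply_async(args, kwargs)` and cannot carry execution options, so every call site that needs a routing key is rewritten to the long form. A minimal sketch of the equivalence, assuming `issue_activity` is the Celery task used throughout:

```python
# Shorthand: always goes to the task's default queue; no options possible.
issue_activity.delay(type="cycle.activity.deleted", issue_id=None)

# Equivalent long form, with room for routing options; this is what the
# commits switch to so activity work lands on the external queue.
issue_activity.apply_async(
    args=[],
    kwargs={"type": "cycle.activity.deleted", "issue_id": None},
    routing_key="external",
)
```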
```diff
@@ -454,21 +456,21 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
         )

         # Capture Issue Activity
-        issue_activity.delay(
-            type="cycle.activity.created",
-            requested_data=json.dumps({"cycles_list": str(issues)}),
-            actor_id=str(self.request.user.id),
-            issue_id=None,
-            project_id=str(self.kwargs.get("project_id", None)),
-            current_instance=json.dumps(
-                {
+        issue_activity.apply_async(
+            args=[],
+            kwargs={
+                'type': "cycle.activity.created",
+                'requested_data': json.dumps({"cycles_list": str(issues)}),
+                'actor_id': str(self.request.user.id),
+                'issue_id': None,
+                'project_id': str(self.kwargs.get("project_id", None)),
+                'current_instance': json.dumps({
                     "updated_cycle_issues": update_cycle_issue_activity,
-                    "created_cycle_issues": serializers.serialize(
-                        "json", record_to_create
-                    ),
-                }
-            ),
-            epoch=int(timezone.now().timestamp()),
+                    "created_cycle_issues": serializers.serialize("json", record_to_create),
+                }),
+                'epoch': int(timezone.now().timestamp()),
+            },
+            routing_key='external',
         )

         # Return all Cycle Issues
```
```diff
@@ -483,19 +485,21 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
         )
         issue_id = cycle_issue.issue_id
         cycle_issue.delete()
-        issue_activity.delay(
-            type="cycle.activity.deleted",
-            requested_data=json.dumps(
-                {
+        issue_activity.apply_async(
+            args=[],
+            kwargs={
+                'type': "cycle.activity.deleted",
+                'requested_data': json.dumps({
                     "cycle_id": str(self.kwargs.get("cycle_id")),
                     "issues": [str(issue_id)],
-                }
-            ),
-            actor_id=str(self.request.user.id),
-            issue_id=str(issue_id),
-            project_id=str(self.kwargs.get("project_id", None)),
-            current_instance=None,
-            epoch=int(timezone.now().timestamp()),
+                }),
+                'actor_id': str(self.request.user.id),
+                'issue_id': str(issue_id),
+                'project_id': str(self.kwargs.get("project_id", None)),
+                'current_instance': None,
+                'epoch': int(timezone.now().timestamp()),
+            },
+            routing_key='external',
         )
         return Response(status=status.HTTP_204_NO_CONTENT)
```
```diff
@@ -142,14 +142,18 @@ class InboxIssueAPIEndpoint(BaseAPIView):
         )

         # Create an Issue Activity
-        issue_activity.delay(
-            type="issue.activity.created",
-            requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
-            actor_id=str(request.user.id),
-            issue_id=str(issue.id),
-            project_id=str(project_id),
-            current_instance=None,
-            epoch=int(timezone.now().timestamp()),
+        issue_activity.apply_async(
+            args=[],  # If no positional arguments are required
+            kwargs={
+                "type": "issue.activity.created",
+                "requested_data": json.dumps(request.data, cls=DjangoJSONEncoder),
+                "actor_id": str(request.user.id),
+                "issue_id": str(issue.id),
+                "project_id": str(project_id),
+                "current_instance": None,
+                "epoch": int(timezone.now().timestamp()),
+            },
+            routing_key="external",
         )

         # create an inbox issue
```
```diff
@@ -232,17 +236,21 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             # Log all the updates
             requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
             if issue is not None:
-                issue_activity.delay(
-                    type="issue.activity.updated",
-                    requested_data=requested_data,
-                    actor_id=str(request.user.id),
-                    issue_id=str(issue_id),
-                    project_id=str(project_id),
-                    current_instance=json.dumps(
-                        IssueSerializer(current_instance).data,
-                        cls=DjangoJSONEncoder,
-                    ),
-                    epoch=int(timezone.now().timestamp()),
+                issue_activity.apply_async(
+                    args=[],
+                    kwargs={
+                        "type": "issue.activity.updated",
+                        "requested_data": requested_data,
+                        "actor_id": str(request.user.id),
+                        "issue_id": str(issue_id),
+                        "project_id": str(project_id),
+                        "current_instance": json.dumps(
+                            IssueSerializer(current_instance).data,
+                            cls=DjangoJSONEncoder,
+                        ),
+                        "epoch": int(timezone.now().timestamp()),
+                    },
+                    routing_key="external",
                 )
                 issue_serializer.save()
             else:
```
```diff
@@ -207,14 +207,18 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
             serializer.save()

             # Track the issue
-            issue_activity.delay(
-                type="issue.activity.created",
-                requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
-                actor_id=str(request.user.id),
-                issue_id=str(serializer.data.get("id", None)),
-                project_id=str(project_id),
-                current_instance=None,
-                epoch=int(timezone.now().timestamp()),
+            issue_activity.apply_async(
+                args=[],  # If no positional arguments are required
+                kwargs={
+                    'type': "issue.activity.created",
+                    'requested_data': json.dumps(self.request.data, cls=DjangoJSONEncoder),
+                    'actor_id': str(request.user.id),
+                    'issue_id': str(serializer.data.get("id", None)),
+                    'project_id': str(project_id),
+                    'current_instance': None,
+                    'epoch': int(timezone.now().timestamp()),
+                },
+                routing_key='external',
             )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
```
```diff
@@ -228,14 +232,18 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
         serializer = IssueSerializer(issue, data=request.data, partial=True)
         if serializer.is_valid():
             serializer.save()
-            issue_activity.delay(
-                type="issue.activity.updated",
-                requested_data=requested_data,
-                actor_id=str(request.user.id),
-                issue_id=str(pk),
-                project_id=str(project_id),
-                current_instance=current_instance,
-                epoch=int(timezone.now().timestamp()),
+            issue_activity.apply_async(
+                args=[],
+                kwargs={
+                    'type': "issue.activity.updated",
+                    'requested_data': requested_data,
+                    'actor_id': str(request.user.id),
+                    'issue_id': str(pk),
+                    'project_id': str(project_id),
+                    'current_instance': current_instance,
+                    'epoch': int(timezone.now().timestamp()),
+                },
+                routing_key='external',
             )
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
```
```diff
@@ -246,14 +254,19 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
             IssueSerializer(issue).data, cls=DjangoJSONEncoder
         )
         issue.delete()
-        issue_activity.delay(
-            type="issue.activity.deleted",
-            requested_data=json.dumps({"issue_id": str(pk)}),
-            actor_id=str(request.user.id),
-            issue_id=str(pk),
-            project_id=str(project_id),
-            current_instance=current_instance,
-            epoch=int(timezone.now().timestamp()),
+        issue_activity.apply_async(
+            args=[],
+            kwargs={
+                'type': "issue.activity.deleted",
+                'requested_data': json.dumps({"issue_id": str(pk)}),
+                'actor_id': str(request.user.id),
+                'issue_id': str(pk),
+                'project_id': str(project_id),
+                'current_instance': current_instance,
+                'epoch': int(timezone.now().timestamp()),
+            },
+            routing_key='your_routing_key',
+            queue='your_queue_name'
         )
         return Response(status=status.HTTP_204_NO_CONTENT)
```
```diff
@@ -309,7 +322,11 @@ class LabelAPIEndpoint(BaseAPIView):
             ).data,
         )
         label = self.get_queryset().get(pk=pk)
-        serializer = LabelSerializer(label, fields=self.fields, expand=self.expand,)
+        serializer = LabelSerializer(
+            label,
+            fields=self.fields,
+            expand=self.expand,
+        )
         return Response(serializer.data, status=status.HTTP_200_OK)

     def patch(self, request, slug, project_id, pk=None):
```
```diff
@@ -319,7 +336,6 @@ class LabelAPIEndpoint(BaseAPIView):
             serializer.save()
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

-
     def delete(self, request, slug, project_id, pk=None):
         label = self.get_queryset().get(pk=pk)
```
```diff
@@ -384,14 +400,18 @@ class IssueLinkAPIEndpoint(BaseAPIView):
                 project_id=project_id,
                 issue_id=issue_id,
             )
-            issue_activity.delay(
-                type="link.activity.created",
-                requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
-                actor_id=str(self.request.user.id),
-                issue_id=str(self.kwargs.get("issue_id")),
-                project_id=str(self.kwargs.get("project_id")),
-                current_instance=None,
-                epoch=int(timezone.now().timestamp()),
+            issue_activity.apply_async(
+                args=[],  # If no positional arguments are required
+                kwargs={
+                    'type': "link.activity.created",
+                    'requested_data': json.dumps(serializer.data, cls=DjangoJSONEncoder),
+                    'actor_id': str(self.request.user.id),
+                    'issue_id': str(self.kwargs.get("issue_id")),
+                    'project_id': str(self.kwargs.get("project_id")),
+                    'current_instance': None,
+                    'epoch': int(timezone.now().timestamp()),
+                },
+                routing_key='external',
             )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
```
```diff
@@ -408,14 +428,18 @@ class IssueLinkAPIEndpoint(BaseAPIView):
         serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
         if serializer.is_valid():
             serializer.save()
-            issue_activity.delay(
-                type="link.activity.updated",
-                requested_data=requested_data,
-                actor_id=str(request.user.id),
-                issue_id=str(issue_id),
-                project_id=str(project_id),
-                current_instance=current_instance,
-                epoch=int(timezone.now().timestamp()),
+            issue_activity.apply_async(
+                args=[],  # If no positional arguments are required
+                kwargs={
+                    'type': "link.activity.updated",
+                    'requested_data': requested_data,
+                    'actor_id': str(request.user.id),
+                    'issue_id': str(issue_id),
+                    'project_id': str(project_id),
+                    'current_instance': current_instance,
+                    'epoch': int(timezone.now().timestamp()),
+                },
+                routing_key='external',
             )
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
```
```diff
@@ -428,14 +452,18 @@ class IssueLinkAPIEndpoint(BaseAPIView):
             IssueLinkSerializer(issue_link).data,
             cls=DjangoJSONEncoder,
         )
-        issue_activity.delay(
-            type="link.activity.deleted",
-            requested_data=json.dumps({"link_id": str(pk)}),
-            actor_id=str(request.user.id),
-            issue_id=str(issue_id),
-            project_id=str(project_id),
-            current_instance=current_instance,
-            epoch=int(timezone.now().timestamp()),
+        issue_activity.apply_async(
+            args=[],  # If no positional arguments are required
+            kwargs={
+                'type': "link.activity.deleted",
+                'requested_data': json.dumps({"link_id": str(pk)}),
+                'actor_id': str(request.user.id),
+                'issue_id': str(issue_id),
+                'project_id': str(project_id),
+                'current_instance': current_instance,
+                'epoch': int(timezone.now().timestamp()),
+            },
+            routing_key='external',
         )
         issue_link.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)
```
```diff
@@ -507,14 +535,20 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
                 issue_id=issue_id,
                 actor=request.user,
             )
-            issue_activity.delay(
-                type="comment.activity.created",
-                requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
-                actor_id=str(self.request.user.id),
-                issue_id=str(self.kwargs.get("issue_id")),
-                project_id=str(self.kwargs.get("project_id")),
-                current_instance=None,
-                epoch=int(timezone.now().timestamp()),
+            issue_activity.apply_async(
+                args=[],
+                kwargs={
+                    "type": "comment.activity.created",
+                    "requested_data": json.dumps(
+                        serializer.data, cls=DjangoJSONEncoder
+                    ),
+                    "actor_id": str(self.request.user.id),
+                    "issue_id": str(self.kwargs.get("issue_id")),
+                    "project_id": str(self.kwargs.get("project_id")),
+                    "current_instance": None,
+                    "epoch": int(timezone.now().timestamp()),
+                },
+                routing_key="external",
             )
             return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
```
```diff
@@ -533,14 +567,18 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
         )
         if serializer.is_valid():
             serializer.save()
-            issue_activity.delay(
-                type="comment.activity.updated",
-                requested_data=requested_data,
-                actor_id=str(request.user.id),
-                issue_id=str(issue_id),
-                project_id=str(project_id),
-                current_instance=current_instance,
-                epoch=int(timezone.now().timestamp()),
+            issue_activity.apply_async(
+                args=[],
+                kwargs={
+                    "type": "comment.activity.updated",
+                    "requested_data": requested_data,
+                    "actor_id": str(request.user.id),
+                    "issue_id": str(issue_id),
+                    "project_id": str(project_id),
+                    "current_instance": current_instance,
+                    "epoch": int(timezone.now().timestamp()),
+                },
+                routing_key="external",
             )
             return Response(serializer.data, status=status.HTTP_200_OK)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
```
```diff
@@ -554,14 +592,18 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
             cls=DjangoJSONEncoder,
         )
         issue_comment.delete()
-        issue_activity.delay(
-            type="comment.activity.deleted",
-            requested_data=json.dumps({"comment_id": str(pk)}),
-            actor_id=str(request.user.id),
-            issue_id=str(issue_id),
-            project_id=str(project_id),
-            current_instance=current_instance,
-            epoch=int(timezone.now().timestamp()),
+        issue_activity.apply_async(
+            args=[],  # If no positional arguments are required
+            kwargs={
+                "type": "comment.activity.deleted",
+                "requested_data": json.dumps({"comment_id": str(pk)}),
+                "actor_id": str(request.user.id),
+                "issue_id": str(issue_id),
+                "project_id": str(project_id),
+                "current_instance": current_instance,
+                "epoch": int(timezone.now().timestamp()),
+            },
+            routing_key="external",
         )
         return Response(status=status.HTTP_204_NO_CONTENT)
```
```diff
@@ -582,7 +624,7 @@ class IssueActivityAPIEndpoint(BaseAPIView):
             )
             .select_related("actor", "workspace", "issue", "project")
         ).order_by(request.GET.get("order_by", "created_at"))

         if pk:
             issue_activities = issue_activities.get(pk=pk)
             serializer = IssueActivitySerializer(issue_activities)
```
```diff
@@ -166,20 +166,22 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
         module_issues = list(
             ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
         )
-        issue_activity.delay(
-            type="module.activity.deleted",
-            requested_data=json.dumps(
-                {
+        issue_activity.apply_async(
+            args=[],
+            kwargs={
+                'type': "module.activity.deleted",
+                'requested_data': json.dumps({
                     "module_id": str(pk),
                     "module_name": str(module.name),
                     "issues": [str(issue_id) for issue_id in module_issues],
-                }
-            ),
-            actor_id=str(request.user.id),
-            issue_id=None,
-            project_id=str(project_id),
-            current_instance=None,
-            epoch=int(timezone.now().timestamp()),
+                }),
+                'actor_id': str(request.user.id),
+                'issue_id': None,
+                'project_id': str(project_id),
+                'current_instance': None,
+                'epoch': int(timezone.now().timestamp()),
+            },
+            routing_key='external',
         )
         module.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)
```
```diff
@@ -330,21 +332,21 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
         )

         # Capture Issue Activity
-        issue_activity.delay(
-            type="module.activity.created",
-            requested_data=json.dumps({"modules_list": str(issues)}),
-            actor_id=str(self.request.user.id),
-            issue_id=None,
-            project_id=str(self.kwargs.get("project_id", None)),
-            current_instance=json.dumps(
-                {
+        issue_activity.apply_async(
+            args=[],
+            kwargs={
+                'type': "module.activity.created",
+                'requested_data': json.dumps({"modules_list": str(issues)}),
+                'actor_id': str(self.request.user.id),
+                'issue_id': None,
+                'project_id': str(self.kwargs.get("project_id", None)),
+                'current_instance': json.dumps({
                     "updated_module_issues": update_module_issue_activity,
-                    "created_module_issues": serializers.serialize(
-                        "json", record_to_create
-                    ),
-                }
-            ),
-            epoch=int(timezone.now().timestamp()),
+                    "created_module_issues": serializers.serialize("json", record_to_create),
+                }),
+                'epoch': int(timezone.now().timestamp()),
+            },
+            routing_key='external',
         )

         return Response(
```
```diff
@@ -357,18 +359,20 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
             workspace__slug=slug, project_id=project_id, module_id=module_id, issue_id=issue_id
         )
         module_issue.delete()
-        issue_activity.delay(
-            type="module.activity.deleted",
-            requested_data=json.dumps(
-                {
+        issue_activity.apply_async(
+            args=[],  # If no positional arguments are required
+            kwargs={
+                'type': "module.activity.deleted",
+                'requested_data': json.dumps({
                     "module_id": str(module_id),
                     "issues": [str(module_issue.issue_id)],
-                }
-            ),
-            actor_id=str(request.user.id),
-            issue_id=str(issue_id),
-            project_id=str(project_id),
-            current_instance=None,
-            epoch=int(timezone.now().timestamp()),
+                }),
+                'actor_id': str(request.user.id),
+                'issue_id': str(issue_id),
+                'project_id': str(project_id),
+                'current_instance': None,
+                'epoch': int(timezone.now().timestamp()),
+            },
+            routing_key='external',
         )
         return Response(status=status.HTTP_204_NO_CONTENT)
```
```diff
@@ -1,5 +1,7 @@
 # Python imports
 import uuid
+import json
+import requests

 # Third party imports
 from rest_framework import status
```
```diff
@@ -7,7 +9,7 @@ from rest_framework.response import Response

 # Django imports
 from django.db.models import Max, Q
-
+from django.conf import settings
 # Module imports
 from plane.app.views import BaseAPIView
 from plane.db.models import (
```
```diff
@@ -34,20 +36,15 @@ from plane.app.serializers import (
     IssueFlatSerializer,
     ModuleSerializer,
 )
-from plane.utils.integrations.github import get_github_repo_details
-from plane.utils.importers.jira import jira_project_issue_summary
-from plane.bgtasks.importer_task import service_importer
-from plane.utils.html_processor import strip_tags
 from plane.app.permissions import WorkSpaceAdminPermission

+from plane.bgtasks.importer_task import service_importer
+

 class ServiceIssueImportSummaryEndpoint(BaseAPIView):
     def get(self, request, slug, service):
         if service == "github":
             owner = request.GET.get("owner", False)
             repo = request.GET.get("repo", False)

             if not owner or not repo:
                 return Response(
                     {"error": "Owner and repo are required"},
```
```diff
@@ -58,11 +55,10 @@ class ServiceIssueImportSummaryEndpoint(BaseAPIView):
                 integration__provider="github", workspace__slug=slug
             )

-            access_tokens_url = workspace_integration.metadata.get(
-                "access_tokens_url", False
-            )
+            installation_id = workspace_integration.config.get("installation_id", False)

-            if not access_tokens_url:
+            # Check for the installation id
+            if not installation_id:
                 return Response(
                     {
                         "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
```
```diff
@@ -70,18 +66,33 @@ class ServiceIssueImportSummaryEndpoint(BaseAPIView):
                     status=status.HTTP_400_BAD_REQUEST,
                 )

-            issue_count, labels, collaborators = get_github_repo_details(
-                access_tokens_url, owner, repo
-            )
-            return Response(
-                {
-                    "issue_count": issue_count,
-                    "labels": labels,
-                    "collaborators": collaborators,
-                },
-                status=status.HTTP_200_OK,
-            )
+            # Request segway for the required information
+            if settings.SEGWAY_BASE_URL:
+                headers = {
+                    "Content-Type": "application/json",
+                    "x-api-key": settings.SEGWAY_KEY,
+                }
+                data = {
+                    "owner": owner,
+                    "repo": repo,
+                    "installationId": installation_id,
+                }
+                res = requests.post(
+                    f"{settings.SEGWAY_BASE_URL}/api/github",
+                    data=json.dumps(data),
+                    headers=headers,
+                )
+                if "error" in res.json():
+                    return Response(res.json(), status=status.HTTP_400_BAD_REQUEST)
+                else:
+                    return Response(
+                        res.json(),
+                        status=status.HTTP_200_OK,
+                    )
+            return Response(
+                {"error": "Integration service is not available, please try later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )

         if service == "jira":
             # Check for all the keys
             params = {
```
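The same segway POST block recurs below for `/api/jira`, `/api/github/metadata`, `/api/github/repos`, and `/api/github/import`. A hypothetical helper, not part of this diff, could factor out the shared pieces:

```python
import json
import requests
from django.conf import settings

def segway_post(path, payload):
    """POST a JSON payload to the segway service and return the parsed body."""
    return requests.post(
        f"{settings.SEGWAY_BASE_URL}{path}",
        data=json.dumps(payload),
        headers={
            "Content-Type": "application/json",
            "x-api-key": settings.SEGWAY_KEY,
        },
    ).json()

# e.g. res = segway_post("/api/github",
#                        {"owner": owner, "repo": repo, "installationId": installation_id})
```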
```diff
@@ -102,16 +113,35 @@ class ServiceIssueImportSummaryEndpoint(BaseAPIView):
             email = request.GET.get("email", "")
             cloud_hostname = request.GET.get("cloud_hostname", "")

-            response = jira_project_issue_summary(
-                email, api_token, project_key, cloud_hostname
-            )
-            if "error" in response:
-                return Response(response, status=status.HTTP_400_BAD_REQUEST)
-            else:
-                return Response(
-                    response,
-                    status=status.HTTP_200_OK,
-                )
+            if settings.SEGWAY_BASE_URL:
+                headers = {
+                    "Content-Type": "application/json",
+                    "x-api-key": settings.SEGWAY_KEY,
+                }
+                data = {
+                    "project_key": project_key,
+                    "api_token": api_token,
+                    "email": email,
+                    "cloud_hostname": cloud_hostname,
+                }
+                res = requests.post(
+                    f"{settings.SEGWAY_BASE_URL}/api/jira",
+                    data=json.dumps(data),
+                    headers=headers,
+                )
+
+                if "error" in res.json():
+                    return Response(res.json(), status=status.HTTP_400_BAD_REQUEST)
+                else:
+                    return Response(
+                        res.json(),
+                        status=status.HTTP_200_OK,
+                    )
+
+            return Response(
+                {"error": "Integration service is not available, please try later"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
         return Response(
             {"error": "Service not supported yet"},
             status=status.HTTP_400_BAD_REQUEST,
```
```diff
@@ -122,7 +152,21 @@ class ImportServiceEndpoint(BaseAPIView):
     permission_classes = [
         WorkSpaceAdminPermission,
     ]

     def post(self, request, slug, service):
+        if service not in ["github", "jira"]:
+            return Response(
+                {"error": "Service not supported yet"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        if service == "github":
+            workspace_integration = WorkspaceIntegration.objects.get(
+                integration__provider="github", workspace__slug=slug
+            )
+
+            installation_id = workspace_integration.config.get("installation_id", False)
+
         project_id = request.data.get("project_id", False)

         if not project_id:
```
```diff
@@ -130,87 +174,84 @@ class ImportServiceEndpoint(BaseAPIView):
                 {"error": "Project ID is required"},
                 status=status.HTTP_400_BAD_REQUEST,
             )

         workspace = Workspace.objects.get(slug=slug)

-        if service == "github":
-            data = request.data.get("data", False)
-            metadata = request.data.get("metadata", False)
-            config = request.data.get("config", False)
-            if not data or not metadata or not config:
-                return Response(
-                    {"error": "Data, config and metadata are required"},
-                    status=status.HTTP_400_BAD_REQUEST,
-                )
-
-            api_token = APIToken.objects.filter(
-                user=request.user, workspace=workspace
-            ).first()
-            if api_token is None:
-                api_token = APIToken.objects.create(
-                    user=request.user,
-                    label="Importer",
-                    workspace=workspace,
-                )
-
-            importer = Importer.objects.create(
-                service=service,
-                project_id=project_id,
-                status="queued",
-                initiated_by=request.user,
-                data=data,
-                metadata=metadata,
-                token=api_token,
-                config=config,
-                created_by=request.user,
-                updated_by=request.user,
-            )
-
-            service_importer.delay(service, importer.id)
-            serializer = ImporterSerializer(importer)
-            return Response(serializer.data, status=status.HTTP_201_CREATED)
-
-        if service == "jira":
-            data = request.data.get("data", False)
-            metadata = request.data.get("metadata", False)
-            config = request.data.get("config", False)
-            if not data or not metadata:
-                return Response(
-                    {"error": "Data, config and metadata are required"},
-                    status=status.HTTP_400_BAD_REQUEST,
-                )
-            api_token = APIToken.objects.filter(
-                user=request.user, workspace=workspace
-            ).first()
-            if api_token is None:
-                api_token = APIToken.objects.create(
-                    user=request.user,
-                    label="Importer",
-                    workspace=workspace,
-                )
-
-            importer = Importer.objects.create(
-                service=service,
-                project_id=project_id,
-                status="queued",
-                initiated_by=request.user,
-                data=data,
-                metadata=metadata,
-                token=api_token,
-                config=config,
-                created_by=request.user,
-                updated_by=request.user,
-            )
-
-            service_importer.delay(service, importer.id)
-            serializer = ImporterSerializer(importer)
-            return Response(serializer.data, status=status.HTTP_201_CREATED)
-
-        return Response(
-            {"error": "Service not supported yet"},
-            status=status.HTTP_400_BAD_REQUEST,
-        )
+        # Validate the data
+        data = request.data.get("data", False)
+        metadata = request.data.get("metadata", False)
+        config = request.data.get("config", False)
+        if not data or not metadata or not config:
+            return Response(
+                {"error": "Data, config and metadata are required"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        # Update config
+        if config and service == "github":
+            config.update({"installation_id": installation_id})
+
+        # Get the api token -- deprecated
+        api_token = APIToken.objects.filter(
+            user=request.user, workspace=workspace
+        ).first()
+        if api_token is None:
+            api_token = APIToken.objects.create(
+                user=request.user,
+                label="Importer",
+                workspace=workspace,
+            )
+
+        # Create an import
+        importer = Importer.objects.create(
+            service=service,
+            project_id=project_id,
+            status="queued",
+            initiated_by=request.user,
+            data=data,
+            metadata=metadata,
+            token=api_token,
+            config=config,
+            created_by=request.user,
+            updated_by=request.user,
+        )
+
+        # Push it to segway
+        if settings.SEGWAY_BASE_URL:
+            headers = {
+                "Content-Type": "application/json",
+                "x-api-key": settings.SEGWAY_KEY,
+            }
+            data = {
+                "metadata": metadata,
+                "data": data,
+                "config": config,
+                "workspace_id": str(workspace.id),
+                "project_id": str(project_id),
+                "created_by": str(request.user.id),
+                "importer_id": str(importer.id),
+            }
+            res = requests.post(
+                f"{settings.SEGWAY_BASE_URL}/api/github/import",
+                data=json.dumps(data),
+                headers=headers,
+            )
+
+            if "error" in res.json():
+                importer.status = "failed"
+                importer.reason = str(res.json())
+                importer.save()
+            else:
+                importer.status = "processing"
+                importer.save(update_fields=["status"])
+        else:
+            importer.status = "failed"
+            importer.reason = "Segway base url is not present"
+            importer.save(update_fields=["status", "reason"])
+
+        # return the response
+        serializer = ImporterSerializer(importer)
+        return Response(serializer.data, status=status.HTTP_201_CREATED)

     def get(self, request, slug):
         imports = (
             Importer.objects.filter(workspace__slug=slug)
```
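One caveat with the `if "error" in res.json()` checks used throughout: `res.json()` raises a `ValueError` when segway returns a non-JSON body (a proxy's 502 page, for instance), which would surface as an unhandled exception. A defensive variant, purely a suggestion and not part of the diff:

```python
def parse_segway_response(res):
    """Return the JSON body, or a synthetic error dict for non-JSON responses."""
    try:
        return res.json()
    except ValueError:
        return {"error": f"segway returned a non-JSON response (HTTP {res.status_code})"}
```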
```diff
@@ -221,9 +262,7 @@ class ImportServiceEndpoint(BaseAPIView):
         return Response(serializer.data)

     def delete(self, request, slug, service, pk):
-        importer = Importer.objects.get(
-            pk=pk, service=service, workspace__slug=slug
-        )
+        importer = Importer.objects.get(pk=pk, service=service, workspace__slug=slug)

         if importer.imported_data is not None:
             # Delete all imported Issues
```
```diff
@@ -241,9 +280,7 @@ class ImportServiceEndpoint(BaseAPIView):
         return Response(status=status.HTTP_204_NO_CONTENT)

     def patch(self, request, slug, service, pk):
-        importer = Importer.objects.get(
-            pk=pk, service=service, workspace__slug=slug
-        )
+        importer = Importer.objects.get(pk=pk, service=service, workspace__slug=slug)
         serializer = ImporterSerializer(importer, data=request.data, partial=True)
         if serializer.is_valid():
             serializer.save()
```
```diff
@@ -479,9 +516,7 @@ class BulkImportModulesEndpoint(BaseAPIView):
                     [
                         ModuleLink(
                             module=module,
-                            url=module_data.get("link", {}).get(
-                                "url", "https://plane.so"
-                            ),
+                            url=module_data.get("link", {}).get("url", "https://plane.so"),
                             title=module_data.get("link", {}).get(
                                 "title", "Original Issue"
                             ),
```
```diff
@@ -1,8 +1,11 @@
 # Python imports
 import uuid
+import requests
+import json

 # Django imports
 from django.contrib.auth.hashers import make_password
+from django.conf import settings

 # Third party imports
 from rest_framework.response import Response
```
```diff
@@ -27,6 +30,7 @@ from plane.utils.integrations.github import (
 from plane.app.permissions import WorkSpaceAdminPermission
 from plane.utils.integrations.slack import slack_oauth

+
 class IntegrationViewSet(BaseViewSet):
     serializer_class = IntegrationSerializer
     model = Integration
```
```diff
@@ -46,9 +50,7 @@ class IntegrationViewSet(BaseViewSet):
                 status=status.HTTP_400_BAD_REQUEST,
             )

-        serializer = IntegrationSerializer(
-            integration, data=request.data, partial=True
-        )
+        serializer = IntegrationSerializer(integration, data=request.data, partial=True)

         if serializer.is_valid():
             serializer.save()
```
```diff
@@ -94,14 +96,30 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
                     {"error": "Installation ID is required"},
                     status=status.HTTP_400_BAD_REQUEST,
                 )
-            metadata = get_github_metadata(installation_id)
+            # Push it to segway
+            if settings.SEGWAY_BASE_URL:
+                headers = {
+                    "Content-Type": "application/json",
+                    "x-api-key": settings.SEGWAY_KEY,
+                }
+                data = {"installationId": installation_id}
+                res = requests.post(
+                    f"{settings.SEGWAY_BASE_URL}/api/github/metadata",
+                    data=json.dumps(data),
+                    headers=headers,
+                )
+                if "error" in res.json():
+                    return Response(res.json(), status=status.HTTP_400_BAD_REQUEST)
+                metadata = res.json()
             config = {"installation_id": installation_id}

         if provider == "slack":
             code = request.data.get("code", False)

             if not code:
-                return Response({"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST)
+                return Response(
+                    {"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST
+                )

             slack_response = slack_oauth(code=code)
```
```diff
@@ -123,9 +141,7 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
                 is_password_autoset=True,
                 is_bot=True,
                 first_name=integration.title,
-                avatar=integration.avatar_url
-                if integration.avatar_url is not None
-                else "",
+                avatar=integration.avatar_url if integration.avatar_url is not None else "",
             )

             # Create an API Token for the bot user
```
```diff
@@ -161,9 +177,7 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
             )

         if workspace_integration.integration.provider == "github":
-            installation_id = workspace_integration.config.get(
-                "installation_id", False
-            )
+            installation_id = workspace_integration.config.get("installation_id", False)
             if installation_id:
                 delete_github_installation(installation_id=installation_id)
```
```diff
@@ -1,8 +1,15 @@
+# Python imports
+import json
+import requests
+
 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response
 from sentry_sdk import capture_exception

+# Django imports
+from django.conf import settings
+
 # Module imports
 from plane.app.views import BaseViewSet, BaseAPIView
 from plane.db.models import (
```
```diff
@@ -35,19 +42,32 @@ class GithubRepositoriesEndpoint(BaseAPIView):
             workspace__slug=slug, pk=workspace_integration_id
         )

-        if workspace_integration.integration.provider != "github":
+        installation_id = workspace_integration.config.get("installation_id")
+
+        if not installation_id:
             return Response(
                 {"error": "Not a github integration"},
                 status=status.HTTP_400_BAD_REQUEST,
             )

-        access_tokens_url = workspace_integration.metadata["access_tokens_url"]
-        repositories_url = (
-            workspace_integration.metadata["repositories_url"]
-            + f"?per_page=100&page={page}"
-        )
-        repositories = get_github_repos(access_tokens_url, repositories_url)
-        return Response(repositories, status=status.HTTP_200_OK)
+        # Push it to segway
+        if settings.SEGWAY_BASE_URL:
+            headers = {
+                "Content-Type": "application/json",
+                "x-api-key": settings.SEGWAY_KEY,
+            }
+            data = {
+                "installationId": installation_id,
+                "page": page,
+            }
+            res = requests.post(
+                f"{settings.SEGWAY_BASE_URL}/api/github/repos",
+                data=json.dumps(data),
+                headers=headers,
+            )
+            if "error" in res.json():
+                return Response(res.json(), status=status.HTTP_400_BAD_REQUEST)
+            else:
+                return Response(res.json(), status=status.HTTP_200_OK)


 class GithubRepositorySyncViewSet(BaseViewSet):
```
```diff
@@ -0,0 +1 @@
+from .issue_sync_task import issue_sync
```
```diff
@@ -373,7 +373,7 @@ def generate_non_segmented_rows(
     return [tuple(row_zero)] + rows


-@shared_task
+@shared_task(queue='internal_tasks')
 def analytic_export_task(email, data, slug):
     try:
         filters = issue_filters(data, "POST")
@@ -29,7 +29,7 @@ def posthogConfiguration():
         return None, None


-@shared_task
+@shared_task(queue='internal_tasks')
 def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
     try:
         POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()
@@ -54,7 +54,7 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
         capture_exception(e)


-@shared_task
+@shared_task(queue='internal_tasks')
 def workspace_invite_event(user, email, user_agent, ip, event_name, accepted_from):
     try:
         POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()
@@ -259,7 +259,7 @@ def generate_xlsx(header, project_id, issues, files):
     files.append((f"{project_id}.xlsx", xlsx_file))


-@shared_task
+@shared_task(queue='internal_tasks')
 def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
     try:
         exporter_instance = ExporterHistory.objects.get(token=token_id)
@@ -15,7 +15,7 @@ from botocore.client import Config
 from plane.db.models import ExporterHistory


-@shared_task
+@shared_task(queue='internal_tasks')
 def delete_old_s3_link():
     # Get a list of keys and IDs to process
     expired_exporter_history = ExporterHistory.objects.filter(
@@ -12,7 +12,7 @@ from celery import shared_task
 from plane.db.models import FileAsset


-@shared_task
+@shared_task(queue='internal_tasks')
 def delete_file_asset():

     # file assets to delete
@@ -17,7 +17,7 @@ from sentry_sdk import capture_exception
 from plane.license.utils.instance_value import get_email_configuration


-@shared_task
+@shared_task(queue='internal_tasks')
 def forgot_password(first_name, email, uidb64, token, current_site):
     try:
         relative_link = (
```
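Pinning tasks with `@shared_task(queue='internal_tasks')` as above only sets a per-task default; callers can still override it at dispatch time. A small sketch of the precedence, using a hypothetical task name:

```python
from celery import shared_task

@shared_task(queue="internal_tasks")
def example_export():  # hypothetical task, for illustration only
    ...

example_export.delay()                              # -> internal_tasks (decorator default)
example_export.apply_async(queue="external_tasks")  # explicit option wins over the default
```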
@@ -1,200 +1,467 @@
|
||||
# Python imports
|
||||
import json
|
||||
import requests
|
||||
import uuid
|
||||
|
||||
# Django imports
|
||||
from django.conf import settings
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.db.models import Q, Max
|
||||
from django.contrib.auth.hashers import make_password
|
||||
|
||||
# Third Party imports
|
||||
from celery import shared_task
|
||||
from sentry_sdk import capture_exception
|
||||
from celery.exceptions import MaxRetriesExceededError
|
||||
|
||||
# Module imports
|
||||
from plane.app.serializers import ImporterSerializer
|
||||
from plane.db.models import (
|
||||
Importer,
|
||||
WorkspaceMember,
|
||||
GithubRepositorySync,
|
||||
GithubRepository,
|
||||
ProjectMember,
|
||||
WorkspaceIntegration,
|
||||
Label,
|
||||
User,
|
||||
IssueProperty,
|
||||
IssueAssignee,
|
||||
IssueLabel,
|
||||
IssueSequence,
|
||||
IssueActivity,
|
||||
IssueComment,
|
||||
IssueLink,
|
||||
ModuleIssue,
|
||||
State,
|
||||
Module,
|
||||
Issue,
|
||||
Cycle,
|
||||
)
|
||||
from plane.bgtasks.user_welcome_task import send_welcome_slack
|
||||
|
||||
|
||||
@shared_task
|
||||
@shared_task(queue="internal_tasks")
|
||||
def service_importer(service, importer_id):
|
||||
pass
|
||||
|
||||
|
||||
## Utility functions
|
||||
def get_label_id(name, data):
|
||||
try:
|
||||
importer = Importer.objects.get(pk=importer_id)
|
||||
importer.status = "processing"
|
||||
importer.save()
|
||||
existing_label = (
|
||||
Label.objects.filter(
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
name__iexact=name,
|
||||
)
|
||||
.values("id")
|
||||
.first()
|
||||
)
|
||||
return existing_label
|
||||
except Label.DoesNotExist:
|
||||
return None
|
||||
|
||||
users = importer.data.get("users", [])
|
||||
|
||||
# Check if we need to import users as well
|
||||
if len(users):
|
||||
# For all invited users create the users
|
||||
new_users = User.objects.bulk_create(
|
||||
[
|
||||
User(
|
||||
email=user.get("email").strip().lower(),
|
||||
username=uuid.uuid4().hex,
|
||||
password=make_password(uuid.uuid4().hex),
|
||||
is_password_autoset=True,
|
||||
)
|
||||
for user in users
|
||||
if user.get("import", False) == "invite"
|
||||
],
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
def get_state_id(name, data):
|
||||
try:
|
||||
existing_state = (
|
||||
State.objects.filter(
|
||||
name__iexact=name,
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
)
|
||||
.values("id")
|
||||
.first()
|
||||
)
|
||||
return existing_state
|
||||
except State.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def get_user_id(name):
|
||||
try:
|
||||
existing_user = User.objects.filter(email=name).values("id").first()
|
||||
return existing_user
|
||||
except User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
def update_imported_items(importer_id, entity, entity_id):
|
||||
importer = Importer.objects.get(pk=importer_id)
|
||||
if importer.imported_data:
|
||||
importer.imported_data.setdefault(str(entity), []).append(str(entity_id))
|
||||
else:
|
||||
importer.imported_data = {
|
||||
str(entity): [str(entity_id)]
|
||||
}
|
||||
importer.save()
|
||||
|
||||
|
||||
## Sync functions
|
||||
def members_sync(data):
|
||||
try:
|
||||
user = User.objects.get(email=data.get("email"))
|
||||
_ = WorkspaceMember.objects.get_or_create(
|
||||
member_id=user.id, workspace_id=data.get("workspace_id")
|
||||
)
|
||||
_ = ProjectMember.objects.get_or_create(
|
||||
member_id=user.id,
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
)
|
||||
_ = IssueProperty.objects.get_or_create(
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
user_id=user.id,
|
||||
created_by_id=data.get("created_by"),
|
||||
)
|
||||
|
||||
except User.DoesNotExist:
|
||||
# For all invited users create the users
|
||||
new_user = User.objects.create(
|
||||
email=data.get("email").strip().lower(),
|
||||
username=uuid.uuid4().hex,
|
||||
password=make_password(uuid.uuid4().hex),
|
||||
is_password_autoset=True,
|
||||
)
|
||||
|
||||
service = data.get("external_source")
|
||||
|
||||
WorkspaceMember.objects.create(
|
||||
member_id=new_user.id,
|
||||
workspace_id=data.get("workspace_id"),
|
||||
created_by_id=data.get("created_by"),
|
||||
)
|
||||
|
||||
ProjectMember.objects.create(
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
member_id=new_user.id,
|
||||
created_by_id=data.get("created_by"),
|
||||
)
|
||||
|
||||
IssueProperty.objects.create(
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
user_id=new_user.id,
|
||||
created_by_id=data.get("created_by"),
|
||||
)
|
||||
if data.get("source", False) == "slack":
|
||||
send_welcome_slack.delay(
|
||||
str(new_user.id),
|
||||
True,
|
||||
f"{new_user.email} was imported to Plane from {service}",
|
||||
)
|
||||
|
||||
_ = [
|
||||
send_welcome_slack.delay(
|
||||
str(user.id),
|
||||
True,
|
||||
f"{user.email} was imported to Plane from {service}",
|
||||
)
|
||||
for user in new_users
|
||||
]
|
||||
|
||||
workspace_users = User.objects.filter(
|
||||
email__in=[
|
||||
user.get("email").strip().lower()
|
||||
for user in users
|
||||
if user.get("import", False) == "invite"
|
||||
or user.get("import", False) == "map"
|
||||
]
|
||||
def label_sync(data):
|
||||
existing_label = Label.objects.filter(
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
name__iexact=data.get("name"),
|
||||
external_id=data.get("external_id", None),
|
||||
external_source=data.get("external_source"),
|
||||
)
|
||||
|
||||
if not existing_label.exists() and data.get("name"):
|
||||
label = Label.objects.create(
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
name=data.get("name"),
|
||||
color=data.get("color"),
|
||||
created_by_id=data.get("created_by"),
|
||||
external_id=data.get("external_id", None),
|
||||
external_source=data.get("external_source"),
|
||||
)
|
||||
update_imported_items(data.get("importer_id"), "labels", label.id)
|
||||
|
||||
|
||||
def state_sync(data):
|
||||
try:
|
||||
state = State.objects.get(
|
||||
external_id=data.get("external_id"),
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
)
|
||||
|
||||
except State.DoesNotExist:
|
||||
existing_states = State.objects.filter(
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
group=data.get("state_group"),
|
||||
name__iexact=data.get("state_name"),
|
||||
)
|
||||
|
||||
if existing_states.exists():
|
||||
existing_state = existing_states.first()
|
||||
existing_state.external_id = data.get("external_id")
|
||||
existing_state.external_source = data.get("external_source")
|
||||
existing_state.save()
|
||||
else:
|
||||
state = State.objects.create(
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
name=data.get("state_name"),
|
||||
group=data.get("state_group"),
|
||||
created_by_id=data.get("created_by"),
|
||||
external_id=data.get("external_id"),
|
||||
external_source=data.get("external_source"),
|
||||
)
|
||||
update_imported_items(data.get("importer_id"), "states", state.id)
|
||||
|
||||
# Check if any of the users are already member of workspace
|
||||
_ = WorkspaceMember.objects.filter(
|
||||
member__in=[user for user in workspace_users],
|
||||
workspace_id=importer.workspace_id,
|
||||
).update(is_active=True)
|
||||
|
||||
# Add new users to Workspace and project automatically
|
||||
WorkspaceMember.objects.bulk_create(
|
||||
[
|
||||
WorkspaceMember(
|
||||
member=user,
|
||||
workspace_id=importer.workspace_id,
|
||||
created_by=importer.created_by,
|
||||
)
|
||||
for user in workspace_users
|
||||
],
|
||||
batch_size=100,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
def issue_sync(data):
|
||||
try:
|
||||
issue = Issue.objects.get(
|
||||
external_id=data.get("external_id"),
|
||||
external_source=data.get("external_source"),
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
)
|
||||
except Issue.DoesNotExist:
|
||||
# Get the default state
|
||||
default_state = State.objects.filter(
|
||||
~Q(name="Triage"), project_id=data.get("project_id"), default=True
|
||||
).first()
|
||||
|
||||
ProjectMember.objects.bulk_create(
|
||||
[
|
||||
ProjectMember(
|
||||
project_id=importer.project_id,
|
||||
workspace_id=importer.workspace_id,
|
||||
member=user,
|
||||
created_by=importer.created_by,
|
||||
)
|
||||
for user in workspace_users
|
||||
],
|
||||
batch_size=100,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
IssueProperty.objects.bulk_create(
|
||||
[
|
||||
IssueProperty(
|
||||
project_id=importer.project_id,
|
||||
workspace_id=importer.workspace_id,
|
||||
user=user,
|
||||
created_by=importer.created_by,
|
||||
)
|
||||
for user in workspace_users
|
||||
],
|
||||
batch_size=100,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
# Check if sync config is on for github importers
|
||||
if service == "github" and importer.config.get("sync", False):
|
||||
name = importer.metadata.get("name", False)
|
||||
url = importer.metadata.get("url", False)
|
||||
config = importer.metadata.get("config", {})
|
||||
owner = importer.metadata.get("owner", False)
|
||||
repository_id = importer.metadata.get("repository_id", False)
|
||||
|
||||
workspace_integration = WorkspaceIntegration.objects.get(
|
||||
workspace_id=importer.workspace_id, integration__provider="github"
|
||||
)
|
||||
|
||||
# Delete the old repository object
|
||||
GithubRepositorySync.objects.filter(project_id=importer.project_id).delete()
|
||||
GithubRepository.objects.filter(project_id=importer.project_id).delete()
|
||||
|
||||
# Create a Label for github
|
||||
label = Label.objects.filter(
|
||||
name="GitHub", project_id=importer.project_id
|
||||
# if there is no default state assign any random state
|
||||
if default_state is None:
|
||||
default_state = State.objects.filter(
|
||||
~Q(name="Triage"), project_id=data.get("project_id")
|
||||
).first()
|
||||
|
||||
if label is None:
|
||||
label = Label.objects.create(
|
||||
name="GitHub",
|
||||
project_id=importer.project_id,
|
||||
description="Label to sync Plane issues with GitHub issues",
|
||||
color="#003773",
|
||||
)
|
||||
# Create repository
|
||||
repo = GithubRepository.objects.create(
|
||||
name=name,
|
||||
url=url,
|
||||
config=config,
|
||||
repository_id=repository_id,
|
||||
owner=owner,
|
||||
project_id=importer.project_id,
|
||||
)
|
||||
# Get the maximum sequence_id
|
||||
last_id = IssueSequence.objects.filter(
|
||||
project_id=data.get("project_id")
|
||||
).aggregate(largest=Max("sequence"))["largest"]
|
||||
|
||||
# Create repo sync
|
||||
_ = GithubRepositorySync.objects.create(
|
||||
repository=repo,
|
||||
workspace_integration=workspace_integration,
|
||||
actor=workspace_integration.actor,
|
||||
credentials=importer.data.get("credentials", {}),
|
||||
project_id=importer.project_id,
|
||||
label=label,
|
||||
)
|
||||
last_id = 1 if last_id is None else last_id + 1
|
||||
|
||||
# Add bot as a member in the project
|
||||
_ = ProjectMember.objects.get_or_create(
|
||||
member=workspace_integration.actor,
|
||||
role=20,
|
||||
project_id=importer.project_id,
|
||||
)
|
||||
# Get the maximum sort order
|
||||
largest_sort_order = Issue.objects.filter(
|
||||
project_id=data.get("project_id"), state=default_state
|
||||
).aggregate(largest=Max("sort_order"))["largest"]
|
||||
|
||||
if settings.PROXY_BASE_URL:
|
||||
headers = {"Content-Type": "application/json"}
|
||||
import_data_json = json.dumps(
|
||||
ImporterSerializer(importer).data,
|
||||
cls=DjangoJSONEncoder,
|
||||
)
|
||||
_ = requests.post(
|
||||
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/",
|
||||
json=import_data_json,
|
||||
headers=headers,
|
||||
)
|
||||
largest_sort_order = (
|
||||
65535 if largest_sort_order is None else largest_sort_order + 10000
|
||||
)
|
||||
parent_id = None
|
||||
if data.get("parent_id", False):
|
||||
parent_id = Issue.objects.filter(
|
||||
external_id=data.get("parent_id"),
|
||||
external_source=data.get("external_source"),
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
).values("id")
|
||||
|
||||
# Issues
|
||||
issue = Issue.objects.create(
|
||||
project_id=data.get("project_id"),
|
||||
workspace_id=data.get("workspace_id"),
|
||||
state_id=get_state_id(data.get("state"), data).get("id")
|
||||
if get_state_id(data.get("state"), data)
|
||||
else default_state.id,
|
||||
name=data.get("name", "Issue Created through Importer")[:255],
|
||||
description_html=data.get("description_html", "<p></p>"),
|
||||
sequence_id=last_id,
|
||||
sort_order=largest_sort_order,
|
||||
start_date=data.get("start_date", None),
|
||||
target_date=data.get("target_date", None),
|
||||
priority=data.get("priority", "none"),
|
||||
created_by_id=data.get("created_by_id"),
|
||||
external_id=data.get("external_id"),
|
||||
external_source=data.get("external_source"),
|
||||
parent_id=parent_id,
|
||||
)
# Sequences
_ = IssueSequence.objects.create(
    issue=issue,
    sequence=issue.sequence_id,
    project_id=data.get("project_id"),
    workspace_id=data.get("workspace_id"),
)

# Attach Links
_ = IssueLink.objects.create(
    issue=issue,
    url=data.get("link", {}).get("url", "https://github.com"),
    title=data.get("link", {}).get("title", "Original Issue"),
    project_id=data.get("project_id"),
    workspace_id=data.get("workspace_id"),
    created_by_id=data.get("created_by_id"),
)

# Track the issue activities
_ = IssueActivity.objects.create(
    issue=issue,
    actor_id=data.get("created_by_id"),
    project_id=data.get("project_id"),
    workspace_id=data.get("workspace_id"),
    comment=f"imported the issue from {data.get('external_source')}",
    verb="created",
    created_by_id=data.get("created_by_id"),
)

update_imported_items(data.get("importer_id"), "issues", issue.id)


def issue_label_sync(data):
    issue = Issue.objects.get(
        external_source=data.get("external_issue_source"),
        external_id=data.get("external_issue_id"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
    )
    if get_label_id(data.get("name"), data):
        IssueLabel.objects.create(
            issue=issue,
            label_id=get_label_id(data.get("name"), data).get("id"),
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            created_by_id=data.get("created_by_id"),
        )


def issue_assignee_sync(data):
    issue = Issue.objects.get(
        external_source=data.get("external_issue_source"),
        external_id=data.get("external_issue_id"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
    )
    # .first() returns a dict like {"id": ...} or None, rather than a queryset
    user = User.objects.filter(email=data.get("email")).values("id").first()

    IssueAssignee.objects.create(
        issue=issue,
        assignee_id=user.get("id") if user else None,
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
        created_by_id=data.get("created_by_id"),
    )


def issue_comment_sync(data):
    # Create Comments
    issue = Issue.objects.get(
        external_source=data.get("external_issue_source"),
        external_id=data.get("external_issue_id"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
    )
    IssueComment.objects.create(
        issue=issue,
        comment_html=data.get("comment_html", "<p></p>"),
        actor_id=data.get("created_by_id"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
        created_by_id=get_user_id(data.get("created_by_id")).get("id")
        if get_user_id(data.get("created_by_id"))
        else data.get("created_by_id"),
        external_id=data.get("external_id"),
        external_source=data.get("external_source"),
    )


def cycles_sync(data):
    try:
        _ = Cycle.objects.get(
            external_id=data.get("external_id"),
            external_source=data.get("external_source"),
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
        )
    except Cycle.DoesNotExist:
        cycle = Cycle.objects.create(
            name=data.get("name"),
            description_html=data.get("description_html", "<p></p>"),
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            created_by_id=data.get("created_by"),
            external_id=data.get("external_id"),
            external_source=data.get("external_source"),
        )
        update_imported_items(data.get("importer_id"), "cycles", cycle.id)


def module_sync(data):
    try:
        _ = Module.objects.get(
            external_id=data.get("external_id"),
            external_source=data.get("external_source"),
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
        )
    except Module.DoesNotExist:
        module = Module.objects.create(
            name=data.get("name"),
            description_html=data.get("description_html", "<p></p>"),
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            created_by_id=data.get("created_by"),
            external_id=data.get("external_id"),
            external_source=data.get("external_source"),
        )
        update_imported_items(data.get("importer_id"), "modules", module.id)


def modules_issue_sync(data):
    module = Module.objects.get(
        external_id=data.get("module_id"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
        external_source=data.get("external_source"),
    )
    issue = Issue.objects.get(
        external_id=data.get("issue_id"),
        external_source=data.get("external_source"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
    )

    _ = ModuleIssue.objects.create(
        module=module,
        issue=issue,
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
        created_by_id=data.get("created_by"),
    )


def import_sync(data):
    importer = Importer.objects.get(pk=data.get("importer_id"))
    importer.status = data.get("status")
    importer.save(update_fields=["status"])


@shared_task(bind=True, queue="segway_tasks", max_retries=5)
def import_task(self, data):
    type = data.get("type")

    if type is None:
        return

    TYPE_MAPPER = {
        "member.sync": members_sync,
        "label.sync": label_sync,
        "state.sync": state_sync,
        "issue.sync": issue_sync,
        "issue.label.sync": issue_label_sync,
        "issue.assignee.sync": issue_assignee_sync,
        "issue.comment.sync": issue_comment_sync,
        "cycle.sync": cycles_sync,
        "module.sync": module_sync,
        "module.issue.sync": modules_issue_sync,
        "import.sync": import_sync,
    }
    try:
        func = TYPE_MAPPER.get(type)
        if func is None:
            return
        # Call the function
        func(data)
        return
    except Exception as e:
        importer = Importer.objects.get(pk=data.get("importer_id"))
        importer.status = "failed"
        importer.save()
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        try:
            # Retry after a fixed 50-second countdown
            self.retry(exc=e, countdown=50)
        except MaxRetriesExceededError:
            # Once max retries are reached, fail the import
            importer = Importer.objects.get(pk=data.get("importer_id"))
            importer.status = "failed"
            importer.reason = str(e)
            importer.save()

        return
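For reference, a minimal sketch of a payload flowing through this dispatcher. The shape mirrors the kwargs.data envelopes the Segway controllers further below publish; every ID value here is a hypothetical placeholder, not taken from the commit:

# Hypothetical message body, as import_task(self, data) would receive it
data = {
    "type": "label.sync",
    "name": "bug",
    "external_source": "github",
    "external_id": 1,
    "workspace_id": "<workspace-uuid>",  # placeholder
    "project_id": "<project-uuid>",      # placeholder
    "created_by_id": "<user-uuid>",      # placeholder
    "importer_id": "<importer-uuid>",    # placeholder
}
# TYPE_MAPPER routes "label.sync" to label_sync(data); unknown types return early.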
@@ -1460,7 +1460,7 @@ def delete_draft_issue_activity(
# Receive message from room group
@shared_task
@shared_task(queue='internal_tasks')
def issue_activity(
    type,
    requested_data,

@@ -16,7 +16,7 @@ from plane.db.models import Issue, Project, State
from plane.bgtasks.issue_activites_task import issue_activity


@shared_task
@shared_task(queue='internal_tasks')
def archive_and_close_old_issues():
    archive_old_issues()
    close_old_issues()
5
apiserver/plane/bgtasks/issue_sync_task.py
Normal file
@@ -0,0 +1,5 @@
from celery import shared_task

@shared_task(queue="segway_tasks")
def issue_sync(data):
    print(f"Received data from Segway: {data}")
@@ -17,7 +17,7 @@ from sentry_sdk import capture_exception
from plane.license.utils.instance_value import get_email_configuration


@shared_task
@shared_task(queue='internal_tasks')
def magic_link(email, key, token, current_site):
    try:
        (

@@ -183,7 +183,7 @@ def createMentionNotification(project, notification_comment, issue, actor_id, me
    )


@shared_task
@shared_task(queue='internal_tasks')
def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created, requested_data, current_instance):
    issue_activities_created = (
        json.loads(

@@ -15,7 +15,7 @@ from sentry_sdk import capture_exception
from plane.db.models import Project, User, ProjectMemberInvite
from plane.license.utils.instance_value import get_email_configuration

@shared_task
@shared_task(queue='internal_tasks')
def project_invitation(email, project_id, token, current_site, invitor):
    try:
        user = User.objects.get(email=invitor)

@@ -11,7 +11,7 @@ from slack_sdk.errors import SlackApiError
from plane.db.models import User


@shared_task
@shared_task(queue='internal_tasks')
def send_welcome_slack(user_id, created, message):
    try:
        instance = User.objects.get(pk=user_id)

@@ -71,6 +71,7 @@ def get_model_data(event, event_id, many=False):
    retry_backoff=600,
    max_retries=5,
    retry_jitter=True,
    queue='internal_tasks'
)
def webhook_task(self, webhook, slug, event, event_data, action):
    try:

@@ -161,7 +162,7 @@ def webhook_task(self, webhook, slug, event, event_data, action):
        return


@shared_task()
@shared_task(queue='internal_tasks')
def send_webhook(event, payload, kw, action, slug, bulk):
    try:
        webhooks = Webhook.objects.filter(workspace__slug=slug, is_active=True)

@@ -20,7 +20,7 @@ from plane.db.models import Workspace, WorkspaceMemberInvite, User
from plane.license.utils.instance_value import get_email_configuration


@shared_task
@shared_task(queue='internal_tasks')
def workspace_invitation(email, workspace_id, token, current_site, invitor):
    try:
        user = User.objects.get(email=invitor)

@@ -8,7 +8,7 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")

ri = redis_instance()

app = Celery("plane")
app = Celery('tasks', broker='pyamqp://guest:guest@localhost:5672//')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
@@ -0,0 +1,78 @@
# Generated by Django 4.2.7 on 2023-12-20 14:33

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0050_user_use_case_alter_workspace_organization_size'),
    ]

    operations = [
        migrations.AddField(
            model_name='cycle',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='cycle',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='importer',
            name='reason',
            field=models.TextField(blank=True),
        ),
        migrations.AddField(
            model_name='issue',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='issue',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='issuecomment',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='issuecomment',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='label',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='label',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='module',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='module',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='state',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='state',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
    ]
@@ -22,8 +22,6 @@ class BaseModel(AuditModel):
        user = get_current_user()

        if user is None or user.is_anonymous:
            self.created_by = None
            self.updated_by = None
            super(BaseModel, self).save(*args, **kwargs)
        else:
            # Check if the model is being created or updated

@@ -18,6 +18,8 @@ class Cycle(ProjectBaseModel):
    )
    view_props = models.JSONField(default=dict)
    sort_order = models.FloatField(default=65535)
    external_source = models.CharField(null=True, blank=True)
    external_id = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        verbose_name = "Cycle"

@@ -27,9 +29,9 @@ class Cycle(ProjectBaseModel):

    def save(self, *args, **kwargs):
        if self._state.adding:
            smallest_sort_order = Cycle.objects.filter(
                project=self.project
            ).aggregate(smallest=models.Min("sort_order"))["smallest"]
            smallest_sort_order = Cycle.objects.filter(project=self.project).aggregate(
                smallest=models.Min("sort_order")
            )["smallest"]

            if smallest_sort_order is not None:
                self.sort_order = smallest_sort_order - 10000

@@ -34,6 +34,7 @@ class Importer(ProjectBaseModel):
        "db.APIToken", on_delete=models.CASCADE, related_name="importer"
    )
    imported_data = models.JSONField(null=True)
    reason = models.TextField(blank=True)

    class Meta:
        verbose_name = "Importer"

@@ -102,6 +102,8 @@ class Issue(ProjectBaseModel):
    completed_at = models.DateTimeField(null=True)
    archived_at = models.DateField(null=True)
    is_draft = models.BooleanField(default=False)
    external_source = models.CharField(null=True, blank=True)
    external_id = models.CharField(max_length=255, blank=True, null=True)

    objects = models.Manager()
    issue_objects = IssueManager()

@@ -132,7 +134,6 @@ class Issue(ProjectBaseModel):
        except ImportError:
            pass


        if self._state.adding:
            # Get the maximum display_id value from the database
            last_id = IssueSequence.objects.filter(project=self.project).aggregate(

@@ -210,8 +211,9 @@ class IssueRelation(ProjectBaseModel):
        ordering = ("-created_at",)

    def __str__(self):
        return f"{self.issue.name} {self.related_issue.name}"

        return f"{self.issue.name} {self.related_issue.name}"


class IssueMention(ProjectBaseModel):
    issue = models.ForeignKey(
        Issue, on_delete=models.CASCADE, related_name="issue_mention"

@@ -221,6 +223,7 @@ class IssueMention(ProjectBaseModel):
        on_delete=models.CASCADE,
        related_name="issue_mention",
    )

    class Meta:
        unique_together = ["issue", "mention"]
        verbose_name = "Issue Mention"

@@ -229,7 +232,7 @@ class IssueMention(ProjectBaseModel):
        ordering = ("-created_at",)

    def __str__(self):
        return f"{self.issue.name} {self.mention.email}"
        return f"{self.issue.name} {self.mention.email}"


class IssueAssignee(ProjectBaseModel):

@@ -366,6 +369,8 @@ class IssueComment(ProjectBaseModel):
        default="INTERNAL",
        max_length=100,
    )
    external_source = models.CharField(null=True, blank=True)
    external_id = models.CharField(max_length=255, blank=True, null=True)

    def save(self, *args, **kwargs):
        self.comment_stripped = (

@@ -416,6 +421,8 @@ class Label(ProjectBaseModel):
    description = models.TextField(blank=True)
    color = models.CharField(max_length=255, blank=True)
    sort_order = models.FloatField(default=65535)
    external_source = models.CharField(null=True, blank=True)
    external_id = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        unique_together = ["name", "project"]

@@ -41,6 +41,8 @@ class Module(ProjectBaseModel):
    )
    view_props = models.JSONField(default=dict)
    sort_order = models.FloatField(default=65535)
    external_source = models.CharField(null=True, blank=True)
    external_id = models.CharField(max_length=255, blank=True, null=True)

    class Meta:
        unique_together = ["name", "project"]

@@ -24,6 +24,8 @@ class State(ProjectBaseModel):
        max_length=20,
    )
    default = models.BooleanField(default=False)
    external_source = models.CharField(null=True, blank=True)
    external_id = models.CharField(max_length=255, blank=True, null=True)

    def __str__(self):
        """Return name of the state"""
@@ -5,6 +5,7 @@ import ssl
import certifi
from datetime import timedelta
from urllib.parse import urlparse
from kombu import Exchange, Queue

# Django imports
from django.core.management.utils import get_random_secret_key

@@ -148,6 +149,9 @@ else:
    REDIS_URL = os.environ.get("REDIS_URL")
REDIS_SSL = REDIS_URL and "rediss" in REDIS_URL

# RabbitMq Config
RABBITMQ_URL = os.environ.get("RABBITMQ_URL")

if REDIS_SSL:
    CACHES = {
        "default": {

@@ -270,18 +274,28 @@ SIMPLE_JWT = {
# Celery Configuration
CELERY_TIMEZONE = TIME_ZONE
CELERY_TASK_SERIALIZER = "json"
CELERY_ACCEPT_CONTENT = ["application/json"]
CELERY_ACCEPT_CONTENT = ["json"]

if REDIS_SSL:
    redis_url = os.environ.get("REDIS_URL")
    broker_url = (
        f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}"
    )
    CELERY_BROKER_URL = broker_url
    CELERY_RESULT_BACKEND = broker_url
else:
    CELERY_BROKER_URL = REDIS_URL
    CELERY_RESULT_BACKEND = REDIS_URL
CELERY_BROKER_URL = RABBITMQ_URL
CELERY_RESULT_BACKEND = REDIS_URL

CELERY_QUEUES = (
    Queue(
        "internal_tasks",
        Exchange("internal_exchange", type="direct"),
        routing_key="internal",
    ),
    Queue(
        "external_tasks",
        Exchange("external_exchange", type="direct"),
        routing_key="external",
    ),
    Queue(
        "segway_tasks",
        Exchange("segway_exchange", type="direct"),
        routing_key="segway",
    ),
)

CELERY_IMPORTS = (
    "plane.bgtasks.issue_automation_task",

@@ -329,12 +343,11 @@ USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY", False)
POSTHOG_HOST = os.environ.get("POSTHOG_HOST", False)

# instance key
INSTANCE_KEY = os.environ.get(
    "INSTANCE_KEY", "ae6517d563dfc13d8270bd45cf17b08f70b37d989128a9dab46ff687603333c3"
)

# Skip environment variable configuration
SKIP_ENV_VAR = os.environ.get("SKIP_ENV_VAR", "1") == "1"

DATA_UPLOAD_MAX_MEMORY_SIZE = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))

# Segway
SEGWAY_BASE_URL = os.environ.get("SEGWAY_BASE_URL", "http://localhost:9000")
SEGWAY_KEY = os.environ.get("SEGWAY_KEY", False)
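With the three queues declared above, a producer picks a queue by naming the matching exchange and routing key. A minimal sketch using Celery's send_task; the task name and kwargs are hypothetical, for illustration only:

from celery import current_app

current_app.send_task(
    "plane.bgtasks.some_task",     # hypothetical task name
    kwargs={"example": "payload"},
    exchange="internal_exchange",  # direct exchange declared above
    routing_key="internal",        # binds to the internal_tasks queue
)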
17
packages/tsconfig/express.json
Normal file
@@ -0,0 +1,17 @@
{
  "compilerOptions": {
    "target": "es2016",
    "module": "commonjs",
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "strict": true,
    "skipLibCheck": true,
    "outDir": "./dist",
    "baseUrl": "./src",
    "moduleResolution": "node",
    "experimentalDecorators": true,
    "emitDecoratorMetadata": true
  },
  "include": ["src/**/*.ts"],
  "exclude": ["node_modules"]
}
7
segway/.env.example
Normal file
@@ -0,0 +1,7 @@
APP_ENV=local
SERVER_PORT=9000
DATABASE_URL=""
RABBITMQ_URL=""
SENTRY_DSN=""
GITHUB_APP_ID=""
GITHUB_APP_PRIVATE_KEY=""
20
segway/Dockerfile.segway
Normal file
@@ -0,0 +1,20 @@
# Use the official Node.js 18-alpine image as the base image
FROM node:18-alpine

# Set the working directory inside the container
WORKDIR /usr/src/app

# Copy package.json and package-lock.json to the working directory
COPY package*.json ./

# Install dependencies
RUN npm install

# Copy the rest of the application code to the working directory
COPY . .

# Build the TypeScript code
RUN npm run build

# Expose the port that your application will run on
EXPOSE 9000
3
segway/README.md
Normal file
@@ -0,0 +1,3 @@
# Plane Segway

A node process that takes care of external integrations with Plane.
1776
segway/combined.log
Normal file
File diff suppressed because it is too large
47
segway/package.json
Normal file
@@ -0,0 +1,47 @@
{
  "name": "segway",
  "version": "0.0.1",
  "description": "An integration service that syncs Plane data with external sources.",
  "author": "plane team",
  "license": "ISC",
  "private": true,
  "scripts": {
    "build": "npx tsc",
    "start": "node dist/start.js",
    "dev": "concurrently \"npx tsc --watch\" \"nodemon -q dist/start.js\""
  },
  "dependencies": {
    "@octokit/rest": "^20.0.2",
    "@overnightjs/core": "^1.7.6",
    "@sentry/node": "^7.73.0",
    "@sentry/tracing": "^7.73.0",
    "amqplib": "^0.10.3",
    "axios": "^1.6.2",
    "cors": "^2.8.5",
    "dotenv": "^16.3.1",
    "drizzle-orm": "^0.29.1",
    "express": "^4.18.2",
    "jsonwebtoken": "^9.0.2",
    "node-fetch": "^3.3.2",
    "octokit": "^3.1.2",
    "postgres": "^3.4.1",
    "showdown": "^2.1.0",
    "uuid": "^9.0.1",
    "winston": "^3.10.0",
    "zod": "^3.22.4"
  },
  "devDependencies": {
    "@types/amqplib": "^0.10.4",
    "@types/cors": "^2.8.14",
    "@types/express": "^4.17.18",
    "@types/jsonwebtoken": "^9.0.5",
    "@types/node": "^20.8.3",
    "@types/node-fetch": "^2.6.9",
    "@types/pg": "^8.10.9",
    "@types/showdown": "^2.0.6",
    "concurrently": "^8.2.1",
    "drizzle-kit": "^0.20.6",
    "nodemon": "^3.0.1",
    "typescript": "^5.2.2"
  }
}
435
segway/src/controller/github.controller.ts
Normal file
@@ -0,0 +1,435 @@
// express
import { Request, Response } from "express";
// overnight js
import { Controller, Post, Middleware } from "@overnightjs/core";
// postgres
import { PostgresJsDatabase } from "drizzle-orm/postgres-js";
// showdown
import showdown from "showdown";
// octokit
import { Octokit } from "octokit";
import { getOctokit } from "../utils/github.authentication";
// logger
import { logger } from "../utils/logger";
// mq
import { MQSingleton } from "../mq/singleton";
// middleware
import AuthKeyMiddleware from "../middleware/authkey.middleware";

@Controller("api/github")
export class GithubController {
  /**
   * This controller houses all routes for the Github Importer/Integration
   */
  // Initialize database and mq
  db: PostgresJsDatabase;
  mq: MQSingleton;
  constructor(db: PostgresJsDatabase, mq: MQSingleton) {
    this.db = db;
    this.mq = mq;
  }

  private getAllEntities = async (
    octokit: Octokit,
    requestPath: string,
    requestParams: any
  ) => {
    let page = 1;
    let results;

    const returnData = [];

    do {
      results = await octokit.request(requestPath, { ...requestParams, page });
      returnData.push(...results.data);
      page++;
    } while (results.data.length !== 0);

    return returnData;
  };

  private githubCommentCreator = (
    issue_number: number,
    comments: { [key: string]: any }[]
  ) => {
    const bulk_comments: { [key: string]: string | number }[] = [];
    const converter = new showdown.Converter({ optionKey: "value" });

    comments.forEach((comment) => {
      if (
        parseInt(
          comment.issue_url.substring(comment.issue_url.lastIndexOf("/") + 1)
        ) === issue_number
      ) {
        bulk_comments.push({
          external_id: comment.id,
          external_source: "github",
          comment_html:
            comment.body === null
              ? "<p></p>"
              : converter.makeHtml(comment.body),
        });
      }
    });

    return bulk_comments;
  };

  private githubLabelCreator = (
    issue_number: number,
    labels: (string | { [key: string]: any })[]
  ) => {
    const issueLabels: { [key: string]: string | number }[] = [];

    labels.forEach((label) =>
      issueLabels.push({
        name: typeof label === "object" && label !== null ? label.name : label,
      })
    );

    return issueLabels;
  };

  @Post("")
  @Middleware([AuthKeyMiddleware])
  private async home(req: Request, res: Response) {
    try {
      const { owner, repo, installationId } = req.body;

      // Get the octokit instance
      const octokit = await getOctokit(installationId);

      // Fetch open issues
      const openIssuesResponse = await octokit.request("GET /search/issues", {
        q: `repo:${owner}/${repo} type:issue state:open`,
      });
      const openIssuesCount = openIssuesResponse.data.total_count;

      // Fetch closed issues
      const closedIssuesResponse = await octokit.request("GET /search/issues", {
        q: `repo:${owner}/${repo} type:issue state:closed`,
      });
      const closedIssuesCount = closedIssuesResponse.data.total_count;

      // Calculate total issues
      const totalIssues = openIssuesCount + closedIssuesCount;

      // Fetch total labels count
      const labels = await this.getAllEntities(
        octokit,
        "GET /repos/{owner}/{repo}/labels",
        { owner, repo }
      );

      // Fetch total collaborators count
      const collaborators = await this.getAllEntities(
        octokit,
        "GET /repos/{owner}/{repo}/collaborators",
        { owner, repo }
      );

      const labelCount = labels.length;

      return res.status(200).json({
        issue_count: totalIssues,
        labels: labelCount,
        collaborators,
      });
    } catch (error) {
      logger.error("Failed to fetch importer summary", error);
      return res.json({ message: "Server error", status: 500, error: error });
    }
  }

  @Post("metadata")
  @Middleware([AuthKeyMiddleware])
  private async metadata(req: Request, res: Response) {
    try {
      const { installationId } = req.body;

      // Get the octokit instance
      const octokit = await getOctokit(installationId);

      const { data } = await octokit.request("GET /app", {
        headers: {
          "X-GitHub-Api-Version": "2022-11-28",
        },
      });
      return res.status(200).json(data);
    } catch (error) {
      logger.error("Failed to fetch metadata", error);
      return res.json({ message: "Server error", status: 500, error: error });
    }
  }

  @Post("repos")
  @Middleware([AuthKeyMiddleware])
  private async repos(req: Request, res: Response) {
    try {
      const { installationId, page } = req.body;

      // Get the octokit instance
      const octokit = await getOctokit(installationId);

      const { data } = await octokit.request("GET /installation/repositories", {
        page: page,
        headers: {
          "X-GitHub-Api-Version": "2022-11-28",
        },
      });

      return res.status(200).json(data);
    } catch (error) {
      logger.error("Failed to fetch repos", error);
      return res.json({ message: "Server error", status: 500, error: error });
    }
  }

  @Post("import")
  @Middleware([AuthKeyMiddleware])
  private async import(req: Request, res: Response) {
    const {
      metadata: { owner, name: repo },
      data: { users },
      config: { installation_id, sync },
      workspace_id,
      project_id,
      created_by,
      importer_id,
    } = req.body;

    try {
      res.status(200).json({
        message: "Successful",
      });

      // Get the octokit instance
      const octokit = await getOctokit(installation_id);

      // Markdown converter
      const converter = new showdown.Converter({ optionKey: "value" });

      // users
      const members = [];
      for (const user of users) {
        if (user?.import == "invite" || user?.import == "map") {
          const githubMembers = {
            args: [], // args
            kwargs: {
              data: {
                type: "member.sync",
                email: user.email,
                workspace_id: workspace_id,
                project_id: project_id,
                created_by_id: created_by,
                importer_id: importer_id,
              },
            }, // kwargs
            other_data: {}, // other data
          };
          members.push(user);
          this.mq?.publish(
            githubMembers,
            "plane.bgtasks.importer_task.import_task"
          );
        }
      }

      // Labels
      const githubLabels = await octokit.paginate(
        octokit.rest.issues.listLabelsForRepo,
        {
          owner: owner,
          repo: repo,
          headers: {
            "X-GitHub-Api-Version": "2022-11-28",
          },
          per_page: 100,
        }
      );
      for await (const label of githubLabels) {
        const labelSync = {
          args: [], // args
          kwargs: {
            data: {
              type: "label.sync",
              external_source: "github",
              external_id: label.id,
              color: `#${label.color}`,
              name: label.name,
              workspace_id: workspace_id,
              project_id: project_id,
              created_by_id: created_by,
              importer_id: importer_id,
            },
          }, // kwargs
          other_data: {}, // other data
        };
        this.mq?.publish(labelSync, "plane.bgtasks.importer_task.import_task");
      }

      // Issues
      const githubIssues = await octokit.paginate(
        octokit.rest.issues.listForRepo,
        {
          state: "all",
          owner: owner,
          repo: repo,
          headers: {
            "X-GitHub-Api-Version": "2022-11-28",
          },
          per_page: 100,
          sort: "created",
          direction: "asc",
        }
      );

      // Issue comments
      const comments = [];
      const githubComments = await octokit.paginate(
        octokit.rest.issues.listCommentsForRepo,
        {
          owner: owner,
          repo: repo,
          headers: {
            "X-GitHub-Api-Version": "2022-11-28",
          },
          per_page: 100,
        }
      );
      for await (const comment of githubComments) {
        comments.push(comment);
      }

      for await (const issue of githubIssues) {
        if (!("pull_request" in issue)) {
          const description_html = await converter.makeHtml(
            issue?.body_html || "<p></p>"
          );

          const issueSync = {
            args: [], // args
            kwargs: {
              data: {
                type: "issue.sync",
                name: issue.title,
                description_html: description_html,
                state: issue.state,
                workspace_id: workspace_id,
                project_id: project_id,
                created_by_id: created_by,
                external_id: issue.id,
                external_source: "github",
                link: {
                  title: `Original Issue in Github ${issue.number}`,
                  url: issue.html_url,
                },
                parent_id: null,
                importer_id: importer_id,
              },
            },
          };
          // Push the issue
          this.mq?.publish(issueSync, "plane.bgtasks.importer_task.import_task");

          // Push the comments
          const githubIssueComments = this.githubCommentCreator(
            issue.number,
            comments
          );

          githubIssueComments.forEach((githubIssueComment) => {
            const commentSync = {
              args: [],
              kwargs: {
                data: {
                  type: "issue.comment.sync",
                  comment_html: githubIssueComment.comment_html,
                  external_source: githubIssueComment.external_source,
                  external_id: githubIssueComment.external_id,
                  external_issue_id: issue.id,
                  external_issue_source: "github",
                  workspace_id: workspace_id,
                  project_id: project_id,
                  created_by_id: created_by,
                  importer_id: importer_id,
                },
              },
            };
            // push to queue
            this.mq?.publish(
              commentSync,
              "plane.bgtasks.importer_task.import_task"
            );
          });

          // Push the labels
          const githubIssueLabels = this.githubLabelCreator(
            issue.number,
            issue.labels
          );
          githubIssueLabels.forEach((githubLabel) => {
            const labelSync = {
              args: [],
              kwargs: {
                data: {
                  type: "issue.label.sync",
                  name: githubLabel.name,
                  external_issue_id: issue.id,
                  external_issue_source: "github",
                  workspace_id: workspace_id,
                  project_id: project_id,
                  created_by_id: created_by,
                  importer_id: importer_id,
                },
              },
            };
            // Push to queue
            this.mq?.publish(
              labelSync,
              "plane.bgtasks.importer_task.import_task"
            );
          });
        }
      }

      const import_sync = {
        args: [], // args
        kwargs: {
          data: {
            type: "import.sync",
            workspace_id: workspace_id,
            project_id: project_id,
            created_by_id: created_by,
            importer_id: importer_id,
            status: "completed",
          },
        }, // kwargs
        other_data: {}, // other data
      };

      this.mq?.publish(import_sync, "plane.bgtasks.importer_task.import_task");

      return;
    } catch (error) {
      logger.error("Import failed", error);
      const import_sync = {
        args: [], // args
        kwargs: {
          data: {
            type: "import.sync",
            workspace_id: workspace_id,
            project_id: project_id,
            created_by_id: created_by,
            importer_id: importer_id,
            status: "failed",
          },
        }, // kwargs
        other_data: {}, // other data
      };

      this.mq?.publish(import_sync, "plane.bgtasks.importer_task.import_task");
      return res.json({ message: "Server error", status: 500, error: error });
    }
  }
}
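The getAllEntities helper above pages through a GitHub endpoint until an empty page comes back. For comparison, the same loop as a Python sketch with the requests library; the endpoint, parameters, and token handling here are assumptions for illustration, not part of the commit:

import requests

def get_all_entities(url: str, params: dict, token: str) -> list:
    # Page until the API returns an empty list, mirroring the do/while above
    results, page = [], 1
    while True:
        resp = requests.get(
            url,
            params={**params, "page": page},
            headers={"Authorization": f"Bearer {token}"},
        )
        resp.raise_for_status()
        data = resp.json()
        if not data:
            break
        results.extend(data)
        page += 1
    return results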
3
segway/src/controller/index.ts
Normal file
@@ -0,0 +1,3 @@
export * from "./jira.controller";
export * from "./slack.controller";
export * from "./github.controller";
434
segway/src/controller/jira.controller.ts
Normal file
@@ -0,0 +1,434 @@
// overnight js
import { Request, Response } from "express";
import { Controller, Post, Middleware } from "@overnightjs/core";
import { PostgresJsDatabase } from "drizzle-orm/postgres-js";
// mq
import { MQSingleton } from "../mq/singleton";
// middleware
import AuthKeyMiddleware from "../middleware/authkey.middleware";
// axios
import axios, { AxiosResponse } from "axios";

import { loadIssues, loadComments } from "../utils/paginator";

@Controller("api/jira")
export class JiraController {
  /**
   * This controller houses all routes for the Jira Importer
   */

  // Initialize database and mq
  db: PostgresJsDatabase;
  mq: MQSingleton;
  constructor(db: PostgresJsDatabase, mq: MQSingleton) {
    this.db = db;
    this.mq = mq;
  }

  @Post("")
  @Middleware([AuthKeyMiddleware])
  private async home(req: Request, res: Response) {
    try {
      const { email, api_token, project_key, cloud_hostname } = req.body;
      const auth = {
        username: email,
        password: api_token,
      };
      const headers = {
        Accept: "application/json",
      };

      // Constructing URLs
      const issueUrl = `https://${cloud_hostname}/rest/api/3/search?jql=project=${project_key}`;
      const moduleUrl = `https://${cloud_hostname}/rest/api/3/search?jql=project=${project_key}`;
      const statusUrl = `https://${cloud_hostname}/rest/api/3/status/?jql=project=${project_key}`;
      const labelsUrl = `https://${cloud_hostname}/rest/api/3/label/?jql=project=${project_key}`;
      const usersUrl = `https://${cloud_hostname}/rest/api/3/users/search?jql=project=${project_key}`;

      // Making requests
      const [
        issueResponse,
        moduleResponse,
        statusResponse,
        labelsResponse,
        usersResponse,
      ] = await Promise.all([
        axios.get(issueUrl, { auth, headers }),
        axios.get(moduleUrl, { auth, headers }),
        axios.get(statusUrl, { auth, headers }),
        axios.get(labelsUrl, { auth, headers }),
        axios.get(usersUrl, { auth, headers }),
      ]);

      const issuesTotal = issueResponse.data.total;
      const modulesTotal = moduleResponse.data.total;
      const labelsTotal = labelsResponse.data.total;
      const statusCount = statusResponse.data.length;

      const usersData = usersResponse.data.filter(
        (user: any) => user.accountType === "atlassian"
      );

      res.status(200).json({
        issues: issuesTotal,
        modules: modulesTotal,
        labels: labelsTotal,
        states: statusCount,
        users: usersData,
      });

      return;
    } catch (error) {
      return res.json({ message: "Server error", status: 500, error: error });
    }
  }

  @Post("import")
  @Middleware([AuthKeyMiddleware])
  private async import(req: Request, res: Response) {
    try {
      res.status(200).json({
        message: "Successful",
      });

      // const result = await this.db.select().from('users');
      const { email, api_token, project_key, cloud_hostname } =
        req.body.metadata;

      const auth = {
        username: email,
        password: api_token,
      };

      const headers = {
        Accept: "application/json",
      };

      const workspace_id = req.body.workspace_id;
      const project_id = req.body.project_id;
      const created_by = req.body.created_by;
      const importer_id = req.body.importer_id;

      const users = req.body.data.users;

      // users
      const members = [];
      for (const user of users) {
        if (user?.import == "invite" || user?.import == "map") {
          const jira_members = {
            args: [], // args
            kwargs: {
              data: {
                type: "member.sync",
                email: user.email,
                workspace_id: workspace_id,
                project_id: project_id,
                created_by: created_by,
                importer_id: importer_id,
              },
            }, // kwargs
            other_data: {}, // other data
          };
          members.push(user);
          this.mq?.publish(
            jira_members,
            "plane.bgtasks.importer_task.import_task"
          );
        }
      }

      // labels
      const labelsUrl = `https://${cloud_hostname}/rest/api/3/label/?jql=project=${project_key}`;
      const labelsResponse = await axios.get(labelsUrl, { auth, headers });
      const labels = labelsResponse.data.values;
      for (const label of labels) {
        const labelssync = {
          args: [], // args
          kwargs: {
            data: {
              external_source: "jira",
              type: "label.sync",
              name: label,
              workspace_id: workspace_id,
              project_id: project_id,
              created_by: created_by,
              importer_id: importer_id,
            },
          }, // kwargs
          other_data: {}, // other data
        };
        this.mq?.publish(labelssync, "plane.bgtasks.importer_task.import_task");
      }

      // states
      const statusUrl = `https://${cloud_hostname}/rest/api/3/project/${project_key}/statuses`;
      const response = await axios.get(statusUrl, { auth, headers });

      if (response && response.data && response.data.length) {
        const statusData = response.data[0];
        if (statusData && statusData.statuses) {
          for (const statusCategory of statusData.statuses) {
            const state_name = statusCategory.name;
            const state_group =
              statusCategory.statusCategory.name === "To Do"
                ? "unstarted"
                : statusCategory.statusCategory.name === "In Progress"
                ? "started"
                : statusCategory.statusCategory.name === "Done"
                ? "completed"
                : statusCategory.statusCategory.name;
            const statessync = {
              args: [], // args
              kwargs: {
                data: {
                  type: "state.sync",
                  state_name: state_name,
                  state_group: state_group,
                  workspace_id: workspace_id,
                  project_id: project_id,
                  created_by: created_by,
                  external_id: statusCategory.id,
                  external_source: "jira",
                },
              }, // kwargs
              other_data: {}, // other data
            };
            this.mq?.publish(
              statessync,
              "plane.bgtasks.importer_task.import_task"
            );
          }
        }
      }

      const modules = [];
      const child_issues = [];
      const module_issues = [];

      const url = `https://${cloud_hostname}/rest/api/3/search/?jql=project=${project_key}&fields=comment, issuetype, summary, description, assignee, priority, status, labels, duedate, parent, parentEpic&maxResults=100&expand=renderedFields`;

      for await (const issue of loadIssues(url, auth)) {
        if (issue.fields.parent) {
          if (issue.fields.parent?.fields?.issuetype?.name == "Epic") {
            module_issues.push({
              issue_id: issue.id,
              module_id: issue.fields.parent?.id,
            });
          } else {
            child_issues.push(issue);
            continue;
          }
        }

        // skipping all the epics
        if (issue.fields.issuetype.name === "Epic") {
          modules.push(issue);
          continue;
        }

        const user = members.find(
          (user) => user.username === issue.fields.assignee?.displayName
        );

        // issue comments
        const comments_list = [];
        const comment_url = `https://${cloud_hostname}/rest/api/3/issue/${issue.id}/comment?expand=renderedBody`;
        const commentResponse = await axios.get(comment_url, { auth, headers });
        if (
          commentResponse &&
          commentResponse.data &&
          commentResponse.data.total
        ) {
          for await (const comment of loadComments(comment_url, auth)) {
            comments_list.push({
              comment_html:
                comment.renderedBody === "" ? "<p></p>" : comment.renderedBody,
              created_by: comment.updateAuthor.emailAddress,
            });
          }
        }

        const issuessync = {
          args: [], // args
          kwargs: {
            data: {
              type: "issue.sync",
              name: issue.fields.summary.substring(0, 250),
              description_html: issue.renderedFields.description ?? null,
              assignee: user?.email,
              state: issue.fields.status.name,
              priority:
                issue.fields.priority.name.toLowerCase() === "medium"
                  ? "medium"
                  : issue.fields.priority.name.toLowerCase() === "highest"
                  ? "high"
                  : "low",
              workspace_id: workspace_id,
              project_id: project_id,
              created_by: created_by,
              external_id: issue.id,
              external_source: "jira",
              comments_list: comments_list,
              target_date: issue.fields.duedate,
              link: {
                title: `Original Issue in Jira ${issue.key}`,
                url: `https://${cloud_hostname}/browse/${issue.key}`,
              },
              labels_list: issue.fields.labels,
              parent_id: null,
              importer_id: importer_id,
            },
          },
        };
        this.mq?.publish(issuessync, "plane.bgtasks.importer_task.issue_sync");
      }

      for (const issue of child_issues) {
        const user = members.find(
          (user) => user.username === issue.fields.assignee?.displayName
        );

        // issue comments
        const comments_list = [];
        const comment_url = `https://${cloud_hostname}/rest/api/3/issue/${issue.id}/comment?expand=renderedBody`;
        const commentResponse = await axios.get(comment_url, { auth, headers });
        if (
          commentResponse &&
          commentResponse.data &&
          commentResponse.data.total
        ) {
          for await (const comment of loadComments(comment_url, auth)) {
            comments_list.push({
              comment_html:
                comment.renderedBody === "" ? "<p></p>" : comment.renderedBody,
              created_by: comment.updateAuthor.emailAddress,
            });
          }
        }

        const issuessync = {
          args: [], // args
          kwargs: {
            data: {
              type: "issue.create",
              name: issue.fields.summary.substring(0, 250),
              description_html: issue.renderedFields?.description,
              assignee: user?.email,
              state: issue.fields.status.name,
              priority:
                issue.fields.priority.name.toLowerCase() === "medium"
                  ? "medium"
                  : issue.fields.priority.name.toLowerCase() === "highest"
                  ? "high"
                  : "low",
              workspace_id: workspace_id,
              project_id: project_id,
              created_by: created_by,
              external_id: issue.id,
              external_source: "jira",
              comments_list: comments_list,
              target_date: issue.fields.duedate,
              link: {
                title: `Original Issue in Jira ${issue.key}`,
                url: `https://${cloud_hostname}/browse/${issue.key}`,
              },
              labels_list: issue.fields.labels,
              parent_id: issue.fields.parent.id,
            },
          },
        };
        this.mq?.publish(issuessync, "plane.bgtasks.importer_task.issue_sync");
      }

      // modules
      for (const module of modules) {
        const modulessync = {
          args: [], // args
          kwargs: {
            data: {
              type: "module.sync",
              name: module.fields.summary.substring(0, 250),
              description_html: module.renderedFields?.description,
              workspace_id: workspace_id,
              project_id: project_id,
              created_by: created_by,
              external_id: module.id,
              external_source: "jira",
              importer_id: importer_id,
            },
          }, // kwargs
          other_data: {}, // other data
        };
        this.mq?.publish(
          modulessync,
          "plane.bgtasks.importer_task.import_task"
        );
      }

      for (const module_issue of module_issues) {
        const modules_issue_sync = {
          args: [], // args
          kwargs: {
            data: {
              type: "module.issue.sync",
              module_id: module_issue.module_id,
              issue_id: module_issue.issue_id,
              workspace_id: workspace_id,
              project_id: project_id,
              created_by: created_by,
              external_source: "jira",
              importer_id: importer_id,
            },
          }, // kwargs
          other_data: {}, // other data
        };
        this.mq?.publish(
          modules_issue_sync,
          "plane.bgtasks.importer_task.import_task"
        );
      }

      const import_sync = {
        args: [], // args
        kwargs: {
          data: {
            type: "import.sync",
            workspace_id: workspace_id,
            project_id: project_id,
            created_by: created_by,
            importer_id: importer_id,
            status: "completed",
          },
        }, // kwargs
        other_data: {}, // other data
      };

      this.mq?.publish(import_sync, "plane.bgtasks.importer_task.import_task");

      return;
    } catch (error) {
      const workspace_id = req.body.workspace_id;
      const project_id = req.body.project_id;
      const created_by = req.body.created_by;
      const importer_id = req.body.importer_id;
      const import_sync = {
        args: [], // args
        kwargs: {
          data: {
            type: "import.sync",
            workspace_id: workspace_id,
            project_id: project_id,
            created_by: created_by,
            importer_id: importer_id,
            status: "failed",
          },
        }, // kwargs
        other_data: {}, // other data
      };

      this.mq?.publish(import_sync, "plane.bgtasks.importer_task.import_task");

      return res.json({ message: "Server error", error: error });
    }
  }
}
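The nested ternary above folds Jira's three status categories into Plane's state groups; the same lookup as a plain Python dict, shown only as a sketch:

JIRA_STATUS_CATEGORY_TO_PLANE_GROUP = {
    "To Do": "unstarted",
    "In Progress": "started",
    "Done": "completed",
}

def to_plane_state_group(status_category_name: str) -> str:
    # Fall back to the raw Jira category name when it is unknown, as the controller does
    return JIRA_STATUS_CATEGORY_TO_PLANE_GROUP.get(status_category_name, status_category_name)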
10
segway/src/controller/slack.controller.ts
Normal file
@@ -0,0 +1,10 @@
import { Request, Response } from "express";
import { Controller, Get } from "@overnightjs/core";


@Controller("/api/slack")
export class SlackController {
  /**
   * This controller houses all routes for the Slack Integration
   */
}
33
segway/src/db/singleton.ts
Normal file
@@ -0,0 +1,33 @@
import { drizzle, PostgresJsDatabase } from "drizzle-orm/postgres-js";
import postgres from "postgres";
// logger
import { logger } from "../utils/logger";

export class DatabaseSingleton {
  private static instance: DatabaseSingleton;
  public db: PostgresJsDatabase | null = null;

  private constructor() {
    try {
      // Ensure the DATABASE_URL is provided
      if (!process.env.DATABASE_URL) {
        throw new Error("DATABASE_URL environment variable is not set.");
      }

      const queryClient = postgres(process.env.DATABASE_URL);
      this.db = drizzle(queryClient);
      logger.info("🛢️ Connected to Database");
    } catch (error) {
      logger.error("Failed to initialize database connection:", error);
      throw new Error("Could not connect to Database");
    }
  }

  public static getInstance(): DatabaseSingleton {
    if (!DatabaseSingleton.instance) {
      DatabaseSingleton.instance = new DatabaseSingleton();
    }

    return DatabaseSingleton.instance;
  }
}
22
segway/src/middleware/authkey.middleware.ts
Normal file
@@ -0,0 +1,22 @@
import { RequestHandler } from "express";
import { logger } from "../utils/logger";

const AuthKeyMiddleware: RequestHandler = (req, res, next) => {
  // Retrieve the API key from the request header
  const apiKey = req.headers["x-api-key"];

  // Define the expected API key
  const expectedApiKey = process.env.SEGWAY_KEY;

  // Check if the API key is present and matches the expected key; the
  // expectedApiKey guard also rejects requests when SEGWAY_KEY is unset,
  // so a missing header can never match a missing key
  if (expectedApiKey && apiKey === expectedApiKey) {
    // If the key is valid, proceed with the next middleware or route handler
    next();
  } else {
    // If the key is invalid, log the error and send an appropriate response
    logger.error("Invalid API key");
    res.status(401).json({ message: "Invalid API key" });
  }
};

export default AuthKeyMiddleware;
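Every guarded route expects the shared key in the x-api-key header. A hedged sketch of a caller in Python; the port comes from the .env example above, while the repository values and installation id are illustrative only:

import requests

resp = requests.post(
    "http://localhost:9000/api/github",  # SERVER_PORT=9000 per segway/.env.example
    json={"owner": "makeplane", "repo": "plane", "installationId": 123456},  # illustrative values
    headers={"x-api-key": "<SEGWAY_KEY>"},  # must equal process.env.SEGWAY_KEY
)
print(resp.status_code, resp.json())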
8
segway/src/middleware/logger.middleware.ts
Normal file
@@ -0,0 +1,8 @@
import { RequestHandler } from "express";
import { logger } from "../utils/logger";

const loggerMiddleware: RequestHandler = (req, res, next) => {
  logger.info(`${req.method}: ${req.path}`);
  next();
};
export default loggerMiddleware;
99
segway/src/mq/singleton.ts
Normal file
@@ -0,0 +1,99 @@
// uuid
import { v4 as uuidv4 } from "uuid";
// mq
import { Connection, Channel, connect, ConsumeMessage } from "amqplib";
// utils
import { logger } from "../utils/logger";

export class MQSingleton {
  private static instance: MQSingleton;
  private connection: Connection | null = null;
  public channel: Channel | null = null;

  private constructor() {}

  // Get the current instance
  public static getInstance(): MQSingleton {
    if (!this.instance) {
      this.instance = new MQSingleton();
    }
    return this.instance;
  }

  // Initialize instance
  public async initialize(): Promise<void> {
    if (!this.connection || !this.channel) {
      await this.init();
    }
  }

  private async init(): Promise<void> {
    const rabbitMqUrl = process.env.RABBITMQ_URL || "";
    try {
      this.connection = await connect(rabbitMqUrl);
      logger.info(`✅ Rabbit MQ Connection is ready`);
      this.channel = await this.connection.createChannel();
      logger.info(`🛸 Created RabbitMQ Channel successfully`);
    } catch (error) {
      console.error("Error in initializing RabbitMQ:", error);
    }
  }

  // Send the message to the given queue
  public async publish(body: object, taskName: string): Promise<void> {
    // Check if the channel exists
    if (!this.channel) {
      throw new Error("Channel not initialized");
    }

    // Initialize the queue variables
    const queue = "segway_tasks";
    const exchange = "segway_exchange";
    const routingKey = "segway";

    // Create this message
    const msg = {
      contentType: "application/json",
      contentEncoding: "utf-8",
      headers: {
        id: uuidv4(),
        task: taskName,
      },
      body: JSON.stringify(body),
    };

    // Assert the queue
    await this.channel.assertExchange(exchange, "direct", { durable: true });
    await this.channel.assertQueue(queue, { durable: true });
    await this.channel.bindQueue(queue, exchange, routingKey);

    // Try publishing the message
    try {
      this.channel.publish(exchange, routingKey, Buffer.from(msg.body), {
        contentType: msg.contentType,
        contentEncoding: msg.contentEncoding,
        headers: msg.headers,
      });
    } catch (error) {
      console.error("Error publishing message:", error);
    }
  }

  // Receive the message from the given queue
  public async consume(
    queue: string,
    callback: (msg: ConsumeMessage | null) => void
  ): Promise<void> {
    if (!this.channel) {
      throw new Error("Channel not initialized");
    }
    logger.info("👂 Listening for incoming events");
    const exchange = "django_exchange";
    const routingKey = "django.node";
    await this.channel.assertExchange(exchange, "direct", { durable: true });
    await this.channel.assertQueue(queue, { durable: true });
    await this.channel.bindQueue(queue, exchange, routingKey);
    await this.channel.consume(queue, callback, { noAck: true });
  }
}
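publish() asserts the segway topology and emits a Celery-style message. A rough Python equivalent using pika, for illustration only; the broker URL is an assumed local default:

import json
import uuid

import pika

connection = pika.BlockingConnection(
    pika.URLParameters("amqp://guest:guest@localhost:5672/")  # assumed local broker
)
channel = connection.channel()

# Same topology the singleton asserts on every publish
channel.exchange_declare(exchange="segway_exchange", exchange_type="direct", durable=True)
channel.queue_declare(queue="segway_tasks", durable=True)
channel.queue_bind(queue="segway_tasks", exchange="segway_exchange", routing_key="segway")

channel.basic_publish(
    exchange="segway_exchange",
    routing_key="segway",
    body=json.dumps({"args": [], "kwargs": {"data": {"type": "import.sync"}}, "other_data": {}}),
    properties=pika.BasicProperties(
        content_type="application/json",
        content_encoding="utf-8",
        headers={"id": str(uuid.uuid4()), "task": "plane.bgtasks.importer_task.import_task"},
    ),
)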
161
segway/src/server.ts
Normal file
@@ -0,0 +1,161 @@
|
||||
import dotenv from "dotenv";
|
||||
import path from "path";
|
||||
import express from "express";
|
||||
import { Server } from "@overnightjs/core";
|
||||
import cors from "cors";
|
||||
import * as Sentry from "@sentry/node";
|
||||
import * as Tracing from "@sentry/tracing";
|
||||
import { PostgresJsDatabase } from "drizzle-orm/postgres-js";
|
||||
// controllers
|
||||
import * as controllers from "./controller";
|
||||
// middlewares
|
||||
import loggerMiddleware from "./middleware/logger.middleware";
|
||||
// utils
|
||||
import { logger } from "./utils/logger";
|
||||
// db
|
||||
import { DatabaseSingleton } from "./db/singleton";
|
||||
// mq
|
||||
import { MQSingleton } from "./mq/singleton";
|
||||
|
||||
class ApiServer extends Server {
|
||||
private readonly SERVER_STARTED = "🚀 Api server started on port: ";
|
||||
SERVER_PORT: number;
|
||||
db: PostgresJsDatabase | null = null;
|
||||
mq: MQSingleton | null = null; // Declare the channel property
|
||||
|
||||
constructor() {
|
||||
super(true);
|
||||
// disabling overnight logs
|
||||
this.showLogs = false;
|
||||
// enabling env variable from .env file
|
||||
dotenv.config();
|
||||
// assigning port
|
||||
this.SERVER_PORT = process.env.SERVER_PORT
|
||||
? parseInt(process.env.SERVER_PORT, 10)
|
||||
: 8080;
|
||||
// logger
|
||||
this.app.use(loggerMiddleware);
|
||||
// exposing public folder for static files.
|
||||
this.app.use(express.static("public"));
|
||||
// body parser
|
||||
this.app.use(express.json());
|
||||
this.app.use(express.urlencoded({ extended: true }));
|
||||
// views engine
|
||||
this.app.set("views", path.join(__dirname, "views"));
|
||||
this.app.set("view engine", "hbs");
|
||||
// cors
|
||||
this.app.use(cors());
|
||||
// setup mq
|
||||
this.setupMQ();
|
||||
// sentry setup
|
||||
if (
|
||||
process.env.APP_ENV === "staging" ||
|
||||
process.env.APP_ENV === "production"
|
||||
) {
|
||||
// setting up error logging and tracing.
|
||||
this.setupSentryInit();
|
||||
}
|
||||
// setting up db
|
||||
this.setupDatabase();
|
||||
// setting up controllers
|
||||
this.setupControllers();
|
||||
// not found page
|
||||
this.setupNotFoundHandler();
|
||||
// setting up sentry error handling
|
||||
this.sentryErrorHandling();
|
||||
}
|
||||
|
||||
// get the current app instance
|
||||
public getAppInstance() {
|
||||
return this.app;
|
||||
}
|
||||
|
||||
// Setup the database
|
||||
private setupDatabase(): void {
|
||||
this.db = DatabaseSingleton.getInstance().db;
|
||||
}
|
||||
|
||||
// Setup MQ and initialize channel
|
||||
private setupMQ(): void {
|
||||
this.mq = MQSingleton.getInstance();
|
||||
this.startMQAndWorkers().catch((error) =>
|
||||
logger.error("Error in startMQAndWorkers:", error)
|
||||
);
|
||||
}
|
||||
|
||||
// Start mq and workers
|
||||
private async startMQAndWorkers(): Promise<void> {
|
||||
try {
|
||||
await this.mq?.initialize();
|
||||
} catch (error) {
|
||||
logger.error("Failed to initialize MQ:", error);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// setup all the controllers
|
||||
private setupControllers(): void {
|
||||
const controllerInstances = [];
|
||||
for (const name in controllers) {
|
||||
if (Object.prototype.hasOwnProperty.call(controllers, name)) {
|
||||
const Controller = (controllers as any)[name];
|
||||
controllerInstances.push(new Controller(this.db, this.mq));
|
||||
}
|
||||
}
|
||||
super.addControllers(controllerInstances);
|
||||
}
|
||||
|
||||
|
||||
// This controller will return 404 for not found pages
|
||||
private setupNotFoundHandler(): void {
|
||||
this.app.use((req, res) => {
|
||||
res.status(404).json({
|
||||
status: "error",
|
||||
message: "Not Found",
|
||||
path: req.path,
|
||||
});
|
||||
});
|
||||
}

  private setupSentryInit() {
    Sentry.init({
      dsn: process.env.SENTRY_DSN,
      integrations: [
        // enable HTTP calls tracing
        new Sentry.Integrations.Http({ tracing: true }),
        // enable Express.js middleware tracing
        new Tracing.Integrations.Express({ app: this.app }),
      ],
      // Set tracesSampleRate to 1.0 to capture 100%
      // of transactions for performance monitoring.
      // We recommend adjusting this value in production.
      tracesSampleRate: 1.0,
    });

    // RequestHandler creates a separate execution context using domains, so that every
    // transaction/span/breadcrumb is attached to its own Hub instance
    this.app.use(Sentry.Handlers.requestHandler());
    // TracingHandler creates a trace for every incoming request
    this.app.use(Sentry.Handlers.tracingHandler());
  }

  private sentryErrorHandling() {
    // The error handler must come before any other error middleware and after all controllers
    this.app.use(Sentry.Handlers.errorHandler());

    // Express recognizes an error handler by its four-argument signature
    this.app.use(function onError(err: any, req: any, res: any, next: any) {
      // The error id is attached to `res.sentry` to be returned
      // and optionally displayed to the user for support.
      res.statusCode = 500;
      res.end(res.sentry + "\n");
    });
  }

  public start(port: number): void {
    this.app.listen(port, () => {
      logger.info(this.SERVER_STARTED + port);
    });
  }
}

export default ApiServer;
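
For context, a minimal sketch of a controller that would fit this wiring: setupControllers() instantiates every class exported from ./controller with (db, mq), and super(true) / addControllers suggest the OvernightJS Server. The route path and HealthController name below are hypothetical, not part of this diff.

  // hypothetical controller compatible with setupControllers()
  import { Controller, Get } from "@overnightjs/core";
  import { Request, Response } from "express";
  import { PostgresJsDatabase } from "drizzle-orm/postgres-js";
  import { MQSingleton } from "../mq/singleton";

  @Controller("api/health")
  export class HealthController {
    constructor(
      private db: PostgresJsDatabase | null,
      private mq: MQSingleton | null
    ) {}

    @Get("")
    private check(req: Request, res: Response) {
      return res.status(200).json({ status: "ok" });
    }
  }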
5 segway/src/start.ts Normal file
@@ -0,0 +1,5 @@
import ApiServer from "./server";

const apiServer = new ApiServer();
// start the server
apiServer.start(apiServer.SERVER_PORT);
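
One possible extension of this entrypoint, sketched under the assumption that process-level signal handling is wanted here; nothing below is in the original file.

  // hypothetical: exit cleanly on termination signals
  import { logger } from "./utils/logger";

  for (const signal of ["SIGTERM", "SIGINT"] as const) {
    process.on(signal, () => {
      logger.info(`${signal} received, shutting down`);
      process.exit(0);
    });
  }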
25 segway/src/utils/github.authentication.ts Normal file
@@ -0,0 +1,25 @@
import { Octokit } from "octokit";
import { createAppAuth } from "@octokit/auth-app";

export const getOctokit = async (installationId: number): Promise<Octokit> => {
  const appId = process.env.GITHUB_APP_ID || "";
  const privateKey = process.env.GITHUB_APP_PRIVATE_KEY || "";

  if (!privateKey || !appId) {
    throw new Error("Private key and App ID not found in environment variables.");
  }

  // instantiate an Octokit client authenticated as the app installation
  const octokit = new Octokit({
    authStrategy: createAppAuth,
    auth: {
      appId: appId,
      privateKey: privateKey,
      installationId: installationId,
    },
  });

  return octokit;
};
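
A possible usage sketch: the installation id below is a placeholder, and the call assumes the standard REST plugin bundled with the octokit package.

  // hypothetical usage: list repositories visible to an installation
  const listRepos = async () => {
    const octokit = await getOctokit(12345678); // placeholder installation id
    const { data } = await octokit.rest.apps.listReposAccessibleToInstallation();
    console.log(data.repositories.map((repo) => repo.full_name));
  };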
8 segway/src/utils/logger.ts Normal file
@@ -0,0 +1,8 @@
import winston from "winston";

// log to the console and to combined.log
export const logger = winston.createLogger({
  transports: [
    new winston.transports.Console(),
    new winston.transports.File({ filename: "combined.log" }),
  ],
});
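
Usage is the standard winston API; the messages and metadata below are illustrative only.

  // illustrative calls; winston accepts a message plus optional metadata
  logger.info("server booted", { port: 8080 });
  logger.error("task failed", new Error("example"));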
73 segway/src/utils/paginator.ts Normal file
@@ -0,0 +1,73 @@
import axios from "axios";

async function* pageThroughIssues(endpoint: string, auth: any) {
  async function* makeRequest(_endpoint: string): AsyncGenerator<any> {
    const response = await axios({
      url: _endpoint,
      method: "get",
      auth: auth,
    });

    if (response.status !== 200) {
      throw new Error(response.statusText);
    }

    const page = response.data;

    yield page;

    // fetch the next page; assumes a fixed page size of 100 results
    if (page.issues.length) {
      const url: string = `${endpoint}&startAt=${page.startAt + 100}`;
      yield* makeRequest(url);
    }
  }

  yield* makeRequest(endpoint);
}

export async function* loadIssues(url: any, auth: any) {
  const endpoint = url;
  const result = pageThroughIssues(endpoint, auth);

  for await (const page of result) {
    for (const issue of page.issues) {
      yield issue;
    }
  }
}

async function* pageThroughComments(endpoint: any, auth: any) {
  async function* makeRequest(_endpoint: string): AsyncGenerator<any> {
    const response = await axios({
      url: _endpoint,
      method: "get",
      auth: auth,
    });

    if (response.status !== 200) {
      throw new Error(response.statusText);
    }

    const page = response.data;

    yield page;

    // fetch the next page; assumes a fixed page size of 100 results
    if (page.comments.length) {
      const url: string = `${endpoint}&startAt=${page.startAt + 100}`;
      yield* makeRequest(url);
    }
  }

  yield* makeRequest(endpoint);
}

export async function* loadComments(url: any, auth: any) {
  const endpoint = url;
  const result = pageThroughComments(endpoint, auth);

  for await (const page of result) {
    for (const comment of page.comments) {
      yield comment;
    }
  }
}
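
These generators presuppose a Jira-style payload (issues/comments arrays plus a startAt cursor, 100 results per page). A hedged usage sketch follows; the host, project key, and credentials are placeholders.

  // hypothetical consumer; URL and credentials are placeholders
  const auth = { username: "user@example.com", password: "<api-token>" };
  const searchUrl =
    "https://example.atlassian.net/rest/api/2/search?jql=project=TEST&maxResults=100";

  const importIssues = async () => {
    for await (const issue of loadIssues(searchUrl, auth)) {
      console.log(issue.key); // assumes Jira's issue shape
    }
  };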
42 segway/src/worker/base.worker.ts Normal file
@@ -0,0 +1,42 @@
// mq
import { ConsumeMessage } from "amqplib";
// mq singleton
import { MQSingleton } from "../mq/singleton";
// logger
import { logger } from "../utils/logger";

export abstract class BaseWorker {
  mq: MQSingleton | null = null;

  protected routingKey: string;

  constructor(
    protected queueName: string,
    routingKey: string
  ) {
    this.mq = MQSingleton.getInstance();
    this.routingKey = routingKey;
    this.onMessage = this.onMessage.bind(this);
  }

  // start the consumer
  public async start(): Promise<void> {
    try {
      this.mq?.consume(this.queueName, this.onMessage);
    } catch (error) {
      logger.error("Error starting workers:", error);
    }
  }

  // publish a task to the queue
  protected async publish(body: object, taskName: string): Promise<void> {
    try {
      this.mq?.publish(body, taskName);
    } catch (error) {
      logger.error("Error sending to queue:", error);
    }
  }

  protected abstract onMessage(msg: ConsumeMessage | null): void;
}
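
A minimal sketch of a concrete subclass; the queue name, routing key, and payload shape are assumptions, since only BaseWorker and the consume/publish surface of MQSingleton appear in this diff.

  // hypothetical worker; queue/routing-key names and payload shape are assumed
  import { ConsumeMessage } from "amqplib";
  import { logger } from "../utils/logger";
  import { BaseWorker } from "./base.worker";

  export class ExampleWorker extends BaseWorker {
    constructor() {
      super("example_tasks", "example"); // placeholder queue and routing key
    }

    protected onMessage(msg: ConsumeMessage | null): void {
      if (!msg) return;
      const payload = JSON.parse(msg.content.toString());
      logger.info("received task", payload);
    }
  }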
9 segway/tsconfig.json Normal file
@@ -0,0 +1,9 @@
{
  "extends": "tsconfig/express.json",
  "include": ["src/**/*.ts"],
  "exclude": ["node_modules"],
  "compilerOptions": {
    "baseUrl": "src/",
    "outDir": "./dist"
  }
}
2996 segway/yarn.lock Normal file
File diff suppressed because it is too large