Compare commits

...

60 Commits

Author SHA1 Message Date
Henit Chobisa
8abd174314 feat: added notification handlers 2023-12-28 17:18:55 +05:30
Henit Chobisa
cf9a6f1e87 feat: added notification schema 2023-12-28 17:18:21 +05:30
Henit Chobisa
4c80602725 chore: updated handlers from slack handle 2023-12-28 17:18:06 +05:30
Henit Chobisa
b95443ae52 feat: updated service functions 2023-12-28 17:17:38 +05:30
Henit Chobisa
257e96fff0 feat: create notification generators 2023-12-28 17:17:30 +05:30
Henit Chobisa
09c3d48514 chore: merge conflicts resolved 2023-12-20 18:25:45 +05:30
Henit Chobisa
53a8b5e247 feat: merge github-importer 2023-12-20 18:20:53 +05:30
Henit Chobisa
324d21f613 Merge branch 'dev/github-importer' into dev/segway-slack-integration 2023-12-20 18:18:06 +05:30
Henit Chobisa
1f0e91ac32 feat: updated slack controller to handle submission responses 2023-12-20 18:15:07 +05:30
Henit Chobisa
827376bd21 feat: added create-issue view submit handler 2023-12-20 18:14:24 +05:30
pablohashescobar
baddeedcd1 dev: add imported data as well 2023-12-20 16:43:28 +05:30
pablohashescobar
b153416612 dev: updated retry logic when exception 2023-12-20 15:32:07 +05:30
Henit Chobisa
ebec779993 feat: added project-select-action handler in block actions 2023-12-20 15:31:19 +05:30
Henit Chobisa
482f35673d feat: added custom block action handler 2023-12-20 15:31:04 +05:30
Henit Chobisa
2ffc869f83 chore: updated slack service to return fetch calls and errors 2023-12-20 15:30:32 +05:30
Henit Chobisa
1bc94487ed chore: converted issue create modal into object argument 2023-12-20 15:29:46 +05:30
Henit Chobisa
db65df0fb0 chore: renamed slack option converter 2023-12-20 15:29:25 +05:30
pablohashescobar
e529fe60fb dev: update code structure and sync functions 2023-12-20 14:47:52 +05:30
pablohashescobar
312258912c dev: removed console log 2023-12-20 14:19:15 +05:30
Henit Chobisa
8ff181c0d6 feat: added slack payload action handlers 2023-12-20 13:51:02 +05:30
Henit Chobisa
e25f5064d1 chore: added segregated slack payload types 2023-12-20 13:50:43 +05:30
Henit Chobisa
f156177097 feat: added handlers for handling slack payload actions 2023-12-20 12:49:32 +05:30
Henit Chobisa
301e1205f4 chore: segregated schemas in their respective files 2023-12-20 12:16:01 +05:30
Henit Chobisa
eff68b8cd6 feat: added projectLabel schema and service function 2023-12-20 11:47:50 +05:30
Henit Chobisa
6b9a1672c2 feat: added service function to fetch project members 2023-12-20 11:04:31 +05:30
Henit Chobisa
0835b5788c feat: added projectMembers schema 2023-12-20 11:04:00 +05:30
Henit Chobisa
2af97eb520 feat: added operations to perform and retrieve operation to and from slack 2023-12-20 10:54:43 +05:30
Henit Chobisa
8551b3435e feat: adde service to perform operations on slack and retrieve integration 2023-12-20 10:53:59 +05:30
Henit Chobisa
6f3121ae3e feat: added project service to fetch data from plane projects 2023-12-20 10:53:37 +05:30
Henit Chobisa
bc2cea6e35 feat: updated schemas for db 2023-12-20 10:52:36 +05:30
Henit Chobisa
9aaefa4023 feat: added project, states schema and relations 2023-12-20 10:52:20 +05:30
Henit Chobisa
06f1b1c14a feat: added user and workspace schema, relations 2023-12-20 10:51:52 +05:30
Henit Chobisa
a495ff905e feat: added workspace integration schema for database query 2023-12-20 10:51:04 +05:30
Henit Chobisa
ee17ec5f64 feat: added schema integration in drizzle db query client 2023-12-20 10:48:59 +05:30
Henit Chobisa
f9b868234f feat: added helper function to generate slack message from issue activity 2023-12-20 10:48:14 +05:30
Henit Chobisa
ffc82f413c feat: added helper for creating listed projects to static select options 2023-12-20 10:46:26 +05:30
Henit Chobisa
4aca5eccb8 feat: added modal view templates for Project Modal and Issue Create Modal 2023-12-20 10:45:56 +05:30
Henit Chobisa
414e058bba chore: updated depencenty 2023-12-20 10:45:10 +05:30
Henit Chobisa
f3ee340838 chore: updated slack auth url, to include incoming webhook url scope 2023-12-20 10:44:46 +05:30
pablohashescobar
973e76355b dev: update controller to use logger and spread the resultData in getAllEntities 2023-12-20 00:06:04 +05:30
pablohashescobar
b49b0ea4a7 dev: github comments and links for the imported issues 2023-12-20 00:05:13 +05:30
pablohashescobar
cf965103fa dev: github importer all issues import 2023-12-19 17:43:15 +05:30
pablohashescobar
8da0da4948 dev: initiate github import 2023-12-19 13:15:08 +05:30
NarayanBavisetti
3cb884d7eb chore: exception handleing 2023-12-18 20:28:35 +05:30
NarayanBavisetti
f489a9f377 chore: module and module issues imported 2023-12-18 19:17:45 +05:30
pablohashescobar
74d3b08ed8 Merge branch 'release-0.15' of github.com:makeplane/plane into dev/segway-integration 2023-12-18 19:12:05 +05:30
NarayanBavisetti
b384c40cb8 chore: jira controller 2023-12-18 16:32:17 +05:30
NarayanBavisetti
f111511cf5 chore: issue imports with state 2023-12-18 16:31:47 +05:30
NarayanBavisetti
eb4b9728f1 chore: api server files 2023-12-18 00:10:24 +05:30
NarayanBavisetti
3308388baf chore: create of issues done 2023-12-18 00:07:52 +05:30
pablohashescobar
c8e39f23ed dev: add external id and source added 2023-12-15 16:43:29 +05:30
pablohashescobar
91cbc8e56f dev: refactor the structure and add database integration to the app 2023-12-15 14:27:19 +05:30
pablohashescobar
1c390db493 dev: setup segway and django connection 2023-12-14 16:34:54 +05:30
pablohashescobar
edd476e909 dev: node to celery connection 2023-12-14 13:41:03 +05:30
pablohashescobar
9876a79e3c dev: create celery node queue for consuming messages from django 2023-12-14 13:15:04 +05:30
pablohashescobar
9eeb2d3af8 dev: create new workers 2023-12-13 16:33:21 +05:30
pablohashescobar
240ef2a60e dev: create communication with the segway server 2023-12-13 13:31:02 +05:30
pablohashescobar
6635767f64 dev: import refactors 2023-12-12 20:38:54 +05:30
pablohashescobar
cf2232aaeb dev: initialize segway with queue setup 2023-12-12 20:34:33 +05:30
NarayanBavisetti
5667e10cca feat: implemented rabbitmq 2023-12-12 00:22:32 +05:30
84 changed files with 7310 additions and 667 deletions

3
.gitignore vendored
View File

@@ -80,3 +80,6 @@ tmp/
## packages
dist
.temp/
# logs
combined.log

View File

@@ -1,3 +1,3 @@
web: gunicorn -w 4 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:$PORT --max-requests 10000 --max-requests-jitter 1000 --access-logfile -
worker: celery -A plane worker -l info
worker: celery -A plane worker -l info -Q internal_tasks,external_tasks,segway_tasks
beat: celery -A plane beat -l INFO

View File

@@ -2,7 +2,10 @@
import os
import sys
import dotenv
if __name__ == '__main__':
dotenv.read_dotenv()
os.environ.setdefault(
'DJANGO_SETTINGS_MODULE',
'plane.settings.production')

View File

@@ -54,10 +54,15 @@ class IssueSerializer(BaseSerializer):
"updated_by",
"created_at",
"updated_at",
"archived_at",
"external_id",
"external_source",
]
exclude = [
"description",
"description_stripped",
"external_id",
"external_source",
]
def validate(self, data):

View File

@@ -281,20 +281,22 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
)
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
issue_activity.apply_async(
args=[],
kwargs={
'type': "cycle.activity.deleted",
'requested_data': json.dumps({
"cycle_id": str(pk),
"cycle_name": str(cycle.name),
"issues": [str(issue_id) for issue_id in cycle_issues],
}
),
actor_id=str(request.user.id),
issue_id=None,
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
}),
'actor_id': str(request.user.id),
'issue_id': None,
'project_id': str(project_id),
'current_instance': None,
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
# Delete the cycle
cycle.delete()
@@ -454,21 +456,21 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
)
# Capture Issue Activity
issue_activity.delay(
type="cycle.activity.created",
requested_data=json.dumps({"cycles_list": str(issues)}),
actor_id=str(self.request.user.id),
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
{
issue_activity.apply_async(
args=[],
kwargs={
'type': "cycle.activity.created",
'requested_data': json.dumps({"cycles_list": str(issues)}),
'actor_id': str(self.request.user.id),
'issue_id': None,
'project_id': str(self.kwargs.get("project_id", None)),
'current_instance': json.dumps({
"updated_cycle_issues": update_cycle_issue_activity,
"created_cycle_issues": serializers.serialize(
"json", record_to_create
),
}
),
epoch=int(timezone.now().timestamp()),
"created_cycle_issues": serializers.serialize("json", record_to_create),
}),
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
# Return all Cycle Issues
@@ -483,19 +485,21 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
)
issue_id = cycle_issue.issue_id
cycle_issue.delete()
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
issue_activity.apply_async(
args=[],
kwargs={
'type': "cycle.activity.deleted",
'requested_data': json.dumps({
"cycle_id": str(self.kwargs.get("cycle_id")),
"issues": [str(issue_id)],
}
),
actor_id=str(self.request.user.id),
issue_id=str(issue_id),
project_id=str(self.kwargs.get("project_id", None)),
current_instance=None,
epoch=int(timezone.now().timestamp()),
}),
'actor_id': str(self.request.user.id),
'issue_id': str(issue_id),
'project_id': str(self.kwargs.get("project_id", None)),
'current_instance': None,
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -142,14 +142,18 @@ class InboxIssueAPIEndpoint(BaseAPIView):
)
# Create an Issue Activity
issue_activity.delay(
type="issue.activity.created",
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[], # If no positional arguments are required
kwargs={
"type": "issue.activity.created",
"requested_data": json.dumps(request.data, cls=DjangoJSONEncoder),
"actor_id": str(request.user.id),
"issue_id": str(issue.id),
"project_id": str(project_id),
"current_instance": None,
"epoch": int(timezone.now().timestamp()),
},
routing_key="external",
)
# create an inbox issue
@@ -232,17 +236,21 @@ class InboxIssueAPIEndpoint(BaseAPIView):
# Log all the updates
requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
if issue is not None:
issue_activity.delay(
type="issue.activity.updated",
requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=json.dumps(
IssueSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[],
kwargs={
"type": "issue.activity.updated",
"requested_data": requested_data,
"actor_id": str(request.user.id),
"issue_id": str(issue_id),
"project_id": str(project_id),
"current_instance": json.dumps(
IssueSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
"epoch": int(timezone.now().timestamp()),
},
routing_key="external",
)
issue_serializer.save()
else:

View File

@@ -2,52 +2,28 @@
import json
from itertools import chain
from django.core.serializers.json import DjangoJSONEncoder
# Django imports
from django.db import IntegrityError
from django.db.models import (
OuterRef,
Func,
Q,
F,
Case,
When,
Value,
CharField,
Max,
Exists,
)
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import (Case, CharField, Exists, F, Func, Max, OuterRef,
Q, Value, When)
from django.utils import timezone
from plane.api.serializers import (IssueActivitySerializer,
IssueCommentSerializer, IssueLinkSerializer,
IssueSerializer, LabelSerializer)
from plane.app.permissions import (ProjectEntityPermission,
ProjectLitePermission,
ProjectMemberPermission)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (Issue, IssueActivity, IssueAttachment,
IssueComment, IssueLink, Label, Project,
ProjectMember)
# Third party imports
from rest_framework import status
from rest_framework.response import Response
# Module imports
from .base import BaseAPIView, WebhookMixin
from plane.app.permissions import (
ProjectEntityPermission,
ProjectMemberPermission,
ProjectLitePermission,
)
from plane.db.models import (
Issue,
IssueAttachment,
IssueLink,
Project,
Label,
ProjectMember,
IssueComment,
IssueActivity,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.api.serializers import (
IssueSerializer,
LabelSerializer,
IssueLinkSerializer,
IssueCommentSerializer,
IssueActivitySerializer,
)
class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
@@ -207,14 +183,18 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
serializer.save()
# Track the issue
issue_activity.delay(
type="issue.activity.created",
requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
actor_id=str(request.user.id),
issue_id=str(serializer.data.get("id", None)),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[], # If no positional arguments are required
kwargs={
'type': "issue.activity.created",
'requested_data': json.dumps(self.request.data, cls=DjangoJSONEncoder),
'actor_id': str(request.user.id),
'issue_id': str(serializer.data.get("id", None)),
'project_id': str(project_id),
'current_instance': None,
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -228,14 +208,18 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
serializer = IssueSerializer(issue, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
issue_activity.delay(
type="issue.activity.updated",
requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[],
kwargs={
'type': "issue.activity.updated",
'requested_data': requested_data,
'actor_id': str(request.user.id),
'issue_id': str(pk),
'project_id': str(project_id),
'current_instance': current_instance,
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -246,14 +230,19 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
IssueSerializer(issue).data, cls=DjangoJSONEncoder
)
issue.delete()
issue_activity.delay(
type="issue.activity.deleted",
requested_data=json.dumps({"issue_id": str(pk)}),
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[],
kwargs={
'type': "issue.activity.deleted",
'requested_data': json.dumps({"issue_id": str(pk)}),
'actor_id': str(request.user.id),
'issue_id': str(pk),
'project_id': str(project_id),
'current_instance': current_instance,
'epoch': int(timezone.now().timestamp()),
},
routing_key='your_routing_key',
queue='your_queue_name'
)
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -309,7 +298,11 @@ class LabelAPIEndpoint(BaseAPIView):
).data,
)
label = self.get_queryset().get(pk=pk)
serializer = LabelSerializer(label, fields=self.fields, expand=self.expand,)
serializer = LabelSerializer(
label,
fields=self.fields,
expand=self.expand,
)
return Response(serializer.data, status=status.HTTP_200_OK)
def patch(self, request, slug, project_id, pk=None):
@@ -319,7 +312,6 @@ class LabelAPIEndpoint(BaseAPIView):
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, slug, project_id, pk=None):
label = self.get_queryset().get(pk=pk)
@@ -384,14 +376,18 @@ class IssueLinkAPIEndpoint(BaseAPIView):
project_id=project_id,
issue_id=issue_id,
)
issue_activity.delay(
type="link.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")),
current_instance=None,
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[], # If no positional arguments are required
kwargs={
'type': "link.activity.created",
'requested_data': json.dumps(serializer.data, cls=DjangoJSONEncoder),
'actor_id': str(self.request.user.id),
'issue_id': str(self.kwargs.get("issue_id")),
'project_id': str(self.kwargs.get("project_id")),
'current_instance': None,
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -408,14 +404,18 @@ class IssueLinkAPIEndpoint(BaseAPIView):
serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
issue_activity.delay(
type="link.activity.updated",
requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[], # If no positional arguments are required
kwargs={
'type': "link.activity.updated",
'requested_data': requested_data,
'actor_id': str(request.user.id),
'issue_id': str(issue_id),
'project_id': str(project_id),
'current_instance': current_instance,
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -428,14 +428,18 @@ class IssueLinkAPIEndpoint(BaseAPIView):
IssueLinkSerializer(issue_link).data,
cls=DjangoJSONEncoder,
)
issue_activity.delay(
type="link.activity.deleted",
requested_data=json.dumps({"link_id": str(pk)}),
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[], # If no positional arguments are required
kwargs={
'type': "link.activity.deleted",
'requested_data': json.dumps({"link_id": str(pk)}),
'actor_id': str(request.user.id),
'issue_id': str(issue_id),
'project_id': str(project_id),
'current_instance': current_instance,
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
issue_link.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -507,14 +511,20 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
issue_id=issue_id,
actor=request.user,
)
issue_activity.delay(
type="comment.activity.created",
requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("issue_id")),
project_id=str(self.kwargs.get("project_id")),
current_instance=None,
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[],
kwargs={
"type": "comment.activity.created",
"requested_data": json.dumps(
serializer.data, cls=DjangoJSONEncoder
),
"actor_id": str(self.request.user.id),
"issue_id": str(self.kwargs.get("issue_id")),
"project_id": str(self.kwargs.get("project_id")),
"current_instance": None,
"epoch": int(timezone.now().timestamp()),
},
routing_key="external",
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -533,14 +543,18 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
)
if serializer.is_valid():
serializer.save()
issue_activity.delay(
type="comment.activity.updated",
requested_data=requested_data,
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[],
kwargs={
"type": "comment.activity.updated",
"requested_data": requested_data,
"actor_id": str(request.user.id),
"issue_id": str(issue_id),
"project_id": str(project_id),
"current_instance": current_instance,
"epoch": int(timezone.now().timestamp()),
},
routing_key="external",
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -554,14 +568,18 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
cls=DjangoJSONEncoder,
)
issue_comment.delete()
issue_activity.delay(
type="comment.activity.deleted",
requested_data=json.dumps({"comment_id": str(pk)}),
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
issue_activity.apply_async(
args=[], # If no positional arguments are required
kwargs={
"type": "comment.activity.deleted",
"requested_data": json.dumps({"comment_id": str(pk)}),
"actor_id": str(request.user.id),
"issue_id": str(issue_id),
"project_id": str(project_id),
"current_instance": current_instance,
"epoch": int(timezone.now().timestamp()),
},
routing_key="external",
)
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -582,7 +600,7 @@ class IssueActivityAPIEndpoint(BaseAPIView):
)
.select_related("actor", "workspace", "issue", "project")
).order_by(request.GET.get("order_by", "created_at"))
if pk:
issue_activities = issue_activities.get(pk=pk)
serializer = IssueActivitySerializer(issue_activities)

View File

@@ -166,20 +166,22 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
module_issues = list(
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
)
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
issue_activity.apply_async(
args=[],
kwargs={
'type': "module.activity.deleted",
'requested_data': json.dumps({
"module_id": str(pk),
"module_name": str(module.name),
"issues": [str(issue_id) for issue_id in module_issues],
}
),
actor_id=str(request.user.id),
issue_id=None,
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
}),
'actor_id': str(request.user.id),
'issue_id': None,
'project_id': str(project_id),
'current_instance': None,
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
module.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@@ -330,21 +332,21 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
)
# Capture Issue Activity
issue_activity.delay(
type="module.activity.created",
requested_data=json.dumps({"modules_list": str(issues)}),
actor_id=str(self.request.user.id),
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
{
issue_activity.apply_async(
args=[],
kwargs={
'type': "module.activity.created",
'requested_data': json.dumps({"modules_list": str(issues)}),
'actor_id': str(self.request.user.id),
'issue_id': None,
'project_id': str(self.kwargs.get("project_id", None)),
'current_instance': json.dumps({
"updated_module_issues": update_module_issue_activity,
"created_module_issues": serializers.serialize(
"json", record_to_create
),
}
),
epoch=int(timezone.now().timestamp()),
"created_module_issues": serializers.serialize("json", record_to_create),
}),
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
return Response(
@@ -357,18 +359,20 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
workspace__slug=slug, project_id=project_id, module_id=module_id, issue_id=issue_id
)
module_issue.delete()
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
issue_activity.apply_async(
args=[], # If no positional arguments are required
kwargs={
'type': "module.activity.deleted",
'requested_data': json.dumps({
"module_id": str(module_id),
"issues": [str(module_issue.issue_id)],
}
),
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
}),
'actor_id': str(request.user.id),
'issue_id': str(issue_id),
'project_id': str(project_id),
'current_instance': None,
'epoch': int(timezone.now().timestamp()),
},
routing_key='external',
)
return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -1,36 +1,20 @@
# Django imports
from django.utils import timezone
from plane.db.models import (CommentReaction, Cycle, CycleIssue, Issue,
IssueActivity, IssueAssignee, IssueAttachment,
IssueComment, IssueLabel, IssueLink,
IssueProperty, IssueReaction, IssueRelation,
IssueSubscriber, IssueVote, Label, Module,
ModuleIssue, User)
# Third Party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
from .user import UserLiteSerializer
from .state import StateSerializer, StateLiteSerializer
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer, StateSerializer
from .user import UserLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
User,
Issue,
IssueActivity,
IssueComment,
IssueProperty,
IssueAssignee,
IssueSubscriber,
IssueLabel,
Label,
CycleIssue,
Cycle,
Module,
ModuleIssue,
IssueLink,
IssueAttachment,
IssueReaction,
CommentReaction,
IssueVote,
IssueRelation,
)
class IssueFlatSerializer(BaseSerializer):
@@ -231,7 +215,13 @@ class IssueActivitySerializer(BaseSerializer):
model = IssueActivity
fields = "__all__"
def to_representation(self, instance):
return {
'actor_detail': UserLiteSerializer(instance.actor).data,
'issue_detail': IssueFlatSerializer(instance.issue).data,
'project_detail': ProjectLiteSerializer(instance.project).data,
'workspace_detail': WorkspaceLiteSerializer(instance.workspace).data,
}
class IssuePropertySerializer(BaseSerializer):
class Meta:

View File

@@ -1,11 +1,14 @@
# Python imports
import uuid
import json
import requests
# Third party imports
from rest_framework import status
from rest_framework.response import Response
# Django imports
from django.conf import settings
from django.db.models import Max, Q
# Module imports
@@ -34,20 +37,15 @@ from plane.app.serializers import (
IssueFlatSerializer,
ModuleSerializer,
)
from plane.utils.integrations.github import get_github_repo_details
from plane.utils.importers.jira import jira_project_issue_summary
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags
from plane.app.permissions import WorkSpaceAdminPermission
from plane.bgtasks.importer_task import service_importer
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
def get(self, request, slug, service):
if service == "github":
owner = request.GET.get("owner", False)
repo = request.GET.get("repo", False)
if not owner or not repo:
return Response(
{"error": "Owner and repo are required"},
@@ -58,11 +56,10 @@ class ServiceIssueImportSummaryEndpoint(BaseAPIView):
integration__provider="github", workspace__slug=slug
)
access_tokens_url = workspace_integration.metadata.get(
"access_tokens_url", False
)
installtion_id = workspace_integration.config.get("installation_id", False)
if not access_tokens_url:
# Check for the installation id
if not installtion_id:
return Response(
{
"error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app."
@@ -70,18 +67,33 @@ class ServiceIssueImportSummaryEndpoint(BaseAPIView):
status=status.HTTP_400_BAD_REQUEST,
)
issue_count, labels, collaborators = get_github_repo_details(
access_tokens_url, owner, repo
)
# Request segway for the required information
if settings.SEGWAY_BASE_URL:
headers = {
"Content-Type": "application/json",
"x-api-key": settings.SEGWAY_KEY,
}
data = {
"owner": owner,
"repo": repo,
"installationId": installtion_id,
}
res = requests.post(
f"{settings.SEGWAY_BASE_URL}/api/github",
data=json.dumps(data),
headers=headers,
)
if "error" in res.json():
return Response(res.json(), status=status.HTTP_400_BAD_REQUEST)
else:
return Response(
res.json(),
status=status.HTTP_200_OK,
)
return Response(
{
"issue_count": issue_count,
"labels": labels,
"collaborators": collaborators,
},
status=status.HTTP_200_OK,
{"error": "Inetgration service is not available please try later"},
status=status.HTTP_400_BAD_REQUEST,
)
if service == "jira":
# Check for all the keys
params = {
@@ -102,16 +114,35 @@ class ServiceIssueImportSummaryEndpoint(BaseAPIView):
email = request.GET.get("email", "")
cloud_hostname = request.GET.get("cloud_hostname", "")
response = jira_project_issue_summary(
email, api_token, project_key, cloud_hostname
)
if "error" in response:
return Response(response, status=status.HTTP_400_BAD_REQUEST)
else:
return Response(
response,
status=status.HTTP_200_OK,
if settings.SEGWAY_BASE_URL:
headers = {
"Content-Type": "application/json",
"x-api-key": settings.SEGWAY_KEY,
}
data = {
"project_key": project_key,
"api_token": api_token,
"email": email,
"cloud_hostname": cloud_hostname,
}
res = requests.post(
f"{settings.SEGWAY_BASE_URL}/api/jira",
data=json.dumps(data),
headers=headers,
)
if "error" in res.json():
return Response(res.json(), status=status.HTTP_400_BAD_REQUEST)
else:
return Response(
res.json(),
status=status.HTTP_200_OK,
)
return Response(
{"error": "Inetgration service is not available please try later"},
status=status.HTTP_400_BAD_REQUEST,
)
return Response(
{"error": "Service not supported yet"},
status=status.HTTP_400_BAD_REQUEST,
@@ -122,7 +153,21 @@ class ImportServiceEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
]
def post(self, request, slug, service):
if service not in ["github", "jira"]:
return Response(
{"error": "Servivce not supported yet"},
status=status.HTTP_400_BAD_REQUEST,
)
if service == "github":
workspace_integration = WorkspaceIntegration.objects.get(
integration__provider="github", workspace__slug=slug
)
installation_id = workspace_integration.config.get("installation_id", False)
project_id = request.data.get("project_id", False)
if not project_id:
@@ -130,87 +175,84 @@ class ImportServiceEndpoint(BaseAPIView):
{"error": "Project ID is required"},
status=status.HTTP_400_BAD_REQUEST,
)
workspace = Workspace.objects.get(slug=slug)
if service == "github":
data = request.data.get("data", False)
metadata = request.data.get("metadata", False)
config = request.data.get("config", False)
if not data or not metadata or not config:
return Response(
{"error": "Data, config and metadata are required"},
status=status.HTTP_400_BAD_REQUEST,
)
api_token = APIToken.objects.filter(
user=request.user, workspace=workspace
).first()
if api_token is None:
api_token = APIToken.objects.create(
user=request.user,
label="Importer",
workspace=workspace,
)
importer = Importer.objects.create(
service=service,
project_id=project_id,
status="queued",
initiated_by=request.user,
data=data,
metadata=metadata,
token=api_token,
config=config,
created_by=request.user,
updated_by=request.user,
# Validate the data
data = request.data.get("data", False)
metadata = request.data.get("metadata", False)
config = request.data.get("config", False)
if not data or not metadata or not config:
return Response(
{"error": "Data, config and metadata are required"},
status=status.HTTP_400_BAD_REQUEST,
)
service_importer.delay(service, importer.id)
serializer = ImporterSerializer(importer)
return Response(serializer.data, status=status.HTTP_201_CREATED)
# Update config
if config and service == "github":
config.update({"installation_id": installation_id})
if service == "jira":
data = request.data.get("data", False)
metadata = request.data.get("metadata", False)
config = request.data.get("config", False)
if not data or not metadata:
return Response(
{"error": "Data, config and metadata are required"},
status=status.HTTP_400_BAD_REQUEST,
)
api_token = APIToken.objects.filter(
user=request.user, workspace=workspace
).first()
if api_token is None:
api_token = APIToken.objects.create(
user=request.user,
label="Importer",
workspace=workspace,
)
importer = Importer.objects.create(
service=service,
project_id=project_id,
status="queued",
initiated_by=request.user,
data=data,
metadata=metadata,
token=api_token,
config=config,
created_by=request.user,
updated_by=request.user,
# Get the api token -- # derecated
api_token = APIToken.objects.filter(
user=request.user, workspace=workspace
).first()
if api_token is None:
api_token = APIToken.objects.create(
user=request.user,
label="Importer",
workspace=workspace,
)
service_importer.delay(service, importer.id)
serializer = ImporterSerializer(importer)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(
{"error": "Servivce not supported yet"},
status=status.HTTP_400_BAD_REQUEST,
# Create an import
importer = Importer.objects.create(
service=service,
project_id=project_id,
status="queued",
initiated_by=request.user,
data=data,
metadata=metadata,
token=api_token,
config=config,
created_by=request.user,
updated_by=request.user,
)
# Push it to segway
if settings.SEGWAY_BASE_URL:
headers = {
"Content-Type": "application/json",
"x-api-key": settings.SEGWAY_KEY,
}
data = {
"metadata": metadata,
"data": data,
"config": config,
"workspace_id": str(workspace.id),
"project_id": str(project_id),
"created_by": str(request.user.id),
"importer_id": str(importer.id),
}
res = requests.post(
f"{settings.SEGWAY_BASE_URL}/api/github/import",
data=json.dumps(data),
headers=headers,
)
if "error" in res.json():
importer.status = "failed"
importer.reason = str(res.json())
importer.save()
else:
importer.status = "processing"
importer.save(update_fields=["status"])
else:
importer.status = "failed"
importer.reason = "Segway base url is not present"
importer.save(update_fields=["status", "reason"])
# return the response
serializer = ImporterSerializer(importer)
return Response(serializer.data, status=status.HTTP_201_CREATED)
def get(self, request, slug):
imports = (
Importer.objects.filter(workspace__slug=slug)
@@ -221,9 +263,7 @@ class ImportServiceEndpoint(BaseAPIView):
return Response(serializer.data)
def delete(self, request, slug, service, pk):
importer = Importer.objects.get(
pk=pk, service=service, workspace__slug=slug
)
importer = Importer.objects.get(pk=pk, service=service, workspace__slug=slug)
if importer.imported_data is not None:
# Delete all imported Issues
@@ -241,9 +281,7 @@ class ImportServiceEndpoint(BaseAPIView):
return Response(status=status.HTTP_204_NO_CONTENT)
def patch(self, request, slug, service, pk):
importer = Importer.objects.get(
pk=pk, service=service, workspace__slug=slug
)
importer = Importer.objects.get(pk=pk, service=service, workspace__slug=slug)
serializer = ImporterSerializer(importer, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
@@ -479,9 +517,7 @@ class BulkImportModulesEndpoint(BaseAPIView):
[
ModuleLink(
module=module,
url=module_data.get("link", {}).get(
"url", "https://plane.so"
),
url=module_data.get("link", {}).get("url", "https://plane.so"),
title=module_data.get("link", {}).get(
"title", "Original Issue"
),

View File

@@ -1,8 +1,11 @@
# Python imports
import uuid
import requests
import json
# Django imports
from django.contrib.auth.hashers import make_password
from django.conf import settings
# Third party imports
from rest_framework.response import Response
@@ -27,6 +30,7 @@ from plane.utils.integrations.github import (
from plane.app.permissions import WorkSpaceAdminPermission
from plane.utils.integrations.slack import slack_oauth
class IntegrationViewSet(BaseViewSet):
serializer_class = IntegrationSerializer
model = Integration
@@ -46,9 +50,7 @@ class IntegrationViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)
serializer = IntegrationSerializer(
integration, data=request.data, partial=True
)
serializer = IntegrationSerializer(integration, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
@@ -94,14 +96,30 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
{"error": "Installation ID is required"},
status=status.HTTP_400_BAD_REQUEST,
)
metadata = get_github_metadata(installation_id)
# Push it to segway
if settings.SEGWAY_BASE_URL:
headers = {
"Content-Type": "application/json",
"x-api-key": settings.SEGWAY_KEY,
}
data = {"installationId": installation_id}
res = requests.post(
f"{settings.SEGWAY_BASE_URL}/api/github/metadata",
data=json.dumps(data),
headers=headers,
)
if "error" in res.json():
return Response(res.json(), status=status.HTTP_400_BAD_REQUEST)
metadata = res.json()
config = {"installation_id": installation_id}
if provider == "slack":
code = request.data.get("code", False)
if not code:
return Response({"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST)
return Response(
{"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST
)
slack_response = slack_oauth(code=code)
@@ -123,9 +141,7 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
is_password_autoset=True,
is_bot=True,
first_name=integration.title,
avatar=integration.avatar_url
if integration.avatar_url is not None
else "",
avatar=integration.avatar_url if integration.avatar_url is not None else "",
)
# Create an API Token for the bot user
@@ -161,11 +177,9 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
)
if workspace_integration.integration.provider == "github":
installation_id = workspace_integration.config.get(
"installation_id", False
)
installation_id = workspace_integration.config.get("installation_id", False)
if installation_id:
delete_github_installation(installation_id=installation_id)
workspace_integration.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -1,8 +1,15 @@
# Python imports
import json
import requests
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from sentry_sdk import capture_exception
# Django imports
from django.conf import settings
# Module imports
from plane.app.views import BaseViewSet, BaseAPIView
from plane.db.models import (
@@ -35,19 +42,32 @@ class GithubRepositoriesEndpoint(BaseAPIView):
workspace__slug=slug, pk=workspace_integration_id
)
if workspace_integration.integration.provider != "github":
installation_id = workspace_integration.config.get("installation_id")
if not installation_id:
return Response(
{"error": "Not a github integration"},
status=status.HTTP_400_BAD_REQUEST,
)
access_tokens_url = workspace_integration.metadata["access_tokens_url"]
repositories_url = (
workspace_integration.metadata["repositories_url"]
+ f"?per_page=100&page={page}"
)
repositories = get_github_repos(access_tokens_url, repositories_url)
return Response(repositories, status=status.HTTP_200_OK)
# Push it to segway
if settings.SEGWAY_BASE_URL:
headers = {
"Content-Type": "application/json",
"x-api-key": settings.SEGWAY_KEY,
}
data = {
"installationId": installation_id,
"page": page,
}
res = requests.post(
f"{settings.SEGWAY_BASE_URL}/api/github/repos",
data=json.dumps(data),
headers=headers,
)
if "error" in res.json():
return Response(res.json(), status=status.HTTP_400_BAD_REQUEST)
else:
return Response(res.json(), status=status.HTTP_200_OK)
class GithubRepositorySyncViewSet(BaseViewSet):

View File

@@ -3,84 +3,49 @@ import json
import random
from itertools import chain
# Django imports
from django.db import models
from django.utils import timezone
from django.db.models import (
Prefetch,
OuterRef,
Func,
F,
Q,
Count,
Case,
Value,
CharField,
When,
Exists,
Max,
IntegerField,
)
from django.core.serializers.json import DjangoJSONEncoder
# Django imports
from django.db import IntegrityError, models
from django.db.models import (Case, CharField, Count, Exists, F, Func,
IntegerField, Max, OuterRef, Prefetch, Q, Value,
When)
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.db import IntegrityError
# Third Party imports
from rest_framework.response import Response
from rest_framework import status
from rest_framework.parsers import MultiPartParser, FormParser
# Module imports
from . import BaseViewSet, BaseAPIView, WebhookMixin
from plane.app.serializers import (
IssueCreateSerializer,
IssueActivitySerializer,
IssueCommentSerializer,
IssuePropertySerializer,
IssueSerializer,
LabelSerializer,
IssueFlatSerializer,
IssueLinkSerializer,
IssueLiteSerializer,
IssueAttachmentSerializer,
IssueSubscriberSerializer,
ProjectMemberLiteSerializer,
IssueReactionSerializer,
CommentReactionSerializer,
IssueVoteSerializer,
IssueRelationSerializer,
RelatedIssueSerializer,
IssuePublicSerializer,
)
from plane.app.permissions import (
ProjectEntityPermission,
WorkSpaceAdminPermission,
ProjectMemberPermission,
ProjectLitePermission,
)
from plane.db.models import (
Project,
Issue,
IssueActivity,
IssueComment,
IssueProperty,
Label,
IssueLink,
IssueAttachment,
State,
IssueSubscriber,
ProjectMember,
IssueReaction,
CommentReaction,
ProjectDeployBoard,
IssueVote,
IssueRelation,
ProjectPublicMember,
)
from plane.app.permissions import (ProjectEntityPermission,
ProjectLitePermission,
ProjectMemberPermission,
WorkSpaceAdminPermission)
from plane.app.serializers import (CommentReactionSerializer,
IssueActivitySerializer,
IssueAttachmentSerializer,
IssueCommentSerializer,
IssueCreateSerializer, IssueFlatSerializer,
IssueLinkSerializer, IssueLiteSerializer,
IssuePropertySerializer,
IssuePublicSerializer,
IssueReactionSerializer,
IssueRelationSerializer, IssueSerializer,
IssueSubscriberSerializer,
IssueVoteSerializer, LabelSerializer,
ProjectMemberLiteSerializer,
RelatedIssueSerializer)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (CommentReaction, Issue, IssueActivity,
IssueAttachment, IssueComment, IssueLink,
IssueProperty, IssueReaction, IssueRelation,
IssueSubscriber, IssueVote, Label, Project,
ProjectDeployBoard, ProjectMember,
ProjectPublicMember, State)
from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters
from rest_framework import status
from rest_framework.parsers import FormParser, MultiPartParser
# Third Party imports
from rest_framework.response import Response
# Module imports
from . import BaseAPIView, BaseViewSet, WebhookMixin
class IssueViewSet(WebhookMixin, BaseViewSet):
@@ -266,6 +231,7 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
serializer = IssueCreateSerializer(issue, data=request.data, partial=True)
if serializer.is_valid():
print("serializervalid")
serializer.save()
issue_activity.delay(
type="issue.activity.updated",
@@ -1626,4 +1592,4 @@ class IssueDraftViewSet(BaseViewSet):
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -373,7 +373,7 @@ def generate_non_segmented_rows(
return [tuple(row_zero)] + rows
@shared_task
@shared_task(queue='internal_tasks')
def analytic_export_task(email, data, slug):
try:
filters = issue_filters(data, "POST")

View File

@@ -29,7 +29,7 @@ def posthogConfiguration():
return None, None
@shared_task
@shared_task(queue='internal_tasks')
def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
try:
POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()
@@ -54,7 +54,7 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
capture_exception(e)
@shared_task
@shared_task(queue='internal_tasks')
def workspace_invite_event(user, email, user_agent, ip, event_name, accepted_from):
try:
POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()

View File

@@ -259,7 +259,7 @@ def generate_xlsx(header, project_id, issues, files):
files.append((f"{project_id}.xlsx", xlsx_file))
@shared_task
@shared_task(queue='internal_tasks')
def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
try:
exporter_instance = ExporterHistory.objects.get(token=token_id)

View File

@@ -15,7 +15,7 @@ from botocore.client import Config
from plane.db.models import ExporterHistory
@shared_task
@shared_task(queue='internal_tasks')
def delete_old_s3_link():
# Get a list of keys and IDs to process
expired_exporter_history = ExporterHistory.objects.filter(

View File

@@ -12,7 +12,7 @@ from celery import shared_task
from plane.db.models import FileAsset
@shared_task
@shared_task(queue='internal_tasks')
def delete_file_asset():
# file assets to delete

View File

@@ -17,7 +17,7 @@ from sentry_sdk import capture_exception
from plane.license.utils.instance_value import get_email_configuration
@shared_task
@shared_task(queue='internal_tasks')
def forgot_password(first_name, email, uidb64, token, current_site):
try:
relative_link = (

View File

@@ -1,200 +1,470 @@
# Python imports
import json
import requests
import uuid
from functools import wraps
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import Q, Max
from django.contrib.auth.hashers import make_password
# Third Party imports
from celery import shared_task
from sentry_sdk import capture_exception
from celery.exceptions import MaxRetriesExceededError
# Module imports
from plane.app.serializers import ImporterSerializer
from plane.db.models import (
Importer,
WorkspaceMember,
GithubRepositorySync,
GithubRepository,
ProjectMember,
WorkspaceIntegration,
Label,
User,
State,
Issue,
Module,
Cycle,
IssueProperty,
IssueAssignee,
IssueLabel,
IssueSequence,
IssueActivity,
IssueComment,
IssueLink,
ModuleIssue,
)
from plane.bgtasks.user_welcome_task import send_welcome_slack
from rest_framework.response import Response
@shared_task
@shared_task(queue="internal_tasks")
def service_importer(service, importer_id):
pass
## Utility functions
def get_label_id(name, data):
    """Look up a label by case-insensitive name within the target project.

    Returns a ``{"id": ...}`` dict for the first match, or ``None`` when no
    label matches.
    """
    # Stray lines referencing an undefined `importer_id` (merge/diff residue)
    # were removed. The dead `except Label.DoesNotExist` guard is gone too:
    # `.filter(...).first()` never raises DoesNotExist — it returns None.
    return (
        Label.objects.filter(
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            name__iexact=name,
        )
        .values("id")
        .first()
    )
users = importer.data.get("users", [])
# Check if we need to import users as well
if len(users):
# For all invited users create the users
new_users = User.objects.bulk_create(
[
User(
email=user.get("email").strip().lower(),
username=uuid.uuid4().hex,
password=make_password(uuid.uuid4().hex),
is_password_autoset=True,
)
for user in users
if user.get("import", False) == "invite"
],
batch_size=10,
ignore_conflicts=True,
def get_state_id(name, data):
    """Look up a state by case-insensitive name within the target project.

    Returns a ``{"id": ...}`` dict for the first match, or ``None`` when no
    state matches.
    """
    # The original wrapped this in `try/except State.DoesNotExist`, but
    # `.filter(...).first()` never raises DoesNotExist — it returns None —
    # so the guard was dead code and has been removed.
    return (
        State.objects.filter(
            name__iexact=name,
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
        )
        .values("id")
        .first()
    )
def get_user_id(name):
    """Look up a user by exact email match.

    Returns a ``{"id": ...}`` dict for the matching user, or ``None`` when no
    user has that email.
    """
    # `.filter(...).first()` returns None on no match; the original
    # `except User.DoesNotExist` guard could never fire and was removed.
    return User.objects.filter(email=name).values("id").first()
def update_imported_items(importer_id, entity, entity_id):
    """Append ``entity_id`` under the ``entity`` key of the importer's
    ``imported_data`` ledger, creating the ledger when it is empty."""
    record = Importer.objects.get(pk=importer_id)
    if record.imported_data:
        # Ledger already exists — append to (or start) this entity's list.
        record.imported_data.setdefault(str(entity), []).append(str(entity_id))
    else:
        # First imported item: initialise the ledger.
        record.imported_data = {str(entity): [str(entity_id)]}
    record.save()
## Sync functions
def members_sync(data):
    """Ensure the member described by ``data`` belongs to the workspace/project.

    Existing users (matched by email) are attached idempotently via
    get_or_create. Unknown users are provisioned with a random username and
    password and, when the payload's ``source`` is ``"slack"``, greeted through
    ``send_welcome_slack``.

    NOTE: orphaned merge residue that referenced undefined ``new_users`` /
    ``users`` variables was removed from the tail of this function.
    """
    try:
        user = User.objects.get(email=data.get("email"))
        # Known user: idempotently attach to workspace, project, and props.
        _ = WorkspaceMember.objects.get_or_create(
            member_id=user.id, workspace_id=data.get("workspace_id")
        )
        _ = ProjectMember.objects.get_or_create(
            member_id=user.id,
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
        )
        _ = IssueProperty.objects.get_or_create(
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            user_id=user.id,
            created_by_id=data.get("created_by"),
        )
    except User.DoesNotExist:
        # Provision a fresh account for the invited user.
        new_user = User.objects.create(
            email=data.get("email").strip().lower(),
            username=uuid.uuid4().hex,
            password=make_password(uuid.uuid4().hex),
            is_password_autoset=True,
        )
        service = data.get("external_source")
        WorkspaceMember.objects.create(
            member_id=new_user.id,
            workspace_id=data.get("workspace_id"),
            created_by_id=data.get("created_by"),
        )
        ProjectMember.objects.create(
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            member_id=new_user.id,
            created_by_id=data.get("created_by"),
        )
        IssueProperty.objects.create(
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            user_id=new_user.id,
            created_by_id=data.get("created_by"),
        )
        # Welcome message is only sent for slack-originated imports.
        if data.get("source", False) == "slack":
            send_welcome_slack.delay(
                str(new_user.id),
                True,
                f"{new_user.email} was imported to Plane from {service}",
            )
def label_sync(data):
    # Import a single label into the target project.
    # Dedupe key: case-insensitive name + external (id, source) within the
    # project/workspace. NOTE(review): a label with the same name but a
    # different (or missing) external_id will NOT match this filter, so a
    # duplicate row would be created — confirm this is intended.
    existing_label = Label.objects.filter(
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
        name__iexact=data.get("name"),
        external_id=data.get("external_id", None),
        external_source=data.get("external_source"),
    )
    # Create only when nothing matched and the payload actually carries a name.
    if not existing_label.exists() and data.get("name"):
        label = Label.objects.create(
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            name=data.get("name"),
            color=data.get("color"),
            created_by_id=data.get("created_by"),
            external_id=data.get("external_id", None),
            external_source=data.get("external_source"),
        )
        # Record the newly created label on the importer's imported_data ledger.
        update_imported_items(data.get("importer_id"), "labels", label.id)
def state_sync(data):
    # Import a single workflow state into the target project.
    try:
        # Already synced once? (matched by external_id within project/workspace)
        state = State.objects.get(
            external_id=data.get("external_id"),
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
        )
    except State.DoesNotExist:
        # Fall back to matching an existing state by group + name.
        existing_states = State.objects.filter(
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            group=data.get("state_group"),
            name__iexact=data.get("state_name"),
        )
        if existing_states.exists():
            # Adopt the matching state: stamp it with the external identifiers
            # so future syncs hit the fast path above.
            existing_state = existing_states.first()
            existing_state.external_id = data.get("external_id")
            existing_state.external_source = data.get("external_source")
            existing_state.save()
        else:
            state = State.objects.create(
                project_id=data.get("project_id"),
                workspace_id=data.get("workspace_id"),
                name=data.get("state_name"),
                group=data.get("state_group"),
                created_by_id=data.get("created_by"),
                external_id=data.get("external_id"),
                external_source=data.get("external_source"),
            )
            # Only newly created states are recorded in imported_data.
            # NOTE(review): this call must live inside the else-branch —
            # `state` is unbound on the adopt path above; confirm intent.
            update_imported_items(data.get("importer_id"), "states", state.id)
# Check if any of the users are already member of workspace
_ = WorkspaceMember.objects.filter(
member__in=[user for user in workspace_users],
workspace_id=importer.workspace_id,
).update(is_active=True)
# Add new users to Workspace and project automatically
WorkspaceMember.objects.bulk_create(
[
WorkspaceMember(
member=user,
workspace_id=importer.workspace_id,
created_by=importer.created_by,
)
for user in workspace_users
],
batch_size=100,
ignore_conflicts=True,
)
def issue_sync(data):
    """Import a single issue; idempotent on (external_id, external_source,
    project, workspace).

    Creates the Issue plus its sequence record, original-source link, and an
    "imported" activity entry, then records the issue on the importer ledger.

    NOTE: interleaved old-code residue from a bad merge was removed from this
    function, and two real defects fixed: ``parent_id`` was assigned a whole
    ``values()`` QuerySet instead of an id, and ``get_state_id`` was queried
    twice for the same value.
    """
    try:
        issue = Issue.objects.get(
            external_id=data.get("external_id"),
            external_source=data.get("external_source"),
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
        )
    except Issue.DoesNotExist:
        # Get the default state (Triage is never a valid landing state).
        default_state = State.objects.filter(
            ~Q(name="Triage"), project_id=data.get("project_id"), default=True
        ).first()
        # If there is no default state assign any available state.
        if default_state is None:
            default_state = State.objects.filter(
                ~Q(name="Triage"), project_id=data.get("project_id")
            ).first()

        # Next sequence_id = current maximum + 1 (or 1 for the first issue).
        last_id = IssueSequence.objects.filter(
            project_id=data.get("project_id")
        ).aggregate(largest=Max("sequence"))["largest"]
        last_id = 1 if last_id is None else last_id + 1

        # Next sort order within the default state's column.
        largest_sort_order = Issue.objects.filter(
            project_id=data.get("project_id"), state=default_state
        ).aggregate(largest=Max("sort_order"))["largest"]
        largest_sort_order = (
            65535 if largest_sort_order is None else largest_sort_order + 10000
        )

        # Resolve the parent issue (already imported) to a concrete id.
        # FIX: the original assigned the `.values("id")` QuerySet itself.
        parent_id = None
        if data.get("parent_id", False):
            parent_id = (
                Issue.objects.filter(
                    external_id=data.get("parent_id"),
                    external_source=data.get("external_source"),
                    project_id=data.get("project_id"),
                    workspace_id=data.get("workspace_id"),
                )
                .values_list("id", flat=True)
                .first()
            )

        # Resolve the target state once (was queried twice before).
        state_ref = get_state_id(data.get("state"), data)

        issue = Issue.objects.create(
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            state_id=state_ref.get("id") if state_ref else default_state.id,
            name=data.get("name", "Issue Created through Importer")[:255],
            description_html=data.get("description_html", "<p></p>"),
            sequence_id=last_id,
            sort_order=largest_sort_order,
            start_date=data.get("start_date", None),
            target_date=data.get("target_date", None),
            priority=data.get("priority", "none"),
            created_by_id=data.get("created_by_id"),
            external_id=data.get("external_id"),
            external_source=data.get("external_source"),
            parent_id=parent_id,
        )

        # Sequences
        _ = IssueSequence.objects.create(
            issue=issue,
            sequence=issue.sequence_id,
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
        )
        # Attach a link back to the original (source) issue.
        _ = IssueLink.objects.create(
            issue=issue,
            url=data.get("link", {}).get("url", "https://github.com"),
            title=data.get("link", {}).get("title", "Original Issue"),
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            created_by_id=data.get("created_by_id"),
        )
        # Track the import as an issue activity.
        _ = IssueActivity.objects.create(
            issue=issue,
            actor_id=data.get("created_by_id"),
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            comment=f"imported the issue from {data.get('external_source')}",
            verb="created",
            created_by_id=data.get("created_by_id"),
        )
        update_imported_items(data.get("importer_id"), "issues", issue.id)
def issue_label_sync(data):
issue = Issue.objects.get(
external_source=data.get("external_issue_source"),
external_id=data.get("external_issue_id"),
project_id=data.get("project_id"),
workspace_id=data.get("workspace_id"),
)
if get_label_id(data.get("name"), data):
IssueLabel.objects.create(
issue=issue,
label_id=get_label_id(data.get("name"), data).get("id"),
project_id=data.get("project_id"),
workspace_id=data.get("workspace_id"),
created_by_id=data.get("created_by_id"),
)
def issue_assignee_sync(data):
    """Assign an imported issue to the user matching the payload email.

    Raises ``Issue.DoesNotExist`` when the issue has not been synced yet
    (``import_task`` retries on exceptions).
    """
    issue = Issue.objects.get(
        external_source=data.get("external_issue_source"),
        external_id=data.get("external_issue_id"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
    )
    # FIX: the original passed the whole `.values("id")` QuerySet as
    # `assignee_id`; resolve it to a concrete id and skip when the user is
    # unknown (members_sync should have provisioned them first).
    user = User.objects.filter(email=data.get("email")).values("id").first()
    if user:
        IssueAssignee.objects.create(
            issue=issue,
            assignee_id=user.get("id"),
            project_id=data.get("project_id"),
            workspace_id=data.get("workspace_id"),
            created_by_id=data.get("created_by_id"),
        )
def issue_comment_sync(data):
    """Create a comment on an imported issue.

    Raises ``Issue.DoesNotExist`` when the issue has not been synced yet
    (``import_task`` retries on exceptions).
    """
    issue = Issue.objects.get(
        external_source=data.get("external_issue_source"),
        external_id=data.get("external_issue_id"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
    )
    # Resolve the author once (the original queried get_user_id twice).
    # NOTE(review): get_user_id matches by email, so `created_by_id` here is
    # presumably the author's source-system email — confirm against callers.
    author = get_user_id(data.get("created_by_id"))
    IssueComment.objects.create(
        issue=issue,
        comment_html=data.get("comment_html", "<p></p>"),
        actor_id=data.get("created_by_id"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
        # Fall back to the raw payload value when no local user matches.
        created_by_id=author.get("id") if author else data.get("created_by_id"),
        external_id=data.get("external_id"),
        external_source=data.get("external_source"),
    )
def cycles_sync(data):
    """Import a single cycle; no-op when the external cycle already exists."""
    identity = {
        "external_id": data.get("external_id"),
        "external_source": data.get("external_source"),
        "project_id": data.get("project_id"),
        "workspace_id": data.get("workspace_id"),
    }
    try:
        # Already imported — nothing to do.
        _ = Cycle.objects.get(**identity)
    except Cycle.DoesNotExist:
        created = Cycle.objects.create(
            name=data.get("name"),
            description_html=data.get("description_html", "<p></p>"),
            created_by_id=data.get("created_by"),
            **identity,
        )
        # Record the new cycle on the importer's imported_data ledger.
        update_imported_items(data.get("importer_id"), "cycles", created.id)
def module_sync(data):
    """Import a single module; no-op when the external module already exists."""
    identity = {
        "external_id": data.get("external_id"),
        "external_source": data.get("external_source"),
        "project_id": data.get("project_id"),
        "workspace_id": data.get("workspace_id"),
    }
    try:
        # Already imported — nothing to do.
        _ = Module.objects.get(**identity)
    except Module.DoesNotExist:
        created = Module.objects.create(
            name=data.get("name"),
            description_html=data.get("description_html", "<p></p>"),
            created_by_id=data.get("created_by"),
            **identity,
        )
        # Record the new module on the importer's imported_data ledger.
        update_imported_items(data.get("importer_id"), "modules", created.id)
def modules_issue_sync(data):
    # Link an already-imported issue to an already-imported module.
    # Raises Module.DoesNotExist / Issue.DoesNotExist when either side has
    # not been synced yet; import_task catches and retries.
    module = Module.objects.get(
        external_id=data.get("module_id"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
        external_source=data.get("external_source"),
    )
    issue = Issue.objects.get(
        external_id=data.get("issue_id"),
        external_source=data.get("external_source"),
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
    )
    _ = ModuleIssue.objects.create(
        module=module,
        issue=issue,
        project_id=data.get("project_id"),
        workspace_id=data.get("workspace_id"),
        created_by_id=data.get("created_by"),
    )
def import_sync(data):
    """Persist the overall import status reported by the external service."""
    record = Importer.objects.get(pk=data.get("importer_id"))
    record.status = data.get("status")
    record.save(update_fields=["status"])
@shared_task(bind=True, queue="segway_task", max_retries=5)
def import_task(self, data):
    """Dispatch one sync event from the segway queue to its handler.

    ``data["type"]`` selects the handler; unknown or missing types are
    silently ignored. On failure the importer is marked failed, the task is
    retried with exponential backoff, and after max retries the failure
    reason is persisted.
    """
    # Avoid shadowing the `type` builtin.
    event_type = data.get("type")
    if event_type is None:
        return

    TYPE_MAPPER = {
        "member.sync": members_sync,
        "label.sync": label_sync,
        "state.sync": state_sync,
        "issue.sync": issue_sync,
        "issue.label.sync": issue_label_sync,
        "issue.assignee.sync": issue_assignee_sync,
        "issue.comment.sync": issue_comment_sync,
        "cycle.sync": cycles_sync,
        "module.sync": module_sync,
        "module.issue.sync": modules_issue_sync,
        "import.sync": import_sync,
    }

    try:
        func = TYPE_MAPPER.get(event_type)
        if func is None:
            return
        # Call the handler for this event.
        func(data)
        return
    except Exception as e:
        # FIX: the original referenced an undefined `importer_id` here,
        # raising NameError and masking the real failure.
        importer = Importer.objects.get(pk=data.get("importer_id"))
        importer.status = "failed"
        importer.save()
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        try:
            # Retry with exponential backoff. FIX: `Task.retry` has no
            # `backoff` kwarg — compute the delay from the retry count.
            self.retry(exc=e, countdown=50 * (2 ** self.request.retries))
        except MaxRetriesExceededError:
            # Max retries reached: persist the failure reason.
            importer = Importer.objects.get(pk=data.get("importer_id"))
            importer.status = "failed"
            # FIX: store a string, not the exception object itself.
            importer.reason = str(e)
            importer.save()
            return

View File

@@ -1,32 +1,20 @@
# Python imports
import json
import requests
import requests
# Third Party imports
from celery import shared_task
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone
# Third Party imports
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.db.models import (
User,
Issue,
Project,
Label,
IssueActivity,
State,
Cycle,
Module,
IssueReaction,
CommentReaction,
IssueComment,
)
from plane.app.serializers import IssueActivitySerializer
from plane.bgtasks.notification_task import notifications
# Module imports
from plane.db.models import (CommentReaction, Cycle, Issue, IssueActivity,
IssueComment, IssueReaction, Label, Module,
Project, State, User)
from sentry_sdk import capture_exception
# Track Changes in name
@@ -1471,6 +1459,7 @@ def issue_activity(
epoch,
subscriber=True,
):
print("Activities")
try:
issue_activities = []
@@ -1541,12 +1530,20 @@ def issue_activity(
IssueActivitySerializer(issue_activity).data,
cls=DjangoJSONEncoder,
)
_ = requests.post(
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(issue_activity.workspace_id)}/projects/{str(issue_activity.project_id)}/issues/{str(issue_activity.issue_id)}/issue-activity-hooks/",
json=issue_activity_json,
# _ = requests.post(
# f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(issue_activity.workspace_id)}/projects/{str(issue_activity.project_id)}/issues/{str(issue_activity.issue_id)}/issue-activity-hooks/",
# json=issue_activity_json,
# headers=headers,
# )
response = requests.post(
f"{settings.PROXY_BASE_URL}/api/slack/",
data=issue_activity_json,
headers=headers,
)
print(response)
except Exception as e:
print(e)
capture_exception(e)
notifications.delay(

View File

@@ -16,7 +16,7 @@ from plane.db.models import Issue, Project, State
from plane.bgtasks.issue_activites_task import issue_activity
@shared_task
@shared_task(queue='internal_tasks')
def archive_and_close_old_issues():
archive_old_issues()
close_old_issues()

View File

@@ -0,0 +1,5 @@
from celery import shared_task
@shared_task(queue="segway_tasks")
def issue_sync(data):
    """Log an issue payload received from the Segway service."""
    message = "Received data from Segway: {}".format(data)
    print(message)

View File

@@ -17,7 +17,7 @@ from sentry_sdk import capture_exception
from plane.license.utils.instance_value import get_email_configuration
@shared_task
@shared_task(queue='internal_tasks')
def magic_link(email, key, token, current_site):
try:
(

View File

@@ -183,7 +183,7 @@ def createMentionNotification(project, notification_comment, issue, actor_id, me
)
@shared_task
@shared_task(queue='internal_tasks')
def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created, requested_data, current_instance):
issue_activities_created = (
json.loads(

View File

@@ -15,7 +15,7 @@ from sentry_sdk import capture_exception
from plane.db.models import Project, User, ProjectMemberInvite
from plane.license.utils.instance_value import get_email_configuration
@shared_task
@shared_task(queue='internal_tasks')
def project_invitation(email, project_id, token, current_site, invitor):
try:
user = User.objects.get(email=invitor)

View File

@@ -11,7 +11,7 @@ from slack_sdk.errors import SlackApiError
from plane.db.models import User
@shared_task
@shared_task(queue='internal_tasks')
def send_welcome_slack(user_id, created, message):
try:
instance = User.objects.get(pk=user_id)

View File

@@ -71,6 +71,7 @@ def get_model_data(event, event_id, many=False):
retry_backoff=600,
max_retries=5,
retry_jitter=True,
queue='internal_tasks'
)
def webhook_task(self, webhook, slug, event, event_data, action):
try:
@@ -161,7 +162,7 @@ def webhook_task(self, webhook, slug, event, event_data, action):
return
@shared_task()
@shared_task(queue='internal_tasks')
def send_webhook(event, payload, kw, action, slug, bulk):
try:
webhooks = Webhook.objects.filter(workspace__slug=slug, is_active=True)

View File

@@ -20,7 +20,7 @@ from plane.db.models import Workspace, WorkspaceMemberInvite, User
from plane.license.utils.instance_value import get_email_configuration
@shared_task
@shared_task(queue='internal_tasks')
def workspace_invitation(email, workspace_id, token, current_site, invitor):
try:
user = User.objects.get(email=invitor)

View File

@@ -1,14 +1,19 @@
import os
from celery import Celery
from plane.settings.redis import redis_instance
from celery.schedules import crontab
import dotenv
from celery import Celery
from celery.schedules import crontab
from plane.settings.redis import redis_instance
env_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), '.env')
dotenv.read_dotenv(env_path)
# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
ri = redis_instance()
print(ri)
app = Celery("plane")
app = Celery('tasks', broker='pyamqp://guest:guest@localhost:5672//')
# Using a string here means the worker will not have to
# pickle the object when using Windows.

View File

@@ -0,0 +1,73 @@
# Generated by Django 4.2.7 on 2023-12-15 19:21
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds ``external_id`` and
    # ``external_source`` columns to every model that can be populated by an
    # external importer (Cycle, Issue, IssueComment, Label, Module, State),
    # so imported records can be traced back to their origin system.
    #
    # NOTE(review): the ``external_source`` CharFields carry no
    # ``max_length``; Django only permits unbounded CharField on the
    # PostgreSQL backend -- confirm the deployment database.

    dependencies = [
        ('db', '0050_user_use_case_alter_workspace_organization_size'),
    ]

    operations = [
        migrations.AddField(
            model_name='cycle',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='cycle',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='issue',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='issue',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='issuecomment',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='issuecomment',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='label',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='label',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='module',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='module',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='state',
            name='external_id',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='state',
            name='external_source',
            field=models.CharField(blank=True, null=True),
        ),
    ]

View File

@@ -0,0 +1,18 @@
# Generated by Django 4.2.7 on 2023-12-19 07:11
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds a free-text ``reason`` field to the
    # Importer model so a failed import can record why it failed.

    dependencies = [
        ('db', '0051_cycle_external_id_cycle_external_source_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='importer',
            name='reason',
            field=models.TextField(blank=True),
        ),
    ]

View File

@@ -22,8 +22,6 @@ class BaseModel(AuditModel):
user = get_current_user()
if user is None or user.is_anonymous:
self.created_by = None
self.updated_by = None
super(BaseModel, self).save(*args, **kwargs)
else:
# Check if the model is being created or updated

View File

@@ -18,6 +18,8 @@ class Cycle(ProjectBaseModel):
)
view_props = models.JSONField(default=dict)
sort_order = models.FloatField(default=65535)
external_source = models.CharField(null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
class Meta:
verbose_name = "Cycle"
@@ -27,9 +29,9 @@ class Cycle(ProjectBaseModel):
def save(self, *args, **kwargs):
if self._state.adding:
smallest_sort_order = Cycle.objects.filter(
project=self.project
).aggregate(smallest=models.Min("sort_order"))["smallest"]
smallest_sort_order = Cycle.objects.filter(project=self.project).aggregate(
smallest=models.Min("sort_order")
)["smallest"]
if smallest_sort_order is not None:
self.sort_order = smallest_sort_order - 10000

View File

@@ -34,6 +34,7 @@ class Importer(ProjectBaseModel):
"db.APIToken", on_delete=models.CASCADE, related_name="importer"
)
imported_data = models.JSONField(null=True)
reason = models.TextField(blank=True)
class Meta:
verbose_name = "Importer"

View File

@@ -102,6 +102,8 @@ class Issue(ProjectBaseModel):
completed_at = models.DateTimeField(null=True)
archived_at = models.DateField(null=True)
is_draft = models.BooleanField(default=False)
external_source = models.CharField(null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
objects = models.Manager()
issue_objects = IssueManager()
@@ -132,7 +134,6 @@ class Issue(ProjectBaseModel):
except ImportError:
pass
if self._state.adding:
# Get the maximum display_id value from the database
last_id = IssueSequence.objects.filter(project=self.project).aggregate(
@@ -210,8 +211,9 @@ class IssueRelation(ProjectBaseModel):
ordering = ("-created_at",)
def __str__(self):
return f"{self.issue.name} {self.related_issue.name}"
return f"{self.issue.name} {self.related_issue.name}"
class IssueMention(ProjectBaseModel):
issue = models.ForeignKey(
Issue, on_delete=models.CASCADE, related_name="issue_mention"
@@ -221,6 +223,7 @@ class IssueMention(ProjectBaseModel):
on_delete=models.CASCADE,
related_name="issue_mention",
)
class Meta:
unique_together = ["issue", "mention"]
verbose_name = "Issue Mention"
@@ -229,7 +232,7 @@ class IssueMention(ProjectBaseModel):
ordering = ("-created_at",)
def __str__(self):
return f"{self.issue.name} {self.mention.email}"
return f"{self.issue.name} {self.mention.email}"
class IssueAssignee(ProjectBaseModel):
@@ -366,6 +369,8 @@ class IssueComment(ProjectBaseModel):
default="INTERNAL",
max_length=100,
)
external_source = models.CharField(null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
def save(self, *args, **kwargs):
self.comment_stripped = (
@@ -416,6 +421,8 @@ class Label(ProjectBaseModel):
description = models.TextField(blank=True)
color = models.CharField(max_length=255, blank=True)
sort_order = models.FloatField(default=65535)
external_source = models.CharField(null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
class Meta:
unique_together = ["name", "project"]

View File

@@ -41,6 +41,8 @@ class Module(ProjectBaseModel):
)
view_props = models.JSONField(default=dict)
sort_order = models.FloatField(default=65535)
external_source = models.CharField(null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
class Meta:
unique_together = ["name", "project"]

View File

@@ -24,6 +24,8 @@ class State(ProjectBaseModel):
max_length=20,
)
default = models.BooleanField(default=False)
external_source = models.CharField(null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
def __str__(self):
"""Return name of the state"""

View File

@@ -2,19 +2,19 @@
# Python imports
import os
import ssl
import certifi
from datetime import timedelta
from urllib.parse import urlparse
# Django imports
from django.core.management.utils import get_random_secret_key
import certifi
# Third party imports
import dj_database_url
import sentry_sdk
# Django imports
from django.core.management.utils import get_random_secret_key
from kombu import Exchange, Queue
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations.celery import CeleryIntegration
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -148,6 +148,9 @@ else:
REDIS_URL = os.environ.get("REDIS_URL")
REDIS_SSL = REDIS_URL and "rediss" in REDIS_URL
# RabbitMq Config
RABBITMQ_URL = os.environ.get("RABBITMQ_URL")
if REDIS_SSL:
CACHES = {
"default": {
@@ -270,18 +273,28 @@ SIMPLE_JWT = {
# Celery Configuration
CELERY_TIMEZONE = TIME_ZONE
CELERY_TASK_SERIALIZER = "json"
CELERY_ACCEPT_CONTENT = ["application/json"]
CELERY_ACCEPT_CONTENT = ["json"]
if REDIS_SSL:
redis_url = os.environ.get("REDIS_URL")
broker_url = (
f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}"
)
CELERY_BROKER_URL = broker_url
CELERY_RESULT_BACKEND = broker_url
else:
CELERY_BROKER_URL = REDIS_URL
CELERY_RESULT_BACKEND = REDIS_URL
CELERY_BROKER_URL = RABBITMQ_URL
CELERY_RESULT_BACKEND = REDIS_URL
CELERY_QUEUES = (
Queue(
"internal_tasks",
Exchange("internal_exchange", type="direct"),
routing_key="internal",
),
Queue(
"external_tasks",
Exchange("external_exchange", type="direct"),
routing_key="external",
),
Queue(
"segway_tasks",
Exchange("segway_exchange", type="direct"),
routing_key="segway",
),
)
CELERY_IMPORTS = (
"plane.bgtasks.issue_automation_task",
@@ -291,7 +304,9 @@ CELERY_IMPORTS = (
# Sentry Settings
# Enable Sentry Settings
if bool(os.environ.get("SENTRY_DSN", False)) and os.environ.get("SENTRY_DSN").startswith("https://"):
if bool(os.environ.get("SENTRY_DSN", False)) and os.environ.get(
"SENTRY_DSN"
).startswith("https://"):
sentry_sdk.init(
dsn=os.environ.get("SENTRY_DSN", ""),
integrations=[
@@ -307,7 +322,7 @@ if bool(os.environ.get("SENTRY_DSN", False)) and os.environ.get("SENTRY_DSN").st
# Application Envs
PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False) # For External
PROXY_BASE_URL = "http://localhost:8080"
SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False)
FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
@@ -327,10 +342,9 @@ USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1
POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY", False)
POSTHOG_HOST = os.environ.get("POSTHOG_HOST", False)
# instance key
INSTANCE_KEY = os.environ.get(
"INSTANCE_KEY", "ae6517d563dfc13d8270bd45cf17b08f70b37d989128a9dab46ff687603333c3"
)
# Skip environment variable configuration
SKIP_ENV_VAR = os.environ.get("SKIP_ENV_VAR", "1") == "1"
# Segway
SEGWAY_BASE_URL = os.environ.get("SEGWAY_BASE_URL", "http://localhost:9000")
SEGWAY_KEY = os.environ.get("SEGWAY_KEY", False)

View File

@@ -0,0 +1,17 @@
{
"compilerOptions": {
"target": "es2016",
"module": "commonjs",
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"strict": true,
"skipLibCheck": true,
"outDir": "./dist",
"baseUrl": "./src",
"moduleResolution": "node",
"experimentalDecorators": true,
"emitDecoratorMetadata": true
},
"include": ["src/**/*.ts"],
"exclude": ["node_modules"]
}

7
segway/.env.example Normal file
View File

@@ -0,0 +1,7 @@
APP_ENV=local
SERVER_PORT=9000
DATABASE_URL=""
RABBITMQ_URL=""
SENTRY_DSN=""
GITHUB_APP_ID=""
GITHUB_APP_PRIVATE_KEY=""

20
segway/Dockerfile.segway Normal file
View File

@@ -0,0 +1,20 @@
# Use the official Node.js 18-alpine image as the base image
FROM node:18-alpine

# Set the working directory inside the container
WORKDIR /usr/src/app

# Copy package.json and package-lock.json to the working directory
COPY package*.json ./

# Install dependencies
RUN npm install

# Copy the rest of the application code to the working directory
COPY . .

# Build the TypeScript code
RUN npm run build

# Expose the port that your application will run on
EXPOSE 9000

# NOTE(review): no CMD/ENTRYPOINT is visible in this file -- confirm how the
# container is started (e.g. CMD ["npm", "start"]).

3
segway/README.md Normal file
View File

@@ -0,0 +1,3 @@
# Plane Segway
A Node.js process that takes care of external integrations with Plane.

46
segway/package.json Normal file
View File

@@ -0,0 +1,46 @@
{
"name": "segway",
"version": "0.0.1",
"description": "An integration service that syncs Plane data with external sources.",
"author": "plane team",
"license": "ISC",
"private": true,
"scripts": {
"build": "npx tsc",
"start": "node dist/start.js",
"dev": "concurrently \"npx tsc --watch\" \"nodemon -q dist/start.js\""
},
"dependencies": {
"@octokit/rest": "^20.0.2",
"@overnightjs/core": "^1.7.6",
"@sentry/node": "^7.73.0",
"@sentry/tracing": "^7.73.0",
"amqplib": "^0.10.3",
"axios": "^1.6.2",
"cors": "^2.8.5",
"dotenv": "^16.3.1",
"drizzle-orm": "^0.29.1",
"express": "^4.18.2",
"jsonwebtoken": "^9.0.2",
"node-fetch": "^3.3.2",
"octokit": "^3.1.2",
"postgres": "^3.4.1",
"showdown": "^2.1.0",
"uuid": "^9.0.1",
"winston": "^3.10.0",
"zod": "^3.22.4"
},
"devDependencies": {
"@types/amqplib": "^0.10.4",
"@types/cors": "^2.8.14",
"@types/express": "^4.17.18",
"@types/jsonwebtoken": "^9.0.5",
"@types/node": "^20.8.3",
"@types/pg": "^8.10.9",
"@types/showdown": "^2.0.6",
"concurrently": "^8.2.1",
"drizzle-kit": "^0.20.6",
"nodemon": "^3.0.1",
"typescript": "^5.2.2"
}
}

View File

@@ -0,0 +1,435 @@
// express
import { Request, Response } from "express";
// overnight js
import { Controller, Post, Middleware } from "@overnightjs/core";
// postgres
import { PostgresJsDatabase } from "drizzle-orm/postgres-js";
// showdown
import showdown from "showdown";
// octokit
import { Octokit } from "octokit";
import { getOctokit } from "../utils/github.authentication";
// logger
import { logger } from "../utils/logger";
// mq
import { MQSingleton } from "../mq/singleton";
// middleware
import AuthKeyMiddleware from "../middleware/authkey.middleware";
@Controller("api/github")
export class GithubController {
/**
* This controller houses all routes for the Github Importer/Integration
*/
// Initialize database and mq
db: PostgresJsDatabase;
mq: MQSingleton;
constructor(db: PostgresJsDatabase, mq: MQSingleton) {
this.db = db;
this.mq = mq;
}
private getAllEntities = async (
octokit: Octokit,
requestPath: string,
requestParams: any
) => {
let page = 1;
let results;
const returnData = [];
do {
results = await octokit.request(requestPath, { ...requestParams, page });
returnData.push(...results.data);
page++;
} while (results.data.length !== 0);
return returnData;
};
private githubCommentCreator = (
issue_number: number,
comments: { [key: string]: any }[]
) => {
const bulk_comments: { [key: string]: string | number }[] = [];
const converter = new showdown.Converter({ optionKey: "value" });
comments.forEach((comment) => {
if (
parseInt(
comment.issue_url.substring(comment.issue_url.lastIndexOf("/") + 1)
) === issue_number
) {
bulk_comments.push({
external_id: comment.id,
external_source: "github",
comment_html:
comment.body === null
? "<p></p>"
: converter.makeHtml(comment.body),
});
}
});
return bulk_comments;
};
private githubLabelCreator = (
issue_number: number,
labels: (string | { [key: string]: any })[]
) => {
const issueLabels: { [key: string]: string | number }[] = [];
labels.forEach((label) =>
issueLabels.push({
name: typeof label === "object" && label !== null ? label.name : label,
})
);
return issueLabels;
};
@Post("")
@Middleware([AuthKeyMiddleware])
private async home(req: Request, res: Response) {
try {
const { owner, repo, installationId } = req.body;
// Get the octokit instance
const octokit = await getOctokit(installationId);
// Fetch open issues
const openIssuesResponse = await octokit.request("GET /search/issues", {
q: `repo:${owner}/${repo} type:issue state:open`,
});
const openIssuesCount = openIssuesResponse.data.total_count;
// Fetch closed issues
const closedIssuesResponse = await octokit.request("GET /search/issues", {
q: `repo:${owner}/${repo} type:issue state:closed`,
});
const closedIssuesCount = closedIssuesResponse.data.total_count;
// Calculate total issues
const totalIssues = openIssuesCount + closedIssuesCount;
// Fetch total labels count
const labels = await this.getAllEntities(
octokit,
"GET /repos/{owner}/{repo}/labels",
{ owner, repo }
);
// Fetch total collaborators count
const collaborators = await this.getAllEntities(
octokit,
"GET /repos/{owner}/{repo}/collaborators",
{ owner, repo }
);
const labelCount = labels.length;
return res.status(200).json({
issue_count: totalIssues,
labels: labelCount,
collaborators,
});
} catch (error) {
logger.error(error);
return res.json({ message: "Server error", status: 500, error: error });
}
}
@Post("metadata")
@Middleware([AuthKeyMiddleware])
private async metadata(req: Request, res: Response) {
try {
const { installationId } = req.body;
// Get the octokit instance
const octokit = await getOctokit(installationId);
const { data } = await octokit.request("GET /app", {
headers: {
"X-GitHub-Api-Version": "2022-11-28",
},
});
return res.status(200).json(data);
} catch (error) {
logger.error(error);
return res.json({ message: "Server error", status: 500, error: error });
}
}
@Post("repos")
@Middleware([AuthKeyMiddleware])
private async repos(req: Request, res: Response) {
try {
const { installationId, page } = req.body;
// Get the octokit instance
const octokit = await getOctokit(installationId);
const { data } = await octokit.request("GET /installation/repositories", {
q: `page=${page}`,
headers: {
"X-GitHub-Api-Version": "2022-11-28",
},
});
return res.status(200).json(data);
} catch (error) {
logger.error(error);
return res.json({ message: "Server error", status: 500, error: error });
}
}
@Post("import")
@Middleware([AuthKeyMiddleware])
private async import(req: Request, res: Response) {
const {
metadata: { owner, name: repo },
data: { users },
config: { installation_id, sync },
workspace_id,
project_id,
created_by,
importer_id,
} = req.body;
try {
res.status(200).json({
message: "Successful",
});
// Get the octokit instance
const octokit = await getOctokit(installation_id);
// Markdown converter
const converter = new showdown.Converter({ optionKey: "value" });
// users
const members = [];
for (const user of users) {
if (user?.import == "invite" || user?.import == "map") {
const githubMembers = {
args: [], // args
kwargs: {
data: {
type: "member.sync",
email: user.email,
workspace_id: workspace_id,
project_id: project_id,
created_by_id: created_by,
importer_id: importer_id,
},
}, // kwargs
other_data: {}, // other data
};
members.push(user);
this.mq?.publish(
githubMembers,
"plane.bgtasks.importer_task.import_task"
);
}
}
// Labels
const githubLabels = await octokit.paginate(
octokit.rest.issues.listLabelsForRepo,
{
owner: owner,
repo: repo,
headers: {
"X-GitHub-Api-Version": "2022-11-28",
},
per_page: 100,
}
);
for await (const label of githubLabels) {
const labelSync = {
args: [], // args
kwargs: {
data: {
type: "label.sync",
external_source: "github",
external_id: label.id,
color: `#${label.color}`,
name: label.name,
workspace_id: workspace_id,
project_id: project_id,
created_by_id: created_by,
importer_id: importer_id,
},
}, // kwargs
other_data: {}, // other data
};
this.mq?.publish(labelSync, "plane.bgtasks.importer_task.import_task");
}
// Issues
const githubIssues = await octokit.paginate(
octokit.rest.issues.listForRepo,
{
state: "all",
owner: owner,
repo: repo,
headers: {
"X-GitHub-Api-Version": "2022-11-28",
},
per_page: 100,
sort: "created",
direction: "asc",
}
);
// Issue comments
const comments = [];
const githubComments = await octokit.paginate(
octokit.rest.issues.listCommentsForRepo,
{
owner: owner,
repo: repo,
headers: {
"X-GitHub-Api-Version": "2022-11-28",
},
per_page: 100,
}
);
for await (const comment of githubComments) {
comments.push(comment);
}
for await (const issue of githubIssues) {
if (!("pull_request" in issue)) {
const description_html = await converter.makeHtml(
issue?.body_html || "<p><p>"
);
const issueSync = {
args: [], // args
kwargs: {
data: {
type: "issue.sync",
name: issue.title,
description_html: description_html,
state: issue.state,
workspace_id: workspace_id,
project_id: project_id,
created_by_id: created_by,
external_id: issue.id,
external_source: "github",
link: {
title: `Original Issue in Github ${issue.number}`,
url: issue.html_url,
},
parent_id: null,
importer_id: importer_id,
},
},
};
// Push the issue
this.mq?.publish(issueSync, "plane.bgtasks.importer_task.import_task");
// Push the comments
const githubIssueComments = this.githubCommentCreator(
issue.number,
comments
);
githubIssueComments.forEach((githubIssueComment) => {
const commentSync = {
args: [],
kwargs: {
data: {
type: "issue.comment.sync",
comment_html: githubIssueComment.comment_html,
external_source: githubIssueComment.external_source,
external_id: githubIssueComment.external_id,
external_issue_id: issue.id,
external_issue_source: "github",
workspace_id: workspace_id,
project_id: project_id,
created_by_id: created_by,
importer_id: importer_id,
},
},
};
// push to queue
this.mq?.publish(
commentSync,
"plane.bgtasks.importer_task.import_task"
);
});
// Push the labels
const githubLabels = this.githubLabelCreator(
issue.number,
issue.labels
);
githubLabels.forEach((githubLabel) => {
const labelSync = {
args: [],
kwargs: {
data: {
type: "issue.label.sync",
name: githubLabel.name,
external_issue_id: issue.id,
external_issue_source: "github",
workspace_id: workspace_id,
project_id: project_id,
created_by_id: created_by,
importer_id: importer_id,
},
},
};
//Push to queue
this.mq?.publish(
labelSync,
"plane.bgtasks.importer_task.import_task"
);
});
}
}
const import_sync = {
args: [], // args
kwargs: {
data: {
type: "import.sync",
workspace_id: workspace_id,
project_id: project_id,
created_by_id: created_by,
importer_id: importer_id,
status: "completed",
},
}, // kwargs
other_data: {}, // other data
};
this.mq?.publish(import_sync, "plane.bgtasks.importer_task.import_task");
return;
} catch (error) {
logger.error(error);
const import_sync = {
args: [], // args
kwargs: {
data: {
type: "import.sync",
workspace_id: workspace_id,
project_id: project_id,
created_by_id: created_by,
importer_id: importer_id,
status: "failed",
},
}, // kwargs
other_data: {}, // other data
};
this.mq?.publish(import_sync, "plane.bgtasks.importer_task.import_task");
return res.json({ message: "Server error", status: 500, error: error });
}
}
}

View File

@@ -0,0 +1,3 @@
// Barrel file re-exporting all API controllers.
// Consistency fix: terminate every re-export with a semicolon, matching the
// first line's style.
export * from "./jira.controller";
export * from "./slack.controller";
export * from "./github.controller";

View File

@@ -0,0 +1,429 @@
// overnight js
import { Request, Response } from "express";
import { Controller, Post, Middleware } from "@overnightjs/core";
import { PostgresJsDatabase } from "drizzle-orm/postgres-js";
// mq
import { MQSingleton } from "../mq/singleton";
// middleware
import AuthKeyMiddleware from "../middleware/authkey.middleware";
// axios
import axios, { AxiosResponse } from "axios";
import { loadIssues, loadComments } from "../utils/paginator";
@Controller("api/jira")
export class JiraController {
/**
* This controller houses all routes for the Jira Importer
*/
// Initialize database and mq
db: PostgresJsDatabase;
mq: MQSingleton;
constructor(db: PostgresJsDatabase, mq: MQSingleton) {
this.db = db;
this.mq = mq;
}
@Post("")
@Middleware([AuthKeyMiddleware])
private async home(req: Request, res: Response) {
try {
const { email, api_token, project_key, cloud_hostname } = req.body;
const auth = {
username: email,
password: api_token,
};
const headers = {
Accept: "application/json",
};
// Constructing URLs
const issueUrl = `https://${cloud_hostname}/rest/api/3/search?jql=project=${project_key}`;
const moduleUrl = `https://${cloud_hostname}/rest/api/3/search?jql=project=${project_key}`;
const statusUrl = `https://${cloud_hostname}/rest/api/3/status/?jql=project={project_key}`;
const labelsUrl = `https://${cloud_hostname}/rest/api/3/label/?jql=project=${project_key}`;
const usersUrl = `https://${cloud_hostname}/rest/api/3/users/search?jql=project=${project_key}`;
// Making requests
const [
issueResponse,
moduleResponse,
statusResponse,
labelsResponse,
usersResponse,
] = await Promise.all([
axios.get(issueUrl, { auth, headers }),
axios.get(moduleUrl, { auth, headers }),
axios.get(statusUrl, { auth, headers }),
axios.get(labelsUrl, { auth, headers }),
axios.get(usersUrl, { auth, headers }),
]);
const issuesTotal = issueResponse.data.total;
const modulesTotal = moduleResponse.data.total;
const labelsTotal = labelsResponse.data.total;
const statusCount = statusResponse.data.length;
const usersData = usersResponse.data.filter(
(user: any) => user.accountType === "atlassian"
);
res.status(200).json({
issues: issuesTotal,
modules: modulesTotal,
labels: labelsTotal,
states: statusCount,
users: usersData,
});
return;
} catch (error) {
return res.json({ message: "Server error", status: 500, error: error });
}
}
@Post("import")
@Middleware([AuthKeyMiddleware])
private async import(req: Request, res: Response) {
try {
res.status(200).json({
message: "Successful",
});
// const result = await this.db.select().from('users');
const { email, api_token, project_key, cloud_hostname } =
req.body.metadata;
const auth = {
username: email,
password: api_token,
};
const headers = {
Accept: "application/json",
};
const workspace_id = req.body.workspace_id;
const project_id = req.body.project_id;
const created_by = req.body.created_by;
const importer_id = req.body.importer_id;
const users = req.body.data.users;
// users
const members = [];
for (const user of users) {
if (user?.import == "invite" || user?.import == "map") {
const jira_members = {
args: [], // args
kwargs: {
data: {
type: "user.create",
email: user.email,
workspace_id: workspace_id,
project_id: project_id,
created_by: created_by,
},
}, // kwargs
other_data: {}, // other data
};
members.push(user);
this.mq?.publish(
jira_members,
"plane.bgtasks.importer_task.members_sync"
);
}
}
// labels
const labelsUrl = `https://${cloud_hostname}/rest/api/3/label/?jql=project=${project_key}`;
const labelsResponse = await axios.get(labelsUrl, { auth, headers });
const labels = labelsResponse.data.values;
for (const label of labels) {
const labelssync = {
args: [], // args
kwargs: {
data: {
external_source: "jira",
type: "label.create",
name: label,
workspace_id: workspace_id,
project_id: project_id,
created_by: created_by,
},
}, // kwargs
other_data: {}, // other data
};
this.mq?.publish(labelssync, "plane.bgtasks.importer_task.label_sync");
}
// states
const statusUrl = `https://${cloud_hostname}/rest/api/3/project/${project_key}/statuses`;
const response = await axios.get(statusUrl, { auth, headers });
if (response && response.data && response.data.length) {
const statusData = response.data[0];
if (statusData && statusData.statuses) {
for (const statusCategory of statusData.statuses) {
const state_name = statusCategory.name;
const state_group =
statusCategory.statusCategory.name === "To Do"
? "unstarted"
: statusCategory.statusCategory.name === "In Progress"
? "started"
: statusCategory.statusCategory.name === "Done"
? "completed"
: statusCategory.statusCategory.name;
const statessync = {
args: [], // args
kwargs: {
data: {
type: "state.create",
state_name: state_name,
state_group: state_group,
workspace_id: workspace_id,
project_id: project_id,
created_by: created_by,
external_id: statusCategory.id,
external_source: "jira",
},
}, // kwargs
other_data: {}, // other data
};
this.mq?.publish(
statessync,
"plane.bgtasks.importer_task.state_sync"
);
}
}
}
const modules = [];
const child_issues = [];
const module_issues = [];
const url = `https://${cloud_hostname}/rest/api/3/search/?jql=project=${project_key}&fields=comment, issuetype, summary, description, assignee, priority, status, labels, duedate, parent, parentEpic&maxResults=100&expand=renderedFields`;
for await (const issue of loadIssues(url, auth)) {
if (issue.fields.parent) {
if (issue.fields.parent?.fields?.issuetype?.name == "Epic") {
module_issues.push({
issue_id: issue.id,
module_id: issue.fields.parent?.id,
});
} else {
child_issues.push(issue);
continue;
}
}
// skipping all the epics
if (issue.fields.issuetype.name === "Epic") {
modules.push(issue);
continue;
}
const user = members.find(
(user) => user.username === issue.fields.assignee?.displayName
);
// issue comments
const comments_list = [];
const comment_url = `https://${cloud_hostname}/rest/api/3/issue/${issue.id}/comment?expand=renderedBody`;
const commentResponse = await axios.get(comment_url, { auth, headers });
if (
commentResponse &&
commentResponse.data &&
commentResponse.data.total
) {
for await (const comment of loadComments(comment_url, auth)) {
comments_list.push({
comment_html:
comment.renderedBody === "" ? "<p></p>" : comment.renderedBody,
created_by: comment.updateAuthor.emailAddress,
});
}
}
const issuessync = {
args: [], // args
kwargs: {
data: {
type: "issue.create",
name: issue.fields.summary.substring(0, 250),
description_html: issue.renderedFields.description ?? null,
assignee: user?.email,
state: issue.fields.status.name,
priority:
issue.fields.priority.name.toLowerCase() === "medium"
? "medium"
: issue.fields.priority.name.toLowerCase() === "highest"
? "high"
: "low",
workspace_id: workspace_id,
project_id: project_id,
created_by: created_by,
external_id: issue.id,
external_source: "jira",
comments_list: comments_list,
target_date: issue.fields.duedate,
link: {
title: `Original Issue in Jira ${issue.key}`,
url: `https://${cloud_hostname}/browse/${issue.key}`,
},
labels_list: issue.fields.labels,
parent_id: null,
},
},
};
this.mq?.publish(issuessync, "plane.bgtasks.importer_task.issue_sync");
}
for (const issue of child_issues) {
const user = members.find(
(user) => user.username === issue.fields.assignee?.displayName
);
// issue comments
const comments_list = [];
const comment_url = `https://${cloud_hostname}/rest/api/3/issue/${issue.id}/comment?expand=renderedBody`;
const commentResponse = await axios.get(comment_url, { auth, headers });
if (
commentResponse &&
commentResponse.data &&
commentResponse.data.total
) {
for await (const comment of loadComments(comment_url, auth)) {
comments_list.push({
comment_html:
comment.renderedBody === "" ? "<p></p>" : comment.renderedBody,
created_by: comment.updateAuthor.emailAddress,
});
}
}
const issuessync = {
args: [], // args
kwargs: {
data: {
type: "issue.create",
name: issue.fields.summary.substring(0, 250),
description_html: issue.renderedFields?.description,
assignee: user?.email,
state: issue.fields.status.name,
priority:
issue.fields.priority.name.toLowerCase() === "medium"
? "medium"
: issue.fields.priority.name.toLowerCase() === "highest"
? "high"
: "low",
workspace_id: workspace_id,
project_id: project_id,
created_by: created_by,
external_id: issue.id,
external_source: "jira",
comments_list: comments_list,
target_date: issue.fields.duedate,
link: {
title: `Original Issue in Jira ${issue.key}`,
url: `https://${cloud_hostname}/browse/${issue.key}`,
},
labels_list: issue.fields.labels,
parent_id: issue.fields.parent.id,
},
},
};
this.mq?.publish(issuessync, "plane.bgtasks.importer_task.issue_sync");
}
// modules
for (const module of modules) {
const modulessync = {
args: [], // args
kwargs: {
data: {
type: "module.create",
name: module.fields.summary.substring(0, 250),
description_html: module.renderedFields?.description,
workspace_id: workspace_id,
project_id: project_id,
created_by: created_by,
external_id: module.id,
external_source: "jira",
},
}, // kwargs
other_data: {}, // other data
};
this.mq?.publish(
modulessync,
"plane.bgtasks.importer_task.module_sync"
);
}
for (const module_issue of module_issues) {
const modules_issue_sync = {
args: [], // args
kwargs: {
data: {
type: "module.create",
module_id: module_issue.module_id,
issue_id: module_issue.issue_id,
workspace_id: workspace_id,
project_id: project_id,
created_by: created_by,
external_source: "jira",
},
}, // kwargs
other_data: {}, // other data
};
this.mq?.publish(
modules_issue_sync,
"plane.bgtasks.importer_task.modules_issue_sync"
);
}
const import_sync = {
args: [], // args
kwargs: {
data: {
type: "import.create",
workspace_id: workspace_id,
project_id: project_id,
created_by: created_by,
importer_id: importer_id,
status: "completed",
},
}, // kwargs
other_data: {}, // other data
};
this.mq?.publish(import_sync, "plane.bgtasks.importer_task.import_sync");
return;
} catch (error) {
const workspace_id = req.body.workspace_id;
const project_id = req.body.project_id;
const created_by = req.body.created_by;
const importer_id = req.body.importer_id;
const import_sync = {
args: [], // args
kwargs: {
data: {
type: "import.create",
workspace_id: workspace_id,
project_id: project_id,
created_by: created_by,
importer_id: importer_id,
status: "failed",
},
}, // kwargs
other_data: {}, // other data
};
this.mq?.publish(import_sync, "plane.bgtasks.importer_task.import_sync");
return res.json({ message: "Server error", error: error });
}
}
}

View File

@@ -0,0 +1,199 @@
import { Request, Response } from "express";
import { Controller, Get, Post } from "@overnightjs/core";
import { getSlackMessageTemplate } from "../utils/slack/message-templates";
import { SlackService } from "../services/slack.service";
import { issueActivitySummary } from "../utils/slack/generateActivityMessage";
import { logger } from "../utils/logger";
import {
CreateIssueModalViewProjects,
notificationModal,
} from "../utils/slack/create-issue-modal";
import { ProjectService } from "../services/project.service";
import { convertToSlackOptions } from "../utils/slack/convert-to-slack-options";
import { processSlackPayload } from "../handlers/slack/core";
import { TSlackPayload } from "../types/slack";
import { MQSingleton } from "../mq/singleton";
import { DatabaseSingleton } from "../db/singleton";
import { eq } from "drizzle-orm";
import { users } from "../db/schema";
import { notifications } from "../db/schema/notifications.schema";
import { generateNotificationMessage } from "../utils/generateNotificationMessage";
@Controller("api/slack")
export class SlackController {
@Post("")
async postActivity(req: Request, res: Response) {
try {
// Initiate Slack Service
const slackService = new SlackService();
// Convert Slack message from the issueActivity provided from the request
const issueActivityData = req.body;
const activitySummary = await issueActivitySummary(issueActivityData);
// Convert message string into slack message blocks format
const issue_template = getSlackMessageTemplate(activitySummary);
// send message to slack
slackService.sendMessage(issue_template);
res.status(200);
return;
} catch (error) {
logger.error(error);
return res.json({ message: "Server error" });
}
}
@Post("trigger/ui/notifications/")
async triggerNotificationsModal(req: Request, res: Response) {
const slackService = new SlackService();
const db = DatabaseSingleton.getInstance().db;
const slackUserInfo = await slackService.getUserInfo(req.body.user_id);
const displayedSlackUser = await slackUserInfo?.json();
const planeUser = await db?.query.users.findFirst({
where: eq(users.email, displayedSlackUser?.user?.profile?.email),
});
if (planeUser === undefined || planeUser === null) {
console.log("no plane user");
res.status(200).send("");
}
const fetchedNotifications = await db?.query.notifications.findMany({
where: eq(notifications.receiverId, planeUser.id),
with: { triggeredBy: true, project: true, createdBy: true },
});
const notificationMessages: string[] = fetchedNotifications.map(
(notification) => generateNotificationMessage(notification),
);
const response = await slackService.openModal(
req.body.trigger_id,
notificationModal(notificationMessages),
);
const json = await response.json();
res.status(200);
}
@Post("trigger/ui/create-issue/")
async triggerCreateIssueModal(req: Request, res: Response) {
const slackService = new SlackService();
const projectService = new ProjectService();
const mq = MQSingleton.getInstance();
const text = req.body.text;
const teamId = req.body.team_id;
const workspaceId = await slackService.getWorkspaceId(teamId);
if (!workspaceId) {
return res.json({
response_type: "ephemeral",
text: "Workspace not found, Are you sure you have installed the slack in your plane workspace?",
});
}
if (text === "") {
const projectList =
await projectService.getProjectsForWorkspace(workspaceId);
const projectPlainTextOption = convertToSlackOptions(projectList);
await slackService.openModal(
req.body.trigger_id,
CreateIssueModalViewProjects(projectPlainTextOption),
);
res.status(200).send("");
} else {
const issue_parts = text.split(" ");
if (issue_parts.length < 3) {
return res.json({
response_type: "ephemeral",
text: "Incorrect format, please use the following format: /takeoff <project-identifier> <issue-title> <issue-description>",
});
}
const projectIdentifier = issue_parts[0].toUpperCase();
const issueTitle = issue_parts[1];
const issueDescription = issue_parts[2];
const project =
await projectService.getProjectByIdentifier(projectIdentifier);
if (!project) {
return res.json({
response_type: "ephemeral",
text: "Project not found",
});
}
const userId = req.body.user_id;
const userInfoResponse = await slackService.getUserInfo(userId);
if (!userInfoResponse) {
return res.json({
response_type: "ephemeral",
text: "Unable to get user info at this moment.",
});
}
const displayedUser = await userInfoResponse.json();
const issueSync = {
args: [],
kwargs: {
data: {
type: "slack.create_issue",
title: issueTitle,
description: issueDescription,
created_by: {
email: displayedUser?.user?.profile?.email,
name: displayedUser?.user?.name,
},
priority: "none",
workspace_id: workspaceId,
project_id: project.id,
assignees: [],
},
},
};
mq.publish(issueSync, "plane.bgtasks.importer_task.import_task");
res.json({
response_type: "ephemeral",
text: "Successfully created issue",
});
}
}
@Post("events")
async handleSlackEvents(req: Request, res: Response) {
const payload = JSON.parse(req.body.payload) as TSlackPayload;
const success = await processSlackPayload(payload);
if (!success) {
return res.json({
response_type: "ephemeral",
text: "Unable to process payload, please try again later.",
});
}
if (success && payload.type === "view_submission") {
return res.send({
response_action: "clear",
});
}
return res.json({
response_type: "ephemeral",
text: "Event Processed Successfully",
});
}
}

View File

@@ -0,0 +1,38 @@
import { uuid, text, boolean, pgTable } from "drizzle-orm/pg-core";
import { relations } from "drizzle-orm";
import { workspaceIntegrations } from "./integrations.schema";
// Drizzle definition of Plane's "users" table, as read by this service.
export const users = pgTable("users", {
  id: uuid("id").primaryKey(),
  username: text("username"),
  email: text("email"),
  firstName: text("first_name"),
  lastName: text("last_name"),
  isActive: boolean("is_active"),
  role: text("role"),
  isBot: boolean("is_bot"),
  displayName: text("display_name"),
});
// Drizzle definition of Plane's "workspaces" table.
export const workspaces = pgTable("workspaces", {
  id: uuid("id").primaryKey(),
  name: text("name"),
  slug: text("slug"),
  createdById: uuid("created_by_id"),
  ownerId: uuid("owner_id"),
});
// Relation metadata so drizzle can eager-load a workspace's creator, owner
// and integration row via `with: { ... }`.
export const workspacesRelations = relations(workspaces, ({ one }) => ({
  createdBy: one(users, {
    fields: [workspaces.createdById],
    references: [users.id],
  }),
  owner: one(users, {
    fields: [workspaces.ownerId],
    references: [users.id],
  }),
  // NOTE(review): modelled as one-to-one; confirm a workspace cannot have
  // multiple workspace_integrations rows.
  workspaceIntegrations: one(workspaceIntegrations, {
    fields: [workspaces.id],
    references: [workspaceIntegrations.workspaceId],
  }),
}));

View File

@@ -0,0 +1,4 @@
export * from "./base.schema";
export * from "./integrations.schema";
export * from "./project.schema";
export * from "./notifications.schema";

View File

@@ -0,0 +1,79 @@
import {
integer,
jsonb,
uuid,
text,
boolean,
pgTable,
} from "drizzle-orm/pg-core";
import { relations } from "drizzle-orm";
import { users, workspaces } from "./base.schema";
// Drizzle definition of Plane's "integrations" table (available providers).
export const integrations = pgTable("integrations", {
  id: uuid("id").primaryKey(),
  title: text("title"),
  provider: text("provider"),
  network: integer("network"),
  description: jsonb("description"),
  author: text("author"),
  webhookUrl: text("webhook_url"),
  webhookSecret: text("webhook_secret"),
  redirectUrl: text("redirect_url"),
  metadata: jsonb("metadata"),
});
// Per-workspace installation of an integration; `metadata`/`config` hold
// provider-specific JSON (e.g. the Slack team info queried elsewhere).
export const workspaceIntegrations = pgTable("workspace_integrations", {
  id: uuid("id").primaryKey(),
  metadata: jsonb("metadata"),
  config: jsonb("config"),
  actorId: uuid("actor_id"),
  apiTokenId: uuid("api_token_id"),
  integrationId: uuid("integration_id"),
  workspaceId: uuid("workspace_id"),
});
// API tokens issued to users/bots for a workspace.
export const apiTokens = pgTable("api_tokens", {
  id: uuid("id").primaryKey(),
  token: text("token"),
  label: text("label"),
  userType: integer("user_type"),
  createdById: uuid("created_by_id"),
  updatedById: uuid("updated_by_id"),
  userId: uuid("user_id"),
  workspaceId: uuid("workspace_id"),
  description: text("description"),
  isActive: boolean("is_active"),
});
// Eager-load helpers for workspace_integrations lookups.
export const workspaceIntegrationsRelations = relations(
  workspaceIntegrations,
  ({ one }) => ({
    actor: one(users, {
      fields: [workspaceIntegrations.actorId],
      references: [users.id],
    }),
    apiToken: one(apiTokens, {
      fields: [workspaceIntegrations.apiTokenId],
      references: [apiTokens.id],
    }),
    integration: one(integrations, {
      fields: [workspaceIntegrations.integrationId],
      references: [integrations.id],
    }),
    workspace: one(workspaces, {
      fields: [workspaceIntegrations.workspaceId],
      references: [workspaces.id],
    }),
  }),
);
// Eager-load helpers for api_tokens lookups.
export const apiTokensRelations = relations(apiTokens, ({ one }) => ({
  user: one(users, {
    fields: [apiTokens.userId],
    references: [users.id],
  }),
  workspace: one(workspaces, {
    fields: [apiTokens.workspaceId],
    references: [workspaces.id],
  }),
}));

View File

@@ -0,0 +1,52 @@
import { uuid, text, jsonb, timestamp, pgTable } from "drizzle-orm/pg-core";
import { relations } from "drizzle-orm";
import { users, workspaces } from "./base.schema";
import { projects } from "./project.schema";
// Drizzle definition of Plane's "notifications" table. `receiverId` and
// `workspaceId` are NOT NULL; everything else is nullable in the source DB.
export const notifications = pgTable("notifications", {
  id: uuid("id").primaryKey(),
  data: jsonb("data"),
  entityIdentifier: uuid("entity_identifier"),
  entityName: text("entity_name"),
  title: text("title"),
  message: jsonb("message"),
  messageHtml: text("message_html"),
  messageStripped: text("message_stripped"),
  sender: text("sender"),
  readAt: timestamp("read_at"),
  snoozedTill: timestamp("snoozed_till"),
  archivedAt: timestamp("archived_at"),
  createdById: uuid("created_by_id"),
  projectId: uuid("project_id"),
  receiverId: uuid("receiver_id").notNull(),
  triggeredById: uuid("triggered_by_id"),
  updatedById: uuid("updated_by_id"),
  workspaceId: uuid("workspace_id").notNull(),
});
// Relations used for eager-loading (`with: { triggeredBy, project, ... }`)
// when building notification messages for the Slack modal.
export const notificationsRelations = relations(notifications, ({ one }) => ({
  receiver: one(users, {
    fields: [notifications.receiverId],
    references: [users.id],
  }),
  triggeredBy: one(users, {
    fields: [notifications.triggeredById],
    references: [users.id],
  }),
  project: one(projects, {
    fields: [notifications.projectId],
    references: [projects.id],
  }),
  updatedBy: one(users, {
    fields: [notifications.updatedById],
    references: [users.id],
  }),
  workspace: one(workspaces, {
    fields: [notifications.workspaceId],
    references: [workspaces.id],
  }),
  createdBy: one(users, {
    fields: [notifications.createdById],
    references: [users.id],
  }),
}));

View File

@@ -0,0 +1,126 @@
import {
integer,
jsonb,
uuid,
text,
boolean,
pgTable,
} from "drizzle-orm/pg-core";
import { relations } from "drizzle-orm";
import { users, workspaces } from "./base.schema";
// Drizzle definition of Plane's "projects" table.
export const projects = pgTable("projects", {
  id: uuid("id").primaryKey(),
  name: text("name"),
  description: text("description"),
  descriptionText: jsonb("description_text"),
  identifier: text("identifier"),
  createdById: uuid("created_by_id"),
  defaultAssigneeId: uuid("default_assignee_id"),
  projectLeadId: uuid("project_lead_id"),
  updatedById: uuid("updated_by_id"),
  workspaceId: uuid("workspace_id"),
  coverImage: text("cover_image"),
  defaultStateId: uuid("default_state_id"),
});
// Workflow states of a project (backlog/started/... groups).
export const states = pgTable("states", {
  id: uuid("id").primaryKey(),
  name: text("name"),
  description: text("description"),
  color: text("color"),
  slug: text("slug"),
  projectId: uuid("project_id"),
  workspaceId: uuid("workspace_id"),
  sequence: integer("sequence"),
  group: text("group"),
  default: boolean("default"),
});
// Membership rows linking users to projects.
export const projectMembers = pgTable("project_members", {
  id: uuid("id").primaryKey(),
  comment: text("comment"),
  role: integer("role"),
  memberId: uuid("member_id"),
  projectId: uuid("project_id"),
  workspaceId: uuid("workspace_id"),
  viewProps: jsonb("view_props"),
  defaultProps: jsonb("default_props"),
  sortOrder: integer("sort_order"),
  isActive: boolean("is_active"),
});
// Issue labels; note the table name is "labels" despite the const name.
export const projectLabels = pgTable("labels", {
  id: uuid("id").primaryKey(),
  name: text("name"),
  description: text("description"),
  projectId: uuid("project_id"),
  workspaceId: uuid("workspace_id"),
  parentId: uuid("parent_id"),
  color: text("color"),
  sortOrder: integer("sort_order"),
});
// NOTE(review): name breaks the camelCase convention used by the sibling
// relations (should be `projectLabelsRelations`); kept because it is exported.
export const projectlabelsRelations = relations(projectLabels, ({ one }) => ({
  project: one(projects, {
    fields: [projectLabels.projectId],
    references: [projects.id],
  }),
  workspace: one(workspaces, {
    fields: [projectLabels.workspaceId],
    references: [workspaces.id],
  }),
  // Self-reference for nested labels.
  parent: one(projectLabels, {
    fields: [projectLabels.parentId],
    references: [projectLabels.id],
  }),
}));
// Eager-load helpers so member queries can join the user row.
export const projectMembersRelations = relations(projectMembers, ({ one }) => ({
  project: one(projects, {
    fields: [projectMembers.projectId],
    references: [projects.id],
  }),
  member: one(users, {
    fields: [projectMembers.memberId],
    references: [users.id],
  }),
  workspace: one(workspaces, {
    fields: [projectMembers.workspaceId],
    references: [workspaces.id],
  }),
}));
export const statesRelations = relations(states, ({ one }) => ({
  project: one(projects, {
    fields: [states.projectId],
    references: [projects.id],
  }),
  workspace: one(workspaces, {
    fields: [states.workspaceId],
    references: [workspaces.id],
  }),
}));
export const projectsRelations = relations(projects, ({ one }) => ({
  createdBy: one(users, {
    fields: [projects.createdById],
    references: [users.id],
  }),
  defaultAssignee: one(users, {
    fields: [projects.defaultAssigneeId],
    references: [users.id],
  }),
  projectLead: one(users, {
    fields: [projects.projectLeadId],
    references: [users.id],
  }),
  updatedBy: one(users, {
    fields: [projects.updatedById],
    references: [users.id],
  }),
  workspace: one(workspaces, {
    fields: [projects.workspaceId],
    references: [workspaces.id],
  }),
}));

View File

@@ -0,0 +1,34 @@
import { drizzle, PostgresJsDatabase } from "drizzle-orm/postgres-js";
import postgres from "postgres";
import * as schema from "./schema";
// logger
import { logger } from "../utils/logger";
/**
 * Process-wide holder of the drizzle database handle.
 * Construction fails loudly (throws) when DATABASE_URL is missing or the
 * connection cannot be established, so `db` is non-null on any instance
 * that was successfully created.
 */
export class DatabaseSingleton {
  private static instance: DatabaseSingleton;
  public db: PostgresJsDatabase<typeof schema> | null = null;

  private constructor() {
    try {
      const connectionString = process.env.DATABASE_URL;
      // The connection string is mandatory configuration.
      if (!connectionString) {
        throw new Error("DATABASE_URL environment variable is not set.");
      }
      this.db = drizzle(postgres(connectionString), { schema });
      logger.info("🛢️ Connected to Database");
    } catch (error) {
      logger.error("Failed to initialize database connection:", error);
      throw new Error("Could not connect to Database");
    }
  }

  /** Returns the shared instance, creating it on first use. */
  public static getInstance(): DatabaseSingleton {
    if (DatabaseSingleton.instance === undefined) {
      DatabaseSingleton.instance = new DatabaseSingleton();
    }
    return DatabaseSingleton.instance;
  }
}

View File

@@ -0,0 +1,16 @@
import { TBlockActionModalPayload, TBlockActionPayload } from "types/slack";
import { handleProjectSelectAction } from "./blockActions/handle-project-select-action";
/**
 * Routes a Slack block-action payload to its specific handler.
 * @returns true when the action was recognised and handled successfully.
 */
export const handleBlockAction = async (
  payload: TBlockActionPayload,
): Promise<boolean> => {
  const actionId = payload.actions[0].action_id;
  if (actionId === "project-select-action") {
    // Fired when a user picks a project from the dropdown in the
    // create-issue modal.
    return handleProjectSelectAction(payload as TBlockActionModalPayload);
  }
  // Any other action id is unhandled.
  return false;
};

View File

@@ -0,0 +1,78 @@
import { ProjectService } from "../../../services/project.service";
import { SlackService } from "../../../services/slack.service";
import { TBlockActionModalPayload } from "../../../types/slack";
import { priority } from "../../../utils/constants";
import { logger } from "../../../utils/logger";
import {
convertToSlackOption,
convertToSlackOptions,
} from "../../../utils/slack/convert-to-slack-options";
import { CreateIssueModalViewFull } from "../../../utils/slack/create-issue-modal";
/**
 * Handles the "project-select-action" block action: when the user picks a
 * project in the create-issue modal, replace the modal with the full
 * create-issue view populated with that project's states, members, labels
 * and priorities.
 * @returns true when the modal was updated successfully.
 */
export const handleProjectSelectAction = async (
  payload: TBlockActionModalPayload,
) => {
  try {
    const slackService = new SlackService(),
      projectService = new ProjectService();
    const teamId = payload.team.id;
    const workspaceId = await slackService.getWorkspaceId(teamId);
    if (!payload.actions[0].selected_option || !workspaceId) {
      return false;
    }
    const viewId = payload.view.id,
      selectedProjectId = payload.actions[0].selected_option.value,
      selectedProjectTitle = payload.actions[0].selected_option.text.text;
    // PERF FIX: these four lookups are independent of one another, so run
    // them concurrently instead of awaiting each sequentially.
    const [states, members, labels, projectList] = await Promise.all([
      projectService.getProjectStates(selectedProjectId),
      projectService.getProjectMembers(selectedProjectId),
      projectService.getProjectLabels(selectedProjectId),
      projectService.getProjectsForWorkspace(workspaceId),
    ]);
    if (!states || !members || !labels || !projectList) {
      return false;
    }
    // Convert the fetched rows into Slack option objects.
    const selectedProjectOption = convertToSlackOption({
      id: selectedProjectId,
      name: selectedProjectTitle,
    });
    const projectOptions = convertToSlackOptions(projectList);
    const stateOptions = convertToSlackOptions(states);
    const labelOptions = convertToSlackOptions(labels);
    const priorityOptions = priority.map((p) =>
      convertToSlackOption({ id: p, name: p.toUpperCase() }),
    );
    // Members may have a dangling join row; fall back to empty strings.
    const assigneeOptions = members.map((m) =>
      convertToSlackOption({
        id: m.member === null ? "" : m.member.id,
        name:
          m.member === null ? "" : m.member.firstName + " " + m.member.lastName,
      }),
    );
    const updatedModalView = CreateIssueModalViewFull({
      projectOptions,
      stateOptions,
      labelOptions,
      priorityOptions,
      assigneeOptions,
      selectedProject: selectedProjectOption,
    });
    const response = await slackService.updateModal(viewId, updatedModalView);
    if (response?.status !== 200) {
      return false;
    }
    return true;
  } catch (err) {
    logger.error(err);
    return false;
  }
};

View File

@@ -0,0 +1,19 @@
import { TSlackPayload } from "types/slack";
import { handleBlockAction } from "./block-action-handler";
import { handleViewClosed } from "./view-close-handler";
import { handleViewSubmission } from "./view-submission-handler";
/**
 * Dispatches an interactive Slack payload to the handler for its type.
 * @returns true when the payload type is supported and was handled.
 */
export const processSlackPayload = async (
  payload: TSlackPayload,
): Promise<boolean> => {
  if (payload.type === "block_actions") {
    return handleBlockAction(payload);
  }
  if (payload.type === "view_submission") {
    return handleViewSubmission(payload);
  }
  if (payload.type === "view_closed") {
    return handleViewClosed(payload);
  }
  // Unsupported payload types are reported as unhandled.
  return false;
};

View File

@@ -0,0 +1,7 @@
import { TViewClosedPayload } from "types/slack";
/**
 * Acknowledges a Slack view_closed event. Nothing needs cleaning up on our
 * side when a modal is dismissed, so the event is simply accepted.
 */
export const handleViewClosed = async (
  payload: TViewClosedPayload,
): Promise<boolean> => {
  void payload; // intentionally unused
  return true;
};

View File

@@ -0,0 +1,13 @@
import { TViewSubmissionPayload } from "../../types/slack";
import { handleModalViewSubmission } from "./view-submissions/modal-view-submit-handler";
/**
 * Routes a Slack view_submission payload by view type.
 * Only modal views are supported at the moment.
 */
export const handleViewSubmission = async (
  payload: TViewSubmissionPayload,
): Promise<boolean> => {
  if (payload.view.type !== "modal") {
    return false;
  }
  return handleModalViewSubmission(payload);
};

View File

@@ -0,0 +1,62 @@
import { MQSingleton } from "../../../mq/singleton";
import { TViewSubmissionPayload } from "../../../types/slack";
import { parseCreateIssueModalSubmission } from "../../../utils/slack/convert-create-issue-submission";
import { SlackService } from "../../../services/slack.service";
/**
 * Handles submission of the create-issue modal: parses the form state,
 * resolves the submitting user's identity and workspace, and queues the
 * issue creation on the background worker.
 * @returns true when the task was queued and the user was notified.
 */
export const handleModalViewSubmission = async (
  payload: TViewSubmissionPayload,
): Promise<boolean> => {
  const stateValues = payload.view.state.values;
  if (!stateValues) {
    return false;
  }
  // Convert the modal's raw form state into a structured submission.
  const submissionData = parseCreateIssueModalSubmission(stateValues);
  const slackService = new SlackService();
  const userInfoResponse = await slackService.getUserInfo(payload.user.id);
  if (!userInfoResponse) {
    return false;
  }
  const displayedUser = await userInfoResponse.json();
  const workspace_id = await slackService.getWorkspaceId(payload.team.id);
  // BUG FIX: previously the task was queued even when no workspace mapping
  // existed, publishing workspace_id: undefined to the worker.
  if (!workspace_id) {
    return false;
  }
  const mq = MQSingleton.getInstance();
  if (!mq) {
    return false;
  }
  const issueSync = {
    args: [], // args
    kwargs: {
      data: {
        type: "slack.create_issue",
        title: submissionData.issueTitle,
        description: submissionData.issueDescription,
        priority: submissionData.priority,
        state_id: submissionData.state?.id,
        assignees: submissionData.assignees ?? [],
        created_by: {
          email: displayedUser?.user?.profile?.email,
          name: displayedUser?.user?.name,
        },
        workspace_id: workspace_id,
        project_id: submissionData.project?.id,
      },
    },
  };
  // Push the issue to the background worker, then confirm to the user.
  await mq.publish(issueSync, "plane.bgtasks.importer_task.import_task");
  await slackService.sendEphemeralMessage(
    payload.user.id,
    "Successfully created issue",
  );
  return true;
};

View File

@@ -0,0 +1,22 @@
import { RequestHandler } from "express";
import { logger } from "../utils/logger";
/**
 * Express middleware that gates requests on a shared API key supplied in
 * the "x-api-key" header and configured via the SEGWAY_KEY env variable.
 * Responds 401 when the key is missing, wrong, or not configured.
 */
const AuthKeyMiddleware: RequestHandler = (req, res, next) => {
  // Retrieve the API key from the request header
  const apiKey = req.headers["x-api-key"];
  // Define the expected API key
  const expectedApiKey = process.env.SEGWAY_KEY;
  // SECURITY FIX: if SEGWAY_KEY was unset and the header absent, both
  // sides were `undefined` and `undefined === undefined` authorized the
  // request. Require a configured, non-empty key before comparing.
  if (expectedApiKey && apiKey === expectedApiKey) {
    // Valid key: proceed with the next middleware or route handler
    next();
  } else {
    // Invalid key: log the error and reject the request
    logger.error("Invalid API key");
    res.status(401).json({ message: "Invalid API key" });
  }
};
export default AuthKeyMiddleware;

View File

@@ -0,0 +1,8 @@
import { RequestHandler } from "express";
import { logger } from "../utils/logger";
// Logs "<METHOD>: <path>" for every incoming request, then passes it on.
const loggerMiddleware: RequestHandler = (req, res, next) => {
  logger.info(req.method + ": " + req.path);
  next();
};
export default loggerMiddleware;

View File

@@ -0,0 +1,99 @@
//uuid
import { v4 as uuidv4 } from "uuid"
// mq
import { Connection, Channel, connect, ConsumeMessage } from "amqplib";
// utils
import { logger } from "../utils/logger";
/**
 * Process-wide RabbitMQ connection/channel holder.
 * Call initialize() once at startup; publish() and consume() throw if the
 * channel is not ready.
 */
export class MQSingleton {
  private static instance: MQSingleton;
  private connection: Connection | null = null;
  public channel: Channel | null = null;

  private constructor() {}

  /** Get the current instance, creating it lazily. */
  public static getInstance(): MQSingleton {
    if (!this.instance) {
      this.instance = new MQSingleton();
    }
    return this.instance;
  }

  /** Connect and open a channel if not already done. */
  public async initialize(): Promise<void> {
    if (!this.connection || !this.channel) {
      await this.init();
    }
  }

  private async init(): Promise<void> {
    const rabbitMqUrl = process.env.RABBITMQ_URL || "";
    try {
      this.connection = await connect(rabbitMqUrl);
      logger.info(`✅ Rabbit MQ Connection is ready`);
      this.channel = await this.connection.createChannel();
      logger.info(`🛸 Created RabbitMQ Channel successfully`);
    } catch (error) {
      // CONSISTENCY FIX: use the shared logger (was console.error).
      // NOTE(review): the failure is swallowed here; later publish/consume
      // calls will throw "Channel not initialized".
      logger.error("Error in initializing RabbitMQ:", error);
    }
  }

  /**
   * Send a celery-style task message on the segway exchange.
   * @param body payload serialized as the message body
   * @param taskName task identifier placed in the message headers
   * @throws Error when the channel has not been initialized.
   */
  public async publish(body: object, taskName: string): Promise<void> {
    if (!this.channel) {
      throw new Error("Channel not initialized");
    }
    // Fixed routing for all outgoing segway tasks.
    const queue = "segway_tasks";
    const exchange = "segway_exchange";
    const routingKey = "segway";
    // Build the message envelope (unique id + task name in headers).
    const msg = {
      contentType: "application/json",
      contentEncoding: "utf-8",
      headers: {
        id: uuidv4(),
        task: taskName,
      },
      body: JSON.stringify(body),
    };
    // Ensure the exchange, queue and binding exist before publishing.
    await this.channel.assertExchange(exchange, "direct", { durable: true });
    await this.channel.assertQueue(queue, { durable: true });
    await this.channel.bindQueue(queue, exchange, routingKey);
    try {
      this.channel.publish(exchange, routingKey, Buffer.from(msg.body), {
        contentType: msg.contentType,
        contentEncoding: msg.contentEncoding,
        headers: msg.headers,
      });
    } catch (error) {
      // CONSISTENCY FIX: use the shared logger (was console.error).
      logger.error("Error publishing message:", error);
    }
  }

  /**
   * Consume messages from `queue`, bound to the django exchange.
   * Messages are auto-acked (noAck: true).
   * @throws Error when the channel has not been initialized.
   */
  public async consume(
    queue: string,
    callback: (msg: ConsumeMessage | null) => void
  ): Promise<void> {
    if (!this.channel) {
      throw new Error("Channel not initialized");
    }
    logger.info("👂 Listening for incoming events");
    const exchange = "django_exchange";
    const routingKey = "django.node";
    await this.channel.assertExchange(exchange, "direct", { durable: true });
    await this.channel.assertQueue(queue, { durable: true });
    await this.channel.bindQueue(queue, exchange, routingKey);
    await this.channel.consume(queue, callback, { noAck: true });
  }
}

161
segway/src/server.ts Normal file
View File

@@ -0,0 +1,161 @@
import dotenv from "dotenv";
import path from "path";
import express from "express";
import { Server } from "@overnightjs/core";
import cors from "cors";
import * as Sentry from "@sentry/node";
import * as Tracing from "@sentry/tracing";
import { PostgresJsDatabase } from "drizzle-orm/postgres-js";
// controllers
import * as controllers from "./controller";
// middlewares
import loggerMiddleware from "./middleware/logger.middleware";
// utils
import { logger } from "./utils/logger";
// db
import { DatabaseSingleton } from "./db/singleton";
// mq
import { MQSingleton } from "./mq/singleton";
class ApiServer extends Server {
private readonly SERVER_STARTED = "🚀 Api server started on port: ";
SERVER_PORT: number;
db: PostgresJsDatabase | null = null;
mq: MQSingleton | null = null; // Declare the channel property
constructor() {
super(true);
// disabling overnight logs
this.showLogs = false;
// enabling env variable from .env file
dotenv.config();
// assigning port
this.SERVER_PORT = process.env.SERVER_PORT
? parseInt(process.env.SERVER_PORT, 10)
: 8080;
// logger
this.app.use(loggerMiddleware);
// exposing public folder for static files.
this.app.use(express.static("public"));
// body parser
this.app.use(express.json());
this.app.use(express.urlencoded({ extended: true }));
// views engine
this.app.set("views", path.join(__dirname, "views"));
this.app.set("view engine", "hbs");
// cors
this.app.use(cors());
// setup mq
this.setupMQ();
// sentry setup
if (
process.env.APP_ENV === "staging" ||
process.env.APP_ENV === "production"
) {
// setting up error logging and tracing.
this.setupSentryInit();
}
// setting up db
this.setupDatabase();
// setting up controllers
this.setupControllers();
// not found page
this.setupNotFoundHandler();
// setting up sentry error handling
this.sentryErrorHandling();
}
// get the current app instance
public getAppInstance() {
return this.app;
}
// Setup the database
private setupDatabase(): void {
this.db = DatabaseSingleton.getInstance().db;
}
// Setup MQ and initialize channel
private setupMQ(): void {
this.mq = MQSingleton.getInstance();
this.startMQAndWorkers().catch((error) =>
logger.error("Error in startMQAndWorkers:", error)
);
}
// Start mq and workers
private async startMQAndWorkers(): Promise<void> {
try {
await this.mq?.initialize();
} catch (error) {
logger.error("Failed to initialize MQ:", error);
}
}
// setup all the controllers
private setupControllers(): void {
const controllerInstances = [];
for (const name in controllers) {
if (Object.prototype.hasOwnProperty.call(controllers, name)) {
const Controller = (controllers as any)[name];
controllerInstances.push(new Controller(this.db, this.mq));
}
}
super.addControllers(controllerInstances);
}
// This controller will return 404 for not found pages
private setupNotFoundHandler(): void {
this.app.use((req, res) => {
res.status(404).json({
status: "error",
message: "Not Found",
path: req.path,
});
});
}
private setupSentryInit() {
Sentry.init({
dsn: process.env.SENTRY_DSN,
integrations: [
// enable HTTP calls tracing
new Sentry.Integrations.Http({ tracing: true }),
// enable Express.js middleware tracing
new Tracing.Integrations.Express({ app: this.app }),
],
// Set tracesSampleRate to 1.0 to capture 100%
// of transactions for performance monitoring.
// We recommend adjusting this value in production
tracesSampleRate: 1.0,
});
// RequestHandler creates a separate execution context using domains, so that every
// transaction/span/breadcrumb is attached to its own Hub instance
this.app.use(Sentry.Handlers.requestHandler());
// TracingHandler creates a trace for every incoming request
this.app.use(Sentry.Handlers.tracingHandler());
}
private sentryErrorHandling() {
// The error handler must be before any other error middleware and after all controllers
this.app.use(Sentry.Handlers.errorHandler());
this.app.use(function onError(req, res: any) {
// The error id is attached to `res.sentry` to be returned
// and optionally displayed to the user for support.
res.statusCode = 500;
res.end(res.sentry + "\n");
});
}
public start(port: number): void {
this.app.listen(port, () => {
logger.info(this.SERVER_STARTED + port);
});
}
}
export default ApiServer;

View File

@@ -0,0 +1 @@
// Placeholder for future issue-related operations; intentionally empty.
class IssueService {}

View File

@@ -0,0 +1,92 @@
import { eq, sql } from "drizzle-orm";
import { DatabaseSingleton } from "../db/singleton";
import {
projectLabels,
projectMembers,
projects,
states,
} from "../db/schema/project.schema";
/**
 * Read-only queries over Plane's project tables (projects, states,
 * members, labels). All methods throw "Database not found" when the
 * database handle is unavailable.
 */
export class ProjectService {
  /**
   * Looks up a single project by its identifier string.
   * @returns the project row, or undefined when no project matches.
   */
  async getProjectByIdentifier(identifier: string) {
    const db = DatabaseSingleton.getInstance().db;
    if (!db) {
      throw new Error("Database not found");
    }
    try {
      const project = await db.query.projects.findFirst({
        where: eq(projects.identifier, identifier),
      });
      return project;
    } catch (error) {
      // BUG FIX: previously rethrew "Database not found" here, masking the
      // real cause of a failed query.
      throw new Error(`Failed to fetch project with identifier ${identifier}`);
    }
  }

  /** Lists { id, name } of every project in a workspace. */
  async getProjectsForWorkspace(workspaceId: string) {
    const db = DatabaseSingleton.getInstance().db;
    if (!db) {
      throw new Error("Database not found");
    }
    // CONSISTENCY FIX: use eq() like every other query in this service
    // (was a raw sql`` template), and await the result here.
    const selectedProjects = await db
      .select({
        name: projects.name,
        id: projects.id,
      })
      .from(projects)
      .where(eq(projects.workspaceId, workspaceId));
    return selectedProjects;
  }

  /** Lists the workflow states configured for a project. */
  async getProjectStates(projectId: string) {
    const db = DatabaseSingleton.getInstance().db;
    if (!db) {
      throw new Error("Database not found");
    }
    const projectStates = await db.query.states.findMany({
      where: eq(states.projectId, projectId),
    });
    return projectStates;
  }

  /** Lists a project's membership rows with the joined user row. */
  async getProjectMembers(projectId: string) {
    const db = DatabaseSingleton.getInstance().db;
    if (!db) {
      throw new Error("Database not found");
    }
    try {
      const members = await db.query.projectMembers.findMany({
        where: eq(projectMembers.projectId, projectId),
        with: { member: true },
      });
      return members;
    } catch (error) {
      // BUG FIX: accurate error instead of "Database not found".
      throw new Error(`Failed to fetch members for project ${projectId}`);
    }
  }

  /** Lists a project's labels. */
  async getProjectLabels(projectId: string) {
    const db = DatabaseSingleton.getInstance().db;
    if (!db) {
      throw new Error("Database not found");
    }
    try {
      const labels = await db.query.projectLabels.findMany({
        where: eq(projectLabels.projectId, projectId),
      });
      return labels;
    } catch (error) {
      // BUG FIX: accurate error instead of "Database not found".
      throw new Error(`Failed to fetch labels for project ${projectId}`);
    }
  }
}

View File

@@ -0,0 +1,190 @@
import { logger } from "@sentry/utils";
import { DatabaseSingleton } from "../db/singleton";
import {
integrations,
workspaceIntegrations,
} from "../db/schema/integrations.schema";
import { gte, sql } from "drizzle-orm";
import { TSlackConfig, TSlackMetadata } from "types/slack";
/**
 * Thin wrapper over Slack's Web API and the stored workspace-integration
 * rows: resolves credentials/metadata from the database and performs
 * message/modal calls with the stored bot token.
 */
export class SlackService {
  /**
   * Resolves the Plane workspace linked to a Slack team id via the stored
   * OAuth metadata. Returns undefined when no matching integration exists.
   */
  async getWorkspaceId(teamId: string): Promise<string | undefined> {
    const db = DatabaseSingleton.getInstance().db;
    if (!db) {
      // Was a bare console.log; route through the shared logger like the
      // rest of this class.
      logger.error("SlackService.getWorkspaceId: database not initialized");
      return;
    }
    // NOTE(review): `gte` on a JSON metadata column looks like a containment
    // match was intended — confirm this comparison selects the right row.
    const workspace = await db.query.workspaceIntegrations.findFirst({
      where: gte(workspaceIntegrations.metadata, { team: { id: teamId } }),
    });
    if (workspace && workspace.workspaceId) {
      return workspace.workspaceId;
    }
  }

  /** Posts an ephemeral message (visible only to `userId`) to the configured channel. */
  async sendEphemeralMessage(userId: string, text: string) {
    const slackConfig = await this.getConfig();
    const slackMetadata = await this.getMetadata();
    if (!slackConfig || !slackMetadata) {
      return;
    }
    try {
      await fetch("https://slack.com/api/chat.postEphemeral", {
        method: "POST",
        body: JSON.stringify({
          user: userId,
          channel: slackMetadata.slackMetadata.incoming_webhook.channel_id,
          text: text,
        }),
        headers: {
          authorization: `Bearer ${slackConfig.slackConfig.access_token}`,
          "content-type": "application/json",
        },
      });
    } catch (error) {
      logger.error(error);
    }
  }

  /** Fetches a Slack user's profile; returns the raw fetch Response, or undefined on missing config. */
  async getUserInfo(userId: string) {
    const slackConfig = await this.getConfig();
    if (!slackConfig) {
      return;
    }
    const slackAccessToken = slackConfig.slackConfig.access_token;
    try {
      return await fetch(`https://slack.com/api/users.info?user=${userId}`, {
        method: "GET",
        headers: {
          "content-type": "application/json",
          authorization: `Bearer ${slackAccessToken}`,
        },
      });
    } catch (error) {
      logger.error(error);
    }
  }

  /** Looks up the integration row whose provider is 'slack'. */
  async getIntegration(): Promise<{ integrationId: string } | void> {
    const db = DatabaseSingleton.getInstance().db;
    if (db) {
      const slackIntegration = await db
        .select({
          integrationId: integrations.id,
        })
        .from(integrations)
        .where(sql`${integrations.provider} = 'slack'`);
      if (slackIntegration.length > 0) {
        return slackIntegration[0];
      }
    }
  }

  /** Reads the stored OAuth config (team id + access token) for the Slack integration. */
  async getConfig(): Promise<{ slackConfig: TSlackConfig } | void> {
    const db = DatabaseSingleton.getInstance().db;
    const integration = await this.getIntegration();
    if (db && integration) {
      const slackConfig = await db
        .select({
          slackConfig: workspaceIntegrations.config,
        })
        .from(workspaceIntegrations)
        .where(
          sql`${workspaceIntegrations.integrationId} = ${integration.integrationId}`,
        );
      if (slackConfig.length > 0) {
        return slackConfig[0] as { slackConfig: TSlackConfig };
      }
    }
  }

  /** Reads the stored OAuth metadata (webhook, team, scopes) for the Slack integration. */
  async getMetadata(): Promise<{ slackMetadata: TSlackMetadata } | void> {
    const db = DatabaseSingleton.getInstance().db;
    const integration = await this.getIntegration();
    if (db && integration) {
      const slackMetadata = await db
        .select({
          slackMetadata: workspaceIntegrations.metadata,
        })
        .from(workspaceIntegrations)
        .where(
          sql`${workspaceIntegrations.integrationId} = ${integration.integrationId}`,
        );
      if (slackMetadata.length > 0) {
        return slackMetadata[0] as { slackMetadata: TSlackMetadata };
      }
    }
  }

  /** Posts a message template to the stored incoming-webhook URL. */
  async sendMessage(template: { text: string; blocks: any[] }) {
    const slackMetadata = await this.getMetadata();
    if (!slackMetadata) {
      return;
    }
    const slackWebhookUrl = slackMetadata?.slackMetadata.incoming_webhook.url;
    try {
      await fetch(slackWebhookUrl, {
        method: "POST",
        body: JSON.stringify(template),
        headers: {
          "content-type": "application/json",
        },
      });
    } catch (error) {
      logger.error(error);
    }
  }

  /** Opens a modal view for the interaction identified by `triggerId`. */
  async openModal(triggerId: string, modal: any) {
    const slackConfig = await this.getConfig();
    if (!slackConfig) {
      return;
    }
    const slackAccessToken = slackConfig.slackConfig.access_token;
    try {
      return await fetch("https://slack.com/api/views.open", {
        method: "POST",
        body: JSON.stringify({
          trigger_id: triggerId,
          view: modal,
        }),
        headers: {
          "content-type": "application/json",
          authorization: `Bearer ${slackAccessToken}`,
        },
      });
    } catch (error) {
      logger.error(error);
    }
  }

  /** Replaces an open modal view in place (views.update). */
  async updateModal(viewId: string, updatedModal: any) {
    const slackConfig = await this.getConfig();
    if (!slackConfig) {
      return;
    }
    const slackAccessToken = slackConfig.slackConfig.access_token;
    try {
      return await fetch("https://slack.com/api/views.update", {
        method: "POST",
        body: JSON.stringify({
          view_id: viewId,
          view: updatedModal,
        }),
        // Header names normalized to lowercase for consistency with the
        // other methods (HTTP header names are case-insensitive).
        headers: {
          "content-type": "application/json",
          authorization: `Bearer ${slackAccessToken}`,
        },
      });
    } catch (error) {
      // Dropped the stray console.log(error) debug line; logger is enough.
      logger.error(error);
    }
  }
}

5
segway/src/start.ts Normal file
View File

@@ -0,0 +1,5 @@
import ApiServer from "./server";
// Entry point: construct the API server and bind it to its configured port.
const apiServer = new ApiServer();
// starting server
apiServer.start(apiServer.SERVER_PORT);

147
segway/src/types/slack.ts Normal file
View File

@@ -0,0 +1,147 @@
import { type } from "os";
/** Slack OAuth credentials stored in the workspace-integration config column. */
export type TSlackConfig = {
  team_id: string;
  access_token: string;
};
/** Full OAuth v2 access response persisted as workspace-integration metadata. */
export type TSlackMetadata = {
  ok: boolean;
  team: {
    id: string;
    name: string;
  };
  scope: string;
  app_id: string;
  enterprise?: any;
  token_type: string;
  authed_user: {
    id: string;
  };
  bot_user_id: string;
  access_token: string;
  incoming_webhook: Incomingwebhook;
  is_enterprise_install: boolean;
};
/** Webhook target granted via the incoming-webhook OAuth scope. */
export type Incomingwebhook = {
  url: string;
  channel: string;
  channel_id: string;
  configuration_url: string;
};
/** Union of interactive payloads Slack posts to the interactivity endpoint. */
export type TSlackPayload =
  | TBlockActionPayload
  | TViewSubmissionPayload
  | TViewClosedPayload;
/** Sent when a user interacts with a block element (button, select, …). */
export type TBlockActionPayload = {
  type: "block_actions";
  team: ISlackTeam;
  user: ISlackUser;
  api_app_id: string;
  token: string;
  hash: string;
  interactivity: any;
  bot_access_token: string;
  container: ISlackContainer;
  trigger_id: string;
  actions: ISlackAction[];
};
/** Sent when a user submits a modal view. */
export type TViewSubmissionPayload = {
  type: "view_submission";
  team: ISlackTeam;
  user: ISlackUser;
  api_app_id: string;
  token: string;
  trigger_id: string;
  view: ISlackView;
};
/** Sent when a modal with notify_on_close is dismissed. */
export type TViewClosedPayload = {
  type: "view_closed";
};
/** Block action that originated from a message (container carries the channel). */
export type TBlockActionMessagePayload = TBlockActionPayload & {
  message: ISlackMessage;
  channel_id: string;
};
/** Block action that originated from inside an open modal view. */
export type TBlockActionModalPayload = TBlockActionPayload & {
  view: ISlackView;
};
/** A Slack modal/home view as echoed back in interaction payloads. */
export interface ISlackView {
  id: string;
  team_id: string;
  type: string;
  blocks: any[];
  private_metadata: string;
  callback_id: string;
  // Current input values, keyed by block id then action id.
  state: {
    values: Record<string, any>;
  };
  hash: string;
  title: ISlackText;
  clear_on_close: boolean;
  notify_on_close: boolean;
  close: ISlackText;
  submit: ISlackText;
  previous_view_id?: any;
  root_view_id: string;
  app_id: string;
  external_id: string;
  app_installed_team_id: string;
  bot_id: string;
}
export interface ISlackTeam {
  id: string;
  domain: string;
}
export interface ISlackUser {
  id: string;
  username: string;
  team_id: string;
}
/** Where the interaction happened (message, view, …). */
export interface ISlackContainer {
  type: string;
  message_ts: string;
  attachment_id: number;
  channel_id: string;
  is_ephemeral: boolean;
  is_app_unfurl: boolean;
}
export interface ISlackChannel {
  id: string;
  name: string;
}
export interface ISlackMessage {
  bot_id: string;
  type: string;
  text: string;
  user: string;
  ts: string;
}
/** One element interaction inside a block_actions payload. */
export interface ISlackAction {
  action_id: string;
  block_id: string;
  // Present for static_select interactions.
  selected_option?: {
    text: ISlackText;
    value: string;
  };
  type: string;
  action_ts: string;
}
/** Slack plain_text/mrkdwn text object. */
export interface ISlackText {
  type: string;
  text: string;
  emoji: boolean;
}

View File

@@ -0,0 +1 @@
export const priority = ["urgent", "high", "medium", "low", "none"];

View File

@@ -0,0 +1,30 @@
/**
 * Builds a one-line human-readable summary of a notification, e.g.
 * "Ada set priority to high" or "Bot commented hi".
 */
export function generateNotificationMessage(notification: any) {
  const activity = notification.data.issue_activity;
  // Bots are identified by first name, humans by display name.
  const actor = notification.triggeredBy.is_bot
    ? notification.triggeredBy.firstName
    : notification.triggeredBy.displayName;

  const parts: any[] = [`${actor} `];
  if (activity.field !== "comment" && activity.verb) {
    parts.push(`${activity.verb} `);
  }
  if (activity.field === "comment") {
    parts.push("commented ");
  } else if (activity.field !== "None") {
    parts.push(`${replaceUnderscoreIfSnakeCase(activity.field)} to `);
  }
  parts.push(
    activity.new_value
      ? activity.new_value
      : "the issue and assigned it to you.",
  );
  return parts.join("");
}
// Turns snake_case field names into space-separated words ("due_date" → "due date").
export const replaceUnderscoreIfSnakeCase = (str: string) =>
  str.split("_").join(" ");

View File

@@ -0,0 +1,25 @@
import { Octokit } from 'octokit'
import { createAppAuth } from '@octokit/auth-app'
/**
 * Builds an Octokit client authenticated as a GitHub App installation.
 * Requires GITHUB_APP_ID and GITHUB_APP_PRIVATE_KEY in the environment.
 */
export const getOctokit = async (installationId: number): Promise<Octokit> => {
  const appId = process.env.GITHUB_APP_ID || "";
  const privateKey = process.env.GITHUB_APP_PRIVATE_KEY || "";
  // Fail fast when either credential is missing or empty.
  if (!privateKey || !appId) {
    throw new Error("Private key and App ID not found in environment variables.");
  }
  // Authenticate as the app installation rather than a personal token.
  return new Octokit({
    authStrategy: createAppAuth,
    auth: { appId, privateKey, installationId },
  });
};

View File

@@ -0,0 +1,8 @@
import winston from "winston";
// Application-wide Winston logger: writes to the console and persists
// everything to combined.log in the working directory.
export const logger = winston.createLogger({
  transports: [
    new winston.transports.Console(),
    new winston.transports.File({ filename: "combined.log" }),
  ],
});

View File

@@ -0,0 +1,73 @@
import axios from "axios";
/**
 * Async generator that pages through a Jira issue-search endpoint, yielding
 * one response page at a time until a page arrives with no issues.
 */
async function* pageThroughIssues(endpoint: string, auth: any) {
  // Page step appended to startAt for the next request.
  // NOTE(review): assumes the server pages in blocks of 100 — if the
  // endpoint's maxResults differs, items could be skipped; verify.
  const PAGE_SIZE = 100;
  async function* makeRequest(_endpoint: string): AsyncGenerator<any> {
    const response = await axios({
      url: _endpoint,
      method: "get",
      auth: auth,
    });
    if (response.status !== 200) {
      // statusText is a plain string — the previous `await` on it was a no-op.
      throw new Error(response.statusText);
    }
    // response.data is already resolved; no await needed.
    const page = response.data;
    yield page;
    if (page.issues.length) {
      // Next page is built from the *base* endpoint, not the previous URL,
      // so only one startAt parameter is ever appended.
      yield* makeRequest(`${endpoint}&startAt=${page.startAt + PAGE_SIZE}`);
    }
  }
  yield* makeRequest(endpoint);
}
/** Flattens the paged Jira search results into a stream of individual issues. */
export async function* loadIssues(url: any, auth: any) {
  for await (const page of pageThroughIssues(url, auth)) {
    yield* page.issues;
  }
}
/**
 * Async generator that pages through a Jira comments endpoint, yielding one
 * response page at a time until a page arrives with no comments.
 */
async function* pageThroughComments(endpoint: any, auth: any) {
  // NOTE(review): assumes the server pages in blocks of 100 — verify against
  // the endpoint's maxResults.
  const PAGE_SIZE = 100;
  async function* makeRequest(_endpoint: string): AsyncGenerator<any> {
    const response = await axios({
      url: _endpoint,
      method: "get",
      auth: auth,
    });
    if (response.status !== 200) {
      // statusText is a plain string — the previous `await` on it was a no-op.
      throw new Error(response.statusText);
    }
    // response.data is already resolved; no await needed.
    const page = response.data;
    yield page;
    if (page.comments.length) {
      yield* makeRequest(`${endpoint}&startAt=${page.startAt + PAGE_SIZE}`);
    }
  }
  yield* makeRequest(endpoint);
}
/** Flattens the paged Jira comment results into a stream of individual comments. */
export async function* loadComments(url: any, auth: any) {
  for await (const page of pageThroughComments(url, auth)) {
    yield* page.comments;
  }
}

View File

@@ -0,0 +1,68 @@
import { PlainTextOption } from "./create-issue-modal";
/**
 * Slack view state is keyed by block id, then action id. Scans every block
 * for `actionId` and returns its value object, or null when not present.
 */
function extractFromActionId(
  stateValues: Record<string, any>,
  actionId: string,
) {
  for (const block of Object.values(stateValues)) {
    const action = block[actionId];
    if (action) {
      return action;
    }
  }
  return null;
}
/**
 * Translates the raw `view.state.values` of the create-issue modal
 * submission into a normalized issue payload. Missing selections become
 * null / "" / [] / "none" defaults.
 */
export function parseCreateIssueModalSubmission(
  stateValues: Record<string, any>,
) {
  const projectAction = extractFromActionId(stateValues, "project-select-action");
  const titleAction = extractFromActionId(stateValues, "sl_input");
  const descriptionAction = extractFromActionId(stateValues, "ml_input");
  const assigneesAction = extractFromActionId(
    stateValues,
    "multi_static_select-action",
  );
  const stateAction = extractFromActionId(stateValues, "state-select-action");
  const priorityAction = extractFromActionId(stateValues, "priority-select-action");

  // Shared shape for single static-select values.
  const toIdName = (action: any) =>
    action && action.selected_option
      ? {
          id: action.selected_option.value,
          name: action.selected_option.text.text,
        }
      : null;

  return {
    project: toIdName(projectAction),
    issueTitle: titleAction && titleAction.value ? titleAction.value : "",
    issueDescription:
      descriptionAction && descriptionAction.value ? descriptionAction.value : "",
    assignees:
      assigneesAction && assigneesAction.selected_options
        ? assigneesAction.selected_options.map((option: PlainTextOption) => ({
            id: option.value,
            name: option.text.text,
          }))
        : [],
    state: toIdName(stateAction),
    priority:
      priorityAction && priorityAction.selected_option
        ? priorityAction.selected_option.value
        : "none",
  };
}

View File

@@ -0,0 +1,20 @@
import { PlainTextOption } from "./create-issue-modal";
/**
 * Maps an { id, name } record to a Slack plain_text option.
 * A null name becomes an empty label.
 */
export const convertToSlackOption = (point: {
  id: string;
  name: string | null;
}): PlainTextOption => {
  const label = point.name ?? "";
  return {
    text: {
      type: "plain_text",
      text: label,
      emoji: true,
    },
    value: point.id,
  };
};
/** Bulk variant of convertToSlackOption for lists of { id, name } records. */
export const convertToSlackOptions = (
  data: Array<{
    id: string;
    name: string | null;
  }>,
): Array<PlainTextOption> => {
  const options: Array<PlainTextOption> = [];
  for (const point of data) {
    options.push(convertToSlackOption(point));
  }
  return options;
};

View File

@@ -0,0 +1,229 @@
/**
 * Full create-issue modal: project, title, description, state, priority and
 * assignee inputs. dispatch_action on the project select re-triggers the app
 * so the modal can be rebuilt with that project's states/members.
 * NOTE(review): the labels input is commented out below, so `labelOptions`
 * is currently unused — confirm whether it should be re-enabled or dropped.
 */
export const CreateIssueModalViewFull = ({
  selectedProject,
  projectOptions,
  stateOptions,
  priorityOptions,
  labelOptions,
  assigneeOptions,
}: {
  selectedProject: PlainTextOption;
  projectOptions: Array<PlainTextOption>;
  stateOptions: Array<PlainTextOption>;
  priorityOptions: Array<PlainTextOption>;
  labelOptions: Array<PlainTextOption>;
  assigneeOptions: Array<PlainTextOption>;
}) => ({
  type: "modal",
  title: {
    type: "plain_text",
    text: "Create Issue",
    emoji: true,
  },
  submit: {
    type: "plain_text",
    text: "Create Issue",
    emoji: true,
  },
  close: {
    type: "plain_text",
    text: "Discard Issue",
    emoji: true,
  },
  blocks: [
    // Project picker — pre-selected with the current project.
    {
      dispatch_action: true,
      type: "input",
      element: {
        type: "static_select",
        placeholder: {
          type: "plain_text",
          text: "Select a Project",
          emoji: true,
        },
        initial_option: selectedProject,
        options: projectOptions,
        action_id: "project-select-action",
      },
      label: {
        type: "plain_text",
        text: "Project",
        emoji: true,
      },
    },
    // Issue title (required single-line input).
    {
      type: "input",
      element: {
        type: "plain_text_input",
        action_id: "sl_input",
        placeholder: {
          type: "plain_text",
          text: "Issue Title",
        },
      },
      label: {
        type: "plain_text",
        text: "Title",
        emoji: true,
      },
    },
    // Optional multiline description.
    {
      type: "input",
      optional: true,
      element: {
        type: "plain_text_input",
        action_id: "ml_input",
        multiline: true,
        placeholder: {
          type: "plain_text",
          text: "Issue Description (Optional)",
        },
      },
      label: {
        type: "plain_text",
        text: "Description",
      },
    },
    // Workflow state picker.
    {
      type: "input",
      element: {
        type: "static_select",
        placeholder: {
          type: "plain_text",
          text: "Select a State",
          emoji: true,
        },
        options: stateOptions,
        action_id: "state-select-action",
      },
      label: {
        type: "plain_text",
        text: "State",
        emoji: true,
      },
    },
    // Optional priority picker.
    {
      type: "input",
      optional: true,
      element: {
        type: "static_select",
        placeholder: {
          type: "plain_text",
          text: "Select a Priority (Optional)",
          emoji: true,
        },
        options: priorityOptions,
        action_id: "priority-select-action",
      },
      label: {
        type: "plain_text",
        text: "Priority",
        emoji: true,
      },
    },
    // Labels input — intentionally disabled for now.
    // {
    //   type: "input",
    //   element: {
    //     type: "multi_static_select",
    //     placeholder: {
    //       type: "plain_text",
    //       text: "Labels (Optional)",
    //       emoji: true,
    //     },
    //     options: labelOptions,
    //     action_id: "multi_static_select-action",
    //   },
    //   label: {
    //     type: "plain_text",
    //     text: "Labels",
    //     emoji: true,
    //   },
    // },
    // Optional multi-select of assignees.
    {
      type: "input",
      optional: true,
      element: {
        type: "multi_static_select",
        placeholder: {
          type: "plain_text",
          text: "Assignees (Optional)",
          emoji: true,
        },
        options: assigneeOptions,
        action_id: "multi_static_select-action",
      },
      label: {
        type: "plain_text",
        text: "Assignees",
        emoji: true,
      },
    },
  ],
});
/** A Slack static-select option: plain_text label plus the submitted value. */
export type PlainTextOption = {
  text: {
    type: "plain_text";
    text: string;
    emoji: true;
  };
  value: string;
};
/**
 * Minimal first-step modal containing only the project picker. Selecting a
 * project dispatches "project-select-action" so the app can re-render the
 * full form; there is deliberately no submit button at this stage.
 */
export const CreateIssueModalViewProjects = (
  projects: Array<PlainTextOption>,
) => {
  const projectPicker = {
    dispatch_action: true,
    type: "input",
    element: {
      type: "static_select",
      placeholder: {
        type: "plain_text",
        text: "Select a Project",
        emoji: true,
      },
      options: projects,
      action_id: "project-select-action",
    },
    label: {
      type: "plain_text",
      text: "Project",
      emoji: true,
    },
  };
  return {
    type: "modal",
    title: {
      type: "plain_text",
      text: "Create Issue",
      emoji: true,
    },
    close: {
      type: "plain_text",
      text: "Discard Issue",
      emoji: true,
    },
    blocks: [projectPicker],
  };
};
/**
 * Builds a Slack modal listing notification summaries, one section per entry.
 * The view `type` must be "modal" — Slack's views.open rejects any other
 * value (the previous "Notifications" type was invalid). Also fixes the
 * misspelled parameter name (positional, so callers are unaffected).
 */
export const notificationModal = (notificationsText: string[]) => ({
  type: "modal",
  title: {
    type: "plain_text",
    text: "Plane",
    emoji: true,
  },
  close: {
    type: "plain_text",
    text: "Cancel",
    emoji: true,
  },
  blocks: notificationsText.map((notification: string) => ({
    type: "section",
    text: {
      type: "plain_text",
      text: notification,
      emoji: true,
    },
  })),
});

View File

@@ -0,0 +1,229 @@
// Inserts a space at every lower→upper boundary ("InProgress" stays,
// "inProgress" → "in Progress").
export const addSpaceIfCamelCase = (str: string) =>
  str.replace(/([a-z])([A-Z])/g, (_match, lower, upper) => `${lower} ${upper}`);
// Removes anything shaped like an HTML/XML tag (<...> with 1+ chars inside).
export const stripHtmlTags = (htmlString: string) => {
  const tagPattern = /<[^>]+>/g;
  return htmlString.replace(tagPattern, "");
};
/**
 * Formats a date as e.g. "25 Dec" (UK day-before-month order).
 * "en-UK" is not a valid BCP 47 tag — the United Kingdom's region subtag is
 * GB — so Intl silently fell back to the root "en" locale and produced
 * US-style "Dec 25". "en-GB" yields the intended "25 Dec".
 */
export const renderShortNumericDateFormat = (date: string | Date) =>
  new Date(date).toLocaleDateString("en-GB", {
    day: "numeric",
    month: "short",
  });
// Default action phrase per activity field. Fields whose phrasing depends on
// the activity's values (labels, assignees, cycles, …) are overridden inside
// issueActivitySummary; `labels` intentionally has no default message.
const activityDetails: {
  [key: string]: {
    message?: string;
  };
} = {
  blocked_by: {
    message: "marked this issue being blocked by",
  },
  blocking: {
    message: "marked this issue is blocking",
  },
  cycles: {
    message: "set the cycle to",
  },
  labels: {},
  modules: {
    message: "set the module to",
  },
  state: {
    message: "set the state to",
  },
  priority: {
    message: "set the priority to",
  },
  name: {
    message: "edited the name to",
  },
  description: {
    message: "updated the description.",
  },
  start_date: {
    message: "set the start date to",
  },
  target_date: {
    message: "set the due date to",
  },
  parent: {
    message: "set the parent to",
  },
  estimate_point: {
    message: "set the estimate point to",
  },
  link: {
    message: "updated the link",
  },
  attachment: {
    message: "updated the attachment",
  },
  comment: {
    message: "added a comment on the issue",
  },
};
/**
 * Builds a human-readable summary line for one issue-activity record:
 * "<actor first> <actor last> <action> <value>".
 * Returns { activityString, plainTextActivityString }; the plain-text
 * variant is only populated when `plainText` is set (currently only the
 * "link" field, whose rich form embeds a Slack-style <url|Link>).
 * NOTE(review): declared async but contains no awaits — callers receive a
 * Promise; left as-is to preserve the interface.
 */
export const issueActivitySummary = async (activityItem) => {
  // Start from the field's default phrase; many fields override it below
  // based on verb / old vs new value.
  let action =
    activityDetails[activityItem.field as keyof typeof activityDetails]
      ?.message;
  let plainText = "";
  if (activityItem.field === "labels") {
    action =
      activityItem.new_value !== "" ? "added a new label" : "removed the label";
  } else if (activityItem.field === "assignees") {
    action =
      activityItem.new_value !== ""
        ? "added a new assignee"
        : "removed the assignee";
  } else if (activityItem.field === "blocking") {
    action =
      activityItem.new_value !== ""
        ? "marked this issue is blocking"
        : "removed the issue from blocking";
  } else if (activityItem.field === "blocked_by") {
    action =
      activityItem.new_value !== ""
        ? "marked this issue being blocked by"
        : "removed blocker";
  } else if (activityItem.field === "relates_to") {
    action =
      activityItem.new_value !== ""
        ? "marked this issue being related to"
        : "removed related issue";
  } else if (activityItem.field === "duplicate") {
    action =
      activityItem.new_value !== ""
        ? "marked this issue being duplicate to"
        : "removed duplicate issue";
  } else if (activityItem.field === "target_date") {
    action =
      activityItem.new_value && activityItem.new_value !== ""
        ? "set the due date to"
        : "removed the due date";
  } else if (activityItem.field === "parent") {
    action =
      activityItem.new_value && activityItem.new_value !== ""
        ? "set the parent to"
        : "removed the parent";
  } else if (activityItem.field === "priority") {
    action =
      activityItem.new_value && activityItem.new_value !== ""
        ? "set the priority to"
        : "removed the priority";
  } else if (activityItem.field === "description") {
    action = "updated the";
  } else if (activityItem.field === "attachment") {
    action = `${activityItem.verb} an`;
  } else if (activityItem.field === "link") {
    // NOTE(review): both ternary branches are identical — one of them was
    // probably meant to differ (e.g. "updated a"); confirm intent.
    action =
      activityItem.verb === "created"
        ? `${activityItem.verb} a`
        : `${activityItem.verb} a`;
  } else if (activityItem.field === "estimate_point") {
    action = "set the estimate point to";
  } else if (activityItem.field === "cycles") {
    action =
      activityItem.new_value !== ""
        ? "set the cycle to"
        : "removed it from cycle";
  } else if (activityItem.field === "modules") {
    action =
      activityItem.new_value !== ""
        ? "set the module to"
        : "removed it from module";
  } else if (activityItem.field === "comment") {
    action =
      activityItem.verb === "updated"
        ? "edited a comment on the issue"
        : "added a comment on the issue";
  }
  // Pick the value to display: prefer the new value, fall back to the old.
  let value: any = activityItem.new_value
    ? activityItem.new_value
    : activityItem.old_value;
  // for values that are after the action clause
  if (
    activityItem.verb === "created" &&
    activityItem.field !== "cycles" &&
    activityItem.field !== "modules" &&
    activityItem.field !== "attachment" &&
    activityItem.field !== "link" &&
    activityItem.field !== "estimate_point" &&
    activityItem.field !== "comment" &&
    activityItem.field !== "blocking" &&
    activityItem.field !== "blocked_by" &&
    activityItem.field !== "duplicate" &&
    activityItem.field !== "relates_to"
  ) {
    value = "created this issue.";
  } else if (activityItem.field === "state") {
    value = activityItem.new_value
      ? addSpaceIfCamelCase(activityItem.new_value)
      : "None";
  } else if (activityItem.field === "labels") {
    let name;
    // `id` defaults to a black color code when no identifier is recorded;
    // only `name` feeds into the final string.
    let id = "#000000";
    if (activityItem.new_value !== "") {
      name = activityItem.new_value;
      id = activityItem.new_identifier ? activityItem.new_identifier : id;
    } else {
      name = activityItem.old_value;
      id = activityItem.old_identifier ? activityItem.old_identifier : id;
    }
    value = name;
  } else if (activityItem.field === "assignees") {
    value = activityItem.new_value;
  } else if (activityItem.field === "target_date") {
    const date =
      activityItem.new_value && activityItem.new_value !== ""
        ? activityItem.new_value
        : activityItem.old_value;
    value = renderShortNumericDateFormat(date as string);
  } else if (activityItem.field === "description") {
    value = "description";
  } else if (activityItem.field === "attachment") {
    value = "attachment";
  } else if (activityItem.field === "cycles") {
    if (activityItem.verb === "deleted") {
      value = `${activityItem.old_value}`;
    } else {
      value = activityItem.new_value;
    }
  } else if (activityItem.field === "modules") {
    if (activityItem.verb === "deleted") {
      value = `${activityItem.old_value}`;
    } else {
      value = activityItem.new_value;
    }
  } else if (activityItem.field === "comment") {
    value = stripHtmlTags(activityItem.new_value);
  } else if (activityItem.field === "link") {
    plainText = "link";
    // Rich value embeds a Slack-format hyperlink; plainText carries the
    // markup-free fallback.
    value =
      activityItem.verb === "created"
        ? `<${activityItem.new_value}|Link>`
        : "link";
  }
  let activityString = "";
  let plainTextActivityString = "";
  if (action) {
    if (plainText !== "") {
      plainTextActivityString = `${activityItem.actor_detail.first_name} ${activityItem.actor_detail.last_name} ${action} ${plainText}`;
    }
    activityString = `${activityItem.actor_detail.first_name} ${activityItem.actor_detail.last_name} ${action} ${value}`;
  } else {
    // NOTE(review): in this branch `action` is undefined, so the plain-text
    // string would interpolate the literal "undefined" — currently
    // unreachable because plainText is only set alongside an action; verify.
    if (plainText !== "") {
      plainTextActivityString = `${activityItem.actor_detail.first_name} ${activityItem.actor_detail.last_name} ${action} ${plainText}`;
    }
    activityString = `${activityItem.actor_detail.first_name} ${activityItem.actor_detail.last_name} ${value}`;
  }
  return {
    activityString: activityString,
    plainTextActivityString: plainTextActivityString,
  };
};

View File

@@ -0,0 +1,43 @@
/**
 * Shapes an activity summary into a Slack message payload. Uses the
 * plain-text variant when present, otherwise falls back to the rich
 * activity string. Rich blocks are currently disabled (kept below for
 * later re-enablement), so `blocks` is always empty.
 */
export const getSlackMessageTemplate = (message: {
  activityString: string;
  plainTextActivityString: string;
}) => {
  const fallbackText =
    message.plainTextActivityString === ""
      ? message.activityString
      : message.plainTextActivityString;
  return {
    text: fallbackText,
    blocks: [
      // {
      //   type: "header",
      //   text: {
      //     type: "plain_text",
      //     text: `${issue.name}`,
      //     emoji: true,
      //   },
      // },
      // {
      //   type: "section",
      //   fields: [
      //     {
      //       type: "mrkdwn",
      //       text: `*Issue Id:*\n${issue.projects.identifier}-${issue.sequence_id}`,
      //     },
      //   ],
      // },
      // {
      //   type: "section",
      //   text: {
      //     type: "mrkdwn",
      //     text: `*Activity:*\n ${message.activityString}`,
      //   },
      // },
      // {
      //   type: "section",
      //   text: {
      //     type: "mrkdwn",
      //     text: `<${process.env.WEB_URL}/${issue.projects.workspaces.slug}/projects/${issue.project_id}/issues/${issue.id}|View Issue>`,
      //   },
      // },
    ],
  };
};

View File

@@ -0,0 +1,42 @@
// mq
import { ConsumeMessage } from "amqplib";
// mq single ton
import { MQSingleton } from "../mq/singleton";
// logger
import { logger } from "../utils/logger";
/**
 * Base class for queue consumers: wires a queue name and routing key to the
 * shared MQ singleton and funnels each delivery into the subclass's
 * onMessage handler.
 */
export abstract class BaseWorker {
  // Shared broker connection; null only if the singleton yields none.
  mq: MQSingleton | null = null;
  protected routingKey: string;
  constructor(
    protected queueName: string,
    routingKey: string
  ) {
    this.mq = MQSingleton.getInstance();
    this.routingKey = routingKey;
    // Bound so onMessage keeps `this` when handed to consume() as a bare callback.
    this.onMessage = this.onMessage.bind(this);
  }
  // Start the consumer
  public async start(): Promise<void> {
    try {
      this.mq?.consume(this.queueName, this.onMessage);
    } catch (error) {
      logger.error("Error starting workers");
    }
  }
  // Publish this to queue
  protected async publish(body: object, taskName: string): Promise<void> {
    try {
      this.mq?.publish(body, taskName);
    } catch (error) {
      logger.error("Error sending to queue");
    }
  }
  // Per-delivery handler implemented by subclasses; msg may be null per
  // amqplib's consume API — handle that case.
  protected abstract onMessage(msg: ConsumeMessage | null): void;
}

9
segway/tsconfig.json Normal file
View File

@@ -0,0 +1,9 @@
{
"extends": "tsconfig/express.json",
"include": ["src/**/*.ts"],
"exclude": ["node_modules"],
"compilerOptions": {
"baseUrl": "src/",
"outDir": "./dist"
}
}

2988
segway/yarn.lock Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -19,7 +19,7 @@ const useIntegrationPopup = ({
const providerUrls: { [key: string]: string } = {
github: `https://github.com/apps/${github_app_name}/installations/new?state=${workspaceSlug?.toString()}`,
slack: `https://slack.com/oauth/v2/authorize?scope=chat:write,im:history,im:write,links:read,links:write,users:read,users:read.email&amp;user_scope=&amp;&client_id=${slack_client_id}&state=${workspaceSlug?.toString()}`,
slack: `https://slack.com/oauth/v2/authorize?scope=incoming-webhook,chat:write,im:history,im:write,links:read,links:write,users:read,users:read.email&amp;user_scope=&amp;&client_id=${slack_client_id}&state=${workspaceSlug?.toString()}`,
slackChannel: `https://slack.com/oauth/v2/authorize?scope=incoming-webhook&client_id=${slack_client_id}&state=${workspaceSlug?.toString()},${projectId?.toString()}${
stateParams ? "," + stateParams : ""
}`,

View File

@@ -2790,7 +2790,7 @@
dependencies:
"@types/react" "*"
"@types/react@*", "@types/react@^18.2.39", "@types/react@^18.2.42":
"@types/react@*", "@types/react@18.2.42", "@types/react@^18.2.39", "@types/react@^18.2.42":
version "18.2.42"
resolved "https://registry.yarnpkg.com/@types/react/-/react-18.2.42.tgz#6f6b11a904f6d96dda3c2920328a97011a00aba7"
integrity sha512-c1zEr96MjakLYus/wPnuWDo1/zErfdU9rNsIGmE+NV71nx88FG9Ttgo5dqorXTu/LImX2f63WBP986gJkMPNbA==