forked from github/plane

Compare commits: fix/issue- ... chore/spac

78 Commits:

bc8d51dd97, 3c80b87365, 8a8eea38f9, b527ec582f, fa990ed444, 79fa860b28,
f2b3a645bd, 4cd70f3b73, 59110c7205, 9756abece4, f4aa059da6, d22c258c00,
65253c6383, 8c462f96ee, 332e56bb0d, eca96da837, 5446447231, dec1fb5a63,
2b63827caa, 5993fc38f0, ab37ce979a, c6764111a3, 4a2f8d93b1, 5db9b2b638,
e2b704ea6f, 452c9f0d5b, 139c0857eb, 2556d052de, c65915665b, 6ece5a57e2,
a779fa1497, 5eb7740833, 86408fcd46, 86de3188b1, c5996382bc, 2796754e5f,
3488dc3014, c7a3d8f0a0, 6d97dd814a, 804c03bd15, 59981e3e68, 1be1b9f4a3,
7cd02401c1, 6d175c0e56, 6a4f521b47, cdc4fd27a5, f4af3db7b6, 4f64f431a7,
537901f046, 404a61aee5, 1a2b1e648d, 3400c119bc, d5853405ca, db510dcfcd,
62c0615012, 3914a75334, 0cbb201348, f7264364bd, 35f8ffa5ab, 6607caade7,
8ee8270697, 41ab962dd7, c22c6bb9b2, 67de6d0729, f361cd045e, 06d3cd7e73,
10c52bf89b, b717518fbe, f48cd6f50c, 3203ae6549, b8f603f920, 96ff76af94,
830675741f, e489ad50dc, 3dc18bc8fd, 2d04917951, 0ceb9974f6, e0fcc0c876
.github/workflows/build-branch.yml (vendored, 102 changes)
@@ -7,6 +7,7 @@ on:
   branches:
     - master
     - release
     - preview
+    - qa
     - develop

@@ -14,7 +15,7 @@ env:
   TARGET_BRANCH: ${{ github.event.pull_request.base.ref }}

 jobs:
-  branch_build_and_push:
+  branch_build_setup:
     if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) }}
     name: Build-Push Web/Space/API/Proxy Docker Image
    runs-on: ubuntu-20.04
@@ -23,39 +24,6 @@ jobs:
       - name: Check out the repo
         uses: actions/checkout@v3.3.0

-      # - name: Set Target Branch Name on PR close
-      #   if: ${{ github.event_name == 'pull_request' && github.event.action =='closed' }}
-      #   run: echo "TARGET_BRANCH=${{ github.event.pull_request.base.ref }}" >> $GITHUB_ENV
-
-      # - name: Set Target Branch Name on other than PR close
-      #   if: ${{ github.event_name == 'push' }}
-      #   run: echo "TARGET_BRANCH=${{ github.ref_name }}" >> $GITHUB_ENV
-
-      - uses: ASzc/change-string-case-action@v2
-        id: gh_branch_upper_lower
-        with:
-          string: ${{env.TARGET_BRANCH}}
-
-      - uses: mad9000/actions-find-and-replace-string@2
-        id: gh_branch_replace_slash
-        with:
-          source: ${{ steps.gh_branch_upper_lower.outputs.lowercase }}
-          find: "/"
-          replace: "-"
-
-      - uses: mad9000/actions-find-and-replace-string@2
-        id: gh_branch_replace_dot
-        with:
-          source: ${{ steps.gh_branch_replace_slash.outputs.value }}
-          find: "."
-          replace: ""
-
-      - uses: mad9000/actions-find-and-replace-string@2
-        id: gh_branch_clean
-        with:
-          source: ${{ steps.gh_branch_replace_dot.outputs.value }}
-          find: "_"
-          replace: ""
       - name: Uploading Proxy Source
         uses: actions/upload-artifact@v3
         with:
@@ -87,12 +55,24 @@ jobs:
           !./deploy
           !./web
     outputs:
-      gh_branch_name: ${{ steps.gh_branch_clean.outputs.value }}
+      gh_branch_name: ${{ env.TARGET_BRANCH }}

   branch_build_push_frontend:
     runs-on: ubuntu-20.04
-    needs: [branch_build_and_push]
+    needs: [branch_build_setup]
+    env:
+      FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_setup.outputs.gh_branch_name }}
     steps:
+      - name: Set Frontend Docker Tag
+        run: |
+          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:latest
+          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "release" ] || [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "preview" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:preview,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:preview
+          else
+            TAG=${{ env.FRONTEND_TAG }}
+          fi
+          echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2.5.0

@@ -112,7 +92,7 @@ jobs:
           context: .
           file: ./web/Dockerfile.web
           platforms: linux/amd64
-          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+          tags: ${{ env.FRONTEND_TAG }}
           push: true
         env:
           DOCKER_BUILDKIT: 1
@@ -121,8 +101,20 @@ jobs:

   branch_build_push_space:
     runs-on: ubuntu-20.04
-    needs: [branch_build_and_push]
+    needs: [branch_build_setup]
+    env:
+      SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_setup.outputs.gh_branch_name }}
     steps:
+      - name: Set Space Docker Tag
+        run: |
+          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest
+          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "release" ] || [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "preview" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:preview,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:preview
+          else
+            TAG=${{ env.SPACE_TAG }}
+          fi
+          echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2.5.0

@@ -142,7 +134,7 @@ jobs:
           context: .
           file: ./space/Dockerfile.space
           platforms: linux/amd64
-          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+          tags: ${{ env.SPACE_TAG }}
           push: true
         env:
           DOCKER_BUILDKIT: 1
@@ -151,8 +143,20 @@ jobs:

   branch_build_push_backend:
     runs-on: ubuntu-20.04
-    needs: [branch_build_and_push]
+    needs: [branch_build_setup]
+    env:
+      BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_setup.outputs.gh_branch_name }}
     steps:
+      - name: Set Backend Docker Tag
+        run: |
+          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:latest
+          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "release" ] || [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "preview" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:preview,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:preview
+          else
+            TAG=${{ env.BACKEND_TAG }}
+          fi
+          echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2.5.0

@@ -173,7 +177,7 @@ jobs:
          file: ./Dockerfile.api
          platforms: linux/amd64
          push: true
-         tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+         tags: ${{ env.BACKEND_TAG }}
         env:
           DOCKER_BUILDKIT: 1
           DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -181,8 +185,20 @@ jobs:

   branch_build_push_proxy:
     runs-on: ubuntu-20.04
-    needs: [branch_build_and_push]
+    needs: [branch_build_setup]
+    env:
+      PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_setup.outputs.gh_branch_name }}
     steps:
+      - name: Set Proxy Docker Tag
+        run: |
+          if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:latest
+          elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "release" ] || [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "preview" ]; then
+            TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:preview,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:preview
+          else
+            TAG=${{ env.PROXY_TAG }}
+          fi
+          echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2.5.0

@@ -203,7 +219,7 @@ jobs:
           context: .
           file: ./Dockerfile
           platforms: linux/amd64
-          tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+          tags: ${{ env.PROXY_TAG }}
           push: true
         env:
           DOCKER_BUILDKIT: 1
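Each build job repeats the same branch-to-tag selection in shell. Reduced to a minimal Python sketch for the frontend job (the function name is invented and "acme" stands in for the DOCKERHUB_USERNAME secret):

```python
# Sketch of the frontend job's branch-to-tag mapping.
def frontend_tags(branch: str, user: str) -> list[str]:
    if branch == "master":
        return [f"{user}/plane-frontend-private:latest"]
    if branch in ("release", "preview"):
        # Both the private and the public image receive the preview tag.
        return [f"{user}/plane-frontend-private:preview",
                f"{user}/plane-frontend:preview"]
    # Any other branch gets a branch-named tag on the private image only.
    return [f"{user}/plane-frontend-private:{branch}"]

print(frontend_tags("preview", "acme"))
# ['acme/plane-frontend-private:preview', 'acme/plane-frontend:preview']
```

Note that the space job differs slightly: its master build tags the public plane-space image as latest rather than the private one.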
@@ -76,7 +76,6 @@ NEXT_PUBLIC_ENABLE_OAUTH=0

 # Backend
 # Debug value for api server use it as 0 for production use
 DEBUG=0
-DJANGO_SETTINGS_MODULE="plane.settings.selfhosted" # deprecated

 # Error logs
 SENTRY_DSN=""
@@ -57,10 +57,6 @@ Setting up local environment is extremely easy and straight forward. Follow the
 1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system
 1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`

-   ```bash
-   ./setup.sh
-   ```
-
 You are ready to make changes to the code. Do not forget to refresh the browser (in case id does not auto-reload)

 Thats it!
@@ -14,6 +14,11 @@ PGHOST="plane-db"
 PGDATABASE="plane"
 DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}

+# Oauth variables
+GOOGLE_CLIENT_ID=""
+GITHUB_CLIENT_ID=""
+GITHUB_CLIENT_SECRET=""
+
 # Redis Settings
 REDIS_HOST="plane-redis"
 REDIS_PORT="6379"
@@ -50,7 +55,6 @@ NGINX_PORT=80

 # SignUps
 ENABLE_SIGNUP="1"
-

 # Enable Email/Password Signup
 ENABLE_EMAIL_PASSWORD="1"

@@ -3,16 +3,26 @@ set -e
 python manage.py wait_for_db
 python manage.py migrate

-# Set default value for ENABLE_REGISTRATION
-ENABLE_REGISTRATION=${ENABLE_REGISTRATION:-1}
-
-# Check if ENABLE_REGISTRATION is not set to '0'
-if [ "$ENABLE_REGISTRATION" != "0" ]; then
-    # Register instance
-    python manage.py register_instance
-    # Load the configuration variable
-    python manage.py configure_instance
-fi
+# Collect system information
+HOSTNAME=$(hostname)
+MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
+CPU_INFO=$(cat /proc/cpuinfo)
+MEMORY_INFO=$(free -h)
+DISK_INFO=$(df -h)
+
+# Concatenate information and compute SHA-256 hash
+SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
+
+# Export the variables
+export MACHINE_SIGNATURE=$SIGNATURE
+
+# Register instance
+python manage.py register_instance $MACHINE_SIGNATURE
+# Load the configuration variable
+python manage.py configure_instance
+
 # Create the default bucket
 python manage.py create_bucket
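The machine signature the new entrypoint derives is just a SHA-256 over concatenated host facts. A rough Python equivalent, for readers who want to reproduce it outside the container (the command names and output formats are assumptions carried over from the shell version, and hashing details such as trailing whitespace may differ slightly):

```python
import hashlib
import socket
import subprocess

def machine_signature() -> str:
    # Same inputs the entrypoint script concatenates before hashing.
    hostname = socket.gethostname()
    link_out = subprocess.run(["ip", "link", "show"],
                              capture_output=True, text=True).stdout
    # Mirrors awk '/ether/ {print $2}' | head -n 1: first MAC address found.
    macs = [line.split()[1] for line in link_out.splitlines() if "ether" in line]
    mac = macs[0] if macs else ""
    cpu = open("/proc/cpuinfo").read()
    mem = subprocess.run(["free", "-h"], capture_output=True, text=True).stdout
    disk = subprocess.run(["df", "-h"], capture_output=True, text=True).stdout
    blob = f"{hostname}{mac}{cpu}{mem}{disk}\n".encode()  # echo appends a newline
    return hashlib.sha256(blob).hexdigest()
```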
@@ -1,9 +1,8 @@
-from django.utils import timezone
 from rest_framework.throttling import SimpleRateThrottle


 class ApiKeyRateThrottle(SimpleRateThrottle):
     scope = 'api_key'
+    rate = '60/minute'

     def get_cache_key(self, request, view):
         # Retrieve the API key from the request header
@@ -15,13 +14,10 @@ class ApiKeyRateThrottle(SimpleRateThrottle):
         return f'{self.scope}:{api_key}'

     def allow_request(self, request, view):
-        # Calculate the current time as a Unix timestamp
-        now = timezone.now().timestamp()
-
         # Use the parent class's method to check if the request is allowed
         allowed = super().allow_request(request, view)

         if allowed:
+            now = self.timer()
             # Calculate the remaining limit and reset time
             history = self.cache.get(self.key, [])

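For context, a SimpleRateThrottle subclass only takes effect once it is registered with DRF, and when the class does not hardcode `rate`, DRF looks the rate up by the class's `scope`. A sketch of that wiring, which the diff itself does not show (the module path is a guess):

```python
# Hypothetical settings.py fragment; "plane.api.rate_limit" is an assumed path.
REST_FRAMEWORK = {
    "DEFAULT_THROTTLE_CLASSES": [
        "plane.api.rate_limit.ApiKeyRateThrottle",
    ],
    "DEFAULT_THROTTLE_RATES": {
        # Keyed by the class's `scope`. Redundant while the class hardcodes
        # rate = '60/minute', required if that attribute is ever dropped.
        "api_key": "60/minute",
    },
}
```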
@@ -21,7 +21,8 @@ from plane.db.models import (
 from .base import BaseSerializer
 from .cycle import CycleSerializer, CycleLiteSerializer
 from .module import ModuleSerializer, ModuleLiteSerializer
+from .user import UserLiteSerializer
+from .state import StateLiteSerializer

 class IssueSerializer(BaseSerializer):
     assignees = serializers.ListField(
@@ -331,12 +332,23 @@ class ModuleIssueSerializer(BaseSerializer):
         ]


-class IssueExpandSerializer(BaseSerializer):
-    # Serialize the related cycle. It's a OneToOne relation.
-    cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True)
+class LabelLiteSerializer(BaseSerializer):

-    # Serialize the related module. It's a OneToOne relation.
+    class Meta:
+        model = Label
+        fields = [
+            "id",
+            "name",
+            "color",
+        ]
+
+
+class IssueExpandSerializer(BaseSerializer):
+    cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True)
+    module = ModuleLiteSerializer(source="issue_module.module", read_only=True)
+    labels = LabelLiteSerializer(read_only=True, many=True)
+    assignees = UserLiteSerializer(read_only=True, many=True)
+    state = StateLiteSerializer(read_only=True)

     class Meta:
         model = Issue
@@ -349,4 +361,4 @@ class IssueExpandSerializer(BaseSerializer):
             "updated_by",
             "created_at",
             "updated_at",
-            ]
+        ]
@@ -55,7 +55,6 @@ class ModuleSerializer(BaseSerializer):
             raise serializers.ValidationError("Start date cannot exceed target date")

         if data.get("members", []):
-            print(data.get("members"))
             data["members"] = ProjectMember.objects.filter(
                 project_id=self.context.get("project_id"),
                 member_id__in=data["members"],
@@ -11,10 +11,6 @@ class UserLiteSerializer(BaseSerializer):
             "first_name",
             "last_name",
             "avatar",
             "is_bot",
             "display_name",
         ]
-        read_only_fields = [
-            "id",
-            "is_bot",
-        ]
+        read_only_fields = fields
@@ -10,7 +10,7 @@ urlpatterns = [
         name="inbox-issue",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:pk>/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
         InboxIssueAPIEndpoint.as_view(),
         name="inbox-issue",
     ),

@@ -60,9 +60,9 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             .order_by(self.kwargs.get("order_by", "-created_at"))
         )

-    def get(self, request, slug, project_id, pk=None):
-        if pk:
-            inbox_issue_queryset = self.get_queryset().get(pk=pk)
+    def get(self, request, slug, project_id, issue_id=None):
+        if issue_id:
+            inbox_issue_queryset = self.get_queryset().get(issue_id=issue_id)
             inbox_issue_data = InboxIssueSerializer(
                 inbox_issue_queryset,
                 fields=self.fields,
@@ -163,7 +163,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             serializer = InboxIssueSerializer(inbox_issue)
             return Response(serializer.data, status=status.HTTP_200_OK)

-    def patch(self, request, slug, project_id, pk):
+    def patch(self, request, slug, project_id, issue_id):
         inbox = Inbox.objects.filter(
             workspace__slug=slug, project_id=project_id
         ).first()
@@ -184,7 +184,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):

         # Get the inbox issue
         inbox_issue = InboxIssue.objects.get(
-            pk=pk,
+            issue_id=issue_id,
             workspace__slug=slug,
             project_id=project_id,
             inbox_id=inbox.id,
@@ -212,7 +212,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):

         if bool(issue_data):
             issue = Issue.objects.get(
-                pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id
+                pk=issue_id, workspace__slug=slug, project_id=project_id
             )
             # Only allow guests and viewers to edit name and description
             if project_member.role <= 10:
@@ -236,7 +236,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                 type="issue.activity.updated",
                 requested_data=requested_data,
                 actor_id=str(request.user.id),
-                issue_id=str(issue.id),
+                issue_id=str(issue_id),
                 project_id=str(project_id),
                 current_instance=json.dumps(
                     IssueSerializer(current_instance).data,
@@ -261,7 +261,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             # Update the issue state if the issue is rejected or marked as duplicate
             if serializer.data["status"] in [-1, 2]:
                 issue = Issue.objects.get(
-                    pk=inbox_issue.issue_id,
+                    pk=issue_id,
                     workspace__slug=slug,
                     project_id=project_id,
                 )
@@ -275,7 +275,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             # Update the issue state if it is accepted
             if serializer.data["status"] in [1]:
                 issue = Issue.objects.get(
-                    pk=inbox_issue.issue_id,
+                    pk=issue_id,
                     workspace__slug=slug,
                     project_id=project_id,
                 )
@@ -297,7 +297,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
             InboxIssueSerializer(inbox_issue).data, status=status.HTTP_200_OK
         )

-    def delete(self, request, slug, project_id, pk):
+    def delete(self, request, slug, project_id, issue_id):
         inbox = Inbox.objects.filter(
             workspace__slug=slug, project_id=project_id
         ).first()
@@ -318,7 +318,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):

         # Get the inbox issue
         inbox_issue = InboxIssue.objects.get(
-            pk=pk,
+            issue_id=issue_id,
             workspace__slug=slug,
             project_id=project_id,
             inbox_id=inbox.id,
@@ -345,7 +345,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
         if inbox_issue.status in [-2, -1, 0, 2]:
             # Delete the issue also
             Issue.objects.filter(
-                workspace__slug=slug, project_id=project_id, pk=inbox_issue.issue_id
+                workspace__slug=slug, project_id=project_id, pk=issue_id
             ).delete()

         inbox_issue.delete()
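The net effect of this refactor is that clients now address an inbox issue by the underlying issue's UUID instead of the InboxIssue row's primary key. A hypothetical client call against the renamed route (the base URL, workspace slug, and IDs are placeholders, not values from this compare):

```python
import requests

BASE = "https://plane.example.com/api"  # assumed deployment URL
project_id = "11111111-2222-3333-4444-555555555555"  # placeholder UUIDs
issue_id = "66666666-7777-8888-9999-000000000000"

resp = requests.get(
    f"{BASE}/workspaces/my-workspace/projects/{project_id}"
    f"/inbox-issues/{issue_id}/",
    headers={"X-Api-Key": "<api key>"},  # header name per the throttle class above
)
resp.raise_for_status()
print(resp.json())
```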
@@ -99,7 +99,6 @@ class WorkspaceViewerPermission(BasePermission):
         return WorkspaceMember.objects.filter(
             member=request.user,
             workspace__slug=view.workspace_slug,
-            role__gte=10,
             is_active=True,
         ).exists()

@@ -1,45 +0,0 @@
-from django.utils import timezone
-from rest_framework.throttling import SimpleRateThrottle
-
-
-class ApiKeyRateThrottle(SimpleRateThrottle):
-    scope = 'api_key'
-
-    def get_cache_key(self, request, view):
-        # Retrieve the API key from the request header
-        api_key = request.headers.get('X-Api-Key')
-        if not api_key:
-            return None  # Allow the request if there's no API key
-
-        # Use the API key as part of the cache key
-        return f'{self.scope}:{api_key}'
-
-    def allow_request(self, request, view):
-        # Calculate the current time as a Unix timestamp
-        now = timezone.now().timestamp()
-
-        # Use the parent class's method to check if the request is allowed
-        allowed = super().allow_request(request, view)
-
-        if allowed:
-            # Calculate the remaining limit and reset time
-            history = self.cache.get(self.key, [])
-
-            # Remove old histories
-            while history and history[-1] <= now - self.duration:
-                history.pop()
-
-            # Calculate the requests
-            num_requests = len(history)
-
-            # Check available requests
-            available = self.num_requests - num_requests
-
-            # Unix timestamp for when the rate limit will reset
-            reset_time = int(now + self.duration)
-
-            # Add headers
-            request.META['X-RateLimit-Remaining'] = max(0, available)
-            request.META['X-RateLimit-Reset'] = reset_time
-
-        return allowed
@@ -26,6 +26,8 @@ class UserSerializer(BaseSerializer):
             "token_updated_at",
             "is_onboarded",
             "is_bot",
+            "is_password_autoset",
+            "is_email_verified",
         ]
         extra_kwargs = {"password": {"write_only": True}}

@@ -60,6 +62,8 @@ class UserMeSerializer(BaseSerializer):
             "theme",
             "last_workspace_id",
             "use_case",
+            "is_password_autoset",
+            "is_email_verified",
         ]
         read_only_fields = fields

@@ -80,9 +84,18 @@ class UserMeSettingsSerializer(BaseSerializer):
         workspace_invites = WorkspaceMemberInvite.objects.filter(
             email=obj.email
         ).count()
-        if obj.last_workspace_id is not None:
+        if (
+            obj.last_workspace_id is not None
+            and Workspace.objects.filter(
+                pk=obj.last_workspace_id,
+                workspace_member__member=obj.id,
+                workspace_member__is_active=True,
+            ).exists()
+        ):
             workspace = Workspace.objects.filter(
-                pk=obj.last_workspace_id, workspace_member__member=obj.id
+                pk=obj.last_workspace_id,
+                workspace_member__member=obj.id,
+                workspace_member__is_active=True,
             ).first()
             return {
                 "last_workspace_id": obj.last_workspace_id,
@@ -95,7 +108,9 @@ class UserMeSettingsSerializer(BaseSerializer):
             }
         else:
             fallback_workspace = (
-                Workspace.objects.filter(workspace_member__member_id=obj.id)
+                Workspace.objects.filter(
+                    workspace_member__member_id=obj.id, workspace_member__is_active=True
+                )
                 .order_by("created_at")
                 .first()
             )
@@ -154,24 +169,25 @@ class ChangePasswordSerializer(serializers.Serializer):
     Serializer for password change endpoint.
     """
     old_password = serializers.CharField(required=True)
-    new_password = serializers.CharField(required=True)
-    confirm_password = serializers.CharField(required=True)
+    new_password = serializers.CharField(required=True, min_length=8)
+    confirm_password = serializers.CharField(required=True, min_length=8)

     def validate(self, data):
         if data.get("old_password") == data.get("new_password"):
-            raise serializers.ValidationError({"error": "New password cannot be same as old password."})
+            raise serializers.ValidationError(
+                {"error": "New password cannot be same as old password."}
+            )

         if data.get("new_password") != data.get("confirm_password"):
-            raise serializers.ValidationError({"error": "Confirm password should be same as the new password."})
+            raise serializers.ValidationError(
+                {"error": "Confirm password should be same as the new password."}
+            )

         return data


 class ResetPasswordSerializer(serializers.Serializer):
     model = User

     """
     Serializer for password change endpoint.
     """
-    new_password = serializers.CharField(required=True)
-    confirm_password = serializers.CharField(required=True)
+    new_password = serializers.CharField(required=True, min_length=8)
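A quick illustration of what the tightened ChangePasswordSerializer now rejects, assuming a Django shell inside this project (the import path matches the one used by the auth views later in this compare):

```python
from plane.app.serializers import ChangePasswordSerializer

s = ChangePasswordSerializer(data={
    "old_password": "old-secret-1",
    "new_password": "short",       # fails the new min_length=8 constraint
    "confirm_password": "short",
})
assert not s.is_valid()
print(s.errors)  # validation errors for new_password and confirm_password
```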
@@ -14,9 +14,9 @@ from plane.db.models.webhook import validate_domain, validate_schema

 class WebhookSerializer(DynamicBaseSerializer):
     url = serializers.URLField(validators=[validate_schema, validate_domain])

-    def validate(self, data):
-        url = data.get("url", None)
+    def create(self, validated_data):
+        url = validated_data.get("url", None)

         # Extract the hostname from the URL
         hostname = urlparse(url).hostname
@@ -48,8 +48,42 @@ class WebhookSerializer(DynamicBaseSerializer):
         if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains):
             raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})

-        return data
+        return Webhook.objects.create(**validated_data)
+
+    def update(self, instance, validated_data):
+        url = validated_data.get("url", None)
+        if url:
+            # Extract the hostname from the URL
+            hostname = urlparse(url).hostname
+            if not hostname:
+                raise serializers.ValidationError({"url": "Invalid URL: No hostname found."})
+
+            # Resolve the hostname to IP addresses
+            try:
+                ip_addresses = socket.getaddrinfo(hostname, None)
+            except socket.gaierror:
+                raise serializers.ValidationError({"url": "Hostname could not be resolved."})
+
+            if not ip_addresses:
+                raise serializers.ValidationError({"url": "No IP addresses found for the hostname."})
+
+            for addr in ip_addresses:
+                ip = ipaddress.ip_address(addr[4][0])
+                if ip.is_private or ip.is_loopback:
+                    raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."})
+
+            # Additional validation for multiple request domains and their subdomains
+            request = self.context.get('request')
+            disallowed_domains = ['plane.so',]  # Add your disallowed domains here
+            if request:
+                request_host = request.get_host().split(':')[0]  # Remove port if present
+                disallowed_domains.append(request_host)
+
+            # Check if hostname is a subdomain or exact match of any disallowed domain
+            if any(hostname == domain or hostname.endswith('.' + domain) for domain in disallowed_domains):
+                raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})
+
+        return super().update(instance, validated_data)

     class Meta:
         model = Webhook
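The same hostname, IP, and domain checks now run on both create and update. Pulled out of the serializer, the core SSRF guard looks roughly like this standalone sketch (the helper name is invented for illustration):

```python
import ipaddress
import socket
from urllib.parse import urlparse

def check_webhook_url(url: str, disallowed_domains: list[str]) -> None:
    """Raise ValueError if the URL could be used to reach internal hosts."""
    hostname = urlparse(url).hostname
    if not hostname:
        raise ValueError("Invalid URL: No hostname found.")
    try:
        infos = socket.getaddrinfo(hostname, None)
    except socket.gaierror:
        raise ValueError("Hostname could not be resolved.")
    if not infos:
        raise ValueError("No IP addresses found for the hostname.")
    for info in infos:
        # info[4] is the sockaddr tuple; its first element is the IP string.
        ip = ipaddress.ip_address(info[4][0])
        if ip.is_private or ip.is_loopback:
            raise ValueError("URL resolves to a blocked IP address.")
    if any(hostname == d or hostname.endswith("." + d) for d in disallowed_domains):
        raise ValueError("URL domain or its subdomain is not allowed.")
```

Resolving before checking blocks public names that currently point at private addresses, though a name can still re-resolve differently by the time the webhook actually fires.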
@@ -34,6 +34,7 @@ class WorkSpaceSerializer(BaseSerializer):
             "profile",
             "spaces",
             "workspace-invitations",
+            "password",
         ]:
             raise serializers.ValidationError({"slug": "Slug is not valid"})

@@ -5,18 +5,15 @@ from rest_framework_simplejwt.views import TokenRefreshView

 from plane.app.views import (
     # Authentication
     SignUpEndpoint,
     SignInEndpoint,
     SignOutEndpoint,
     MagicSignInEndpoint,
-    MagicSignInGenerateEndpoint,
     OauthEndpoint,
+    EmailCheckEndpoint,
     ## End Authentication
     # Auth Extended
     ForgotPasswordEndpoint,
-    VerifyEmailEndpoint,
     ResetPasswordEndpoint,
-    RequestEmailVerificationEndpoint,
     ChangePasswordEndpoint,
     ## End Auth Extender
     # API Tokens
@@ -27,24 +24,14 @@ from plane.app.views import (

 urlpatterns = [
     # Social Auth
+    path("email-check/", EmailCheckEndpoint.as_view(), name="email"),
     path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
     # Auth
     path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
     path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
     path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
     # Magic Sign In/Up
-    path(
-        "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
-    ),
-    # magic sign in
     path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
     path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
-    # Email verification
-    path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
-    path(
-        "request-email-verify/",
-        RequestEmailVerificationEndpoint.as_view(),
-        name="request-reset-email",
-    ),
     # Password Manipulation
     path(
         "users/me/change-password/",
@@ -7,7 +7,6 @@ from plane.app.views import (
     CycleDateCheckEndpoint,
     CycleFavoriteViewSet,
     TransferCycleIssueEndpoint,
-    CycleIssueGroupedEndpoint,
 )


@@ -44,11 +43,6 @@ urlpatterns = [
         ),
         name="project-issue-cycle",
     ),
-    path(
-        "v3/workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
-        CycleIssueGroupedEndpoint.as_view(),
-        name="project-issue-cycle",
-    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:pk>/",
         CycleIssueViewSet.as_view(
@@ -3,8 +3,6 @@ from django.urls import path

 from plane.app.views import (
     IssueViewSet,
-    IssueListEndpoint,
-    IssueListGroupedEndpoint,
     LabelViewSet,
     BulkCreateIssueLabelsEndpoint,
     BulkDeleteIssuesEndpoint,
@@ -37,16 +35,6 @@ urlpatterns = [
         ),
         name="project-issue",
     ),
-    path(
-        "v2/workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
-        IssueListEndpoint.as_view(),
-        name="project-issue",
-    ),
-    path(
-        "v3/workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
-        IssueListGroupedEndpoint.as_view(),
-        name="project-issue",
-    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
         IssueViewSet.as_view(
@@ -7,7 +7,6 @@ from plane.app.views import (
     ModuleLinkViewSet,
     ModuleFavoriteViewSet,
     BulkImportModulesEndpoint,
-    ModuleIssueGroupedEndpoint,
 )


@@ -44,11 +43,6 @@ urlpatterns = [
         ),
         name="project-module-issues",
     ),
-    path(
-        "v3/workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
-        ModuleIssueGroupedEndpoint.as_view(),
-        name="project-issue-cycle",
-    ),
     path(
         "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:pk>/",
         ModuleIssueViewSet.as_view(
@@ -7,6 +7,7 @@ from plane.app.views import (
     UpdateUserTourCompletedEndpoint,
     UserActivityEndpoint,
     ChangePasswordEndpoint,
+    SetUserPasswordEndpoint,
     ## End User
     ## Workspaces
     UserWorkSpacesEndpoint,
@@ -89,5 +90,10 @@ urlpatterns = [
         UserWorkspaceDashboardEndpoint.as_view(),
         name="user-workspace-dashboard",
     ),
+    path(
+        "users/me/set-password/",
+        SetUserPasswordEndpoint.as_view(),
+        name="set-password",
+    ),
     ## End User Graph
 ]
@@ -5,7 +5,7 @@ from plane.app.views import (
     IssueViewViewSet,
     GlobalViewViewSet,
     GlobalViewIssuesViewSet,
     IssueViewFavoriteViewSet,
 )


@@ -18,7 +18,6 @@ from plane.app.views import (
     WorkspaceUserProfileEndpoint,
     WorkspaceUserProfileIssuesEndpoint,
     WorkspaceLabelsEndpoint,
-    WorkspaceUserProfileIssuesGroupedEndpoint
 )


@@ -190,11 +189,6 @@ urlpatterns = [
         WorkspaceUserProfileIssuesEndpoint.as_view(),
         name="workspace-user-profile-issues",
     ),
-    path(
-        "v3/workspaces/<str:slug>/user-issues/<uuid:user_id>/",
-        WorkspaceUserProfileIssuesGroupedEndpoint.as_view(),
-        name="workspace-user-profile-issues",
-    ),
     path(
         "workspaces/<str:slug>/labels/",
         WorkspaceLabelsEndpoint.as_view(),
@@ -44,7 +44,6 @@ from .workspace import (
     WorkspaceUserProfileEndpoint,
     WorkspaceUserProfileIssuesEndpoint,
     WorkspaceLabelsEndpoint,
-    WorkspaceUserProfileIssuesGroupedEndpoint
 )
 from .state import StateViewSet
 from .view import (
@@ -59,13 +58,10 @@ from .cycle import (
     CycleDateCheckEndpoint,
     CycleFavoriteViewSet,
     TransferCycleIssueEndpoint,
-    CycleIssueGroupedEndpoint,
 )
 from .asset import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet
 from .issue import (
     IssueViewSet,
-    IssueListEndpoint,
-    IssueListGroupedEndpoint,
     WorkSpaceIssuesEndpoint,
     IssueActivityEndpoint,
     IssueCommentViewSet,
@@ -86,20 +82,18 @@ from .issue import (
 )

 from .auth_extended import (
-    VerifyEmailEndpoint,
-    RequestEmailVerificationEndpoint,
     ForgotPasswordEndpoint,
     ResetPasswordEndpoint,
     ChangePasswordEndpoint,
+    SetUserPasswordEndpoint,
+    EmailCheckEndpoint,
 )


 from .authentication import (
     SignUpEndpoint,
     SignInEndpoint,
     SignOutEndpoint,
     MagicSignInEndpoint,
-    MagicSignInGenerateEndpoint,
 )

 from .module import (
@@ -107,7 +101,6 @@ from .module import (
     ModuleIssueViewSet,
     ModuleLinkViewSet,
     ModuleFavoriteViewSet,
-    ModuleIssueGroupedEndpoint,
 )

 from .api import ApiTokenEndpoint
@@ -169,4 +162,8 @@ from .exporter import ExportIssuesEndpoint

 from .config import ConfigurationEndpoint

-from .webhook import WebhookEndpoint, WebhookLogsEndpoint, WebhookSecretRegenerateEndpoint
+from .webhook import (
+    WebhookEndpoint,
+    WebhookLogsEndpoint,
+    WebhookSecretRegenerateEndpoint,
+)
@@ -1,5 +1,9 @@
 ## Python imports
 import jwt
+import uuid
+import os
 import json
 import random
 import string

 ## Django imports
 from django.contrib.auth.tokens import PasswordResetTokenGenerator
@@ -8,80 +12,114 @@ from django.utils.encoding import (
     smart_bytes,
     DjangoUnicodeDecodeError,
 )
+from django.contrib.auth.hashers import make_password
 from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
+from django.core.validators import validate_email
+from django.core.exceptions import ValidationError
+from django.conf import settings

 ## Third Party Imports
 from rest_framework import status
 from rest_framework.response import Response
-from rest_framework import permissions
+from rest_framework.permissions import AllowAny
 from rest_framework_simplejwt.tokens import RefreshToken

 from sentry_sdk import capture_exception

 ## Module imports
 from . import BaseAPIView
 from plane.app.serializers import (
     ChangePasswordSerializer,
     ResetPasswordSerializer,
+    UserSerializer,
 )
-from plane.db.models import User
-from plane.bgtasks.email_verification_task import email_verification
+from plane.db.models import User, WorkspaceMemberInvite
+from plane.license.utils.instance_value import get_configuration_value
 from plane.bgtasks.forgot_password_task import forgot_password
+from plane.license.models import Instance, InstanceConfiguration
+from plane.settings.redis import redis_instance
+from plane.bgtasks.magic_link_code_task import magic_link
+from plane.bgtasks.user_count_task import update_user_instance_user_count
+from plane.bgtasks.event_tracking_task import auth_events

 def get_tokens_for_user(user):
     refresh = RefreshToken.for_user(user)
     return (
         str(refresh.access_token),
         str(refresh),
     )


-class RequestEmailVerificationEndpoint(BaseAPIView):
-    def get(self, request):
-        token = RefreshToken.for_user(request.user).access_token
-        current_site = request.META.get('HTTP_ORIGIN')
-        email_verification.delay(
-            request.user.first_name, request.user.email, token, current_site
-        )
-        return Response(
-            {"message": "Email sent successfully"}, status=status.HTTP_200_OK
-        )
+def generate_magic_token(email):
+    key = "magic_" + str(email)
+
+    ## Generate a random token
+    token = (
+        "".join(random.choices(string.ascii_lowercase, k=4))
+        + "-"
+        + "".join(random.choices(string.ascii_lowercase, k=4))
+        + "-"
+        + "".join(random.choices(string.ascii_lowercase, k=4))
+    )
+
+    # Initialize the redis instance
+    ri = redis_instance()
+
+    # Check if the key already exists in python
+    if ri.exists(key):
+        data = json.loads(ri.get(key))
+
+        current_attempt = data["current_attempt"] + 1
+
+        if data["current_attempt"] > 2:
+            return key, token, False
+
+        value = {
+            "current_attempt": current_attempt,
+            "email": email,
+            "token": token,
+        }
+        expiry = 600
+
+        ri.set(key, json.dumps(value), ex=expiry)
+
+    else:
+        value = {"current_attempt": 0, "email": email, "token": token}
+        expiry = 600
+
+        ri.set(key, json.dumps(value), ex=expiry)
+
+    return key, token, True


-class VerifyEmailEndpoint(BaseAPIView):
-    def get(self, request):
-        token = request.GET.get("token")
-        try:
-            payload = jwt.decode(token, settings.SECRET_KEY, algorithms="HS256")
-            user = User.objects.get(id=payload["user_id"])
-
-            if not user.is_email_verified:
-                user.is_email_verified = True
-                user.save()
-            return Response(
-                {"email": "Successfully activated"}, status=status.HTTP_200_OK
-            )
-        except jwt.ExpiredSignatureError as _indentifier:
-            return Response(
-                {"email": "Activation expired"}, status=status.HTTP_400_BAD_REQUEST
-            )
-        except jwt.exceptions.DecodeError as _indentifier:
-            return Response(
-                {"email": "Invalid token"}, status=status.HTTP_400_BAD_REQUEST
-            )
+def generate_password_token(user):
+    uidb64 = urlsafe_base64_encode(smart_bytes(user.id))
+    token = PasswordResetTokenGenerator().make_token(user)
+
+    return uidb64, token


 class ForgotPasswordEndpoint(BaseAPIView):
-    permission_classes = [permissions.AllowAny]
+    permission_classes = [
+        AllowAny,
+    ]

     def post(self, request):
         email = request.data.get("email")

-        if User.objects.filter(email=email).exists():
-            user = User.objects.get(email=email)
-            uidb64 = urlsafe_base64_encode(smart_bytes(user.id))
-            token = PasswordResetTokenGenerator().make_token(user)
-
-            current_site = request.META.get('HTTP_ORIGIN')
+        try:
+            validate_email(email)
+        except ValidationError:
+            return Response({"error": "Please enter a valid email"}, status=status.HTTP_400_BAD_REQUEST)
+
+        # Get the user
+        user = User.objects.filter(email=email).first()
+        if user:
+            # Get the reset token for user
+            uidb64, token = get_tokens_for_user(user=user)
+            current_site = request.META.get("HTTP_ORIGIN")
+            # send the forgot password email
             forgot_password.delay(
                 user.first_name, user.email, uidb64, token, current_site
             )

             return Response(
                 {"message": "Check your email to reset your password"},
                 status=status.HTTP_200_OK,
@@ -92,30 +130,38 @@ class ForgotPasswordEndpoint(BaseAPIView):


 class ResetPasswordEndpoint(BaseAPIView):
-    permission_classes = [permissions.AllowAny]
+    permission_classes = [AllowAny,]

     def post(self, request, uidb64, token):
         try:
+            # Decode the id from the uidb64
             id = smart_str(urlsafe_base64_decode(uidb64))
             user = User.objects.get(id=id)

+            # check if the token is valid for the user
             if not PasswordResetTokenGenerator().check_token(user, token):
                 return Response(
-                    {"error": "token is not valid, please check the new one"},
+                    {"error": "Token is invalid"},
                     status=status.HTTP_401_UNAUTHORIZED,
                 )
-            serializer = ResetPasswordSerializer(data=request.data)

+            # Reset the password
+            serializer = ResetPasswordSerializer(data=request.data)
             if serializer.is_valid():
                 # set_password also hashes the password that the user will get
                 user.set_password(serializer.data.get("new_password"))
+                user.is_password_autoset = False
                 user.save()
-                response = {
-                    "status": "success",
-                    "code": status.HTTP_200_OK,
-                    "message": "Password updated successfully",

+                # Log the user in
+                # Generate access token for the user
+                access_token, refresh_token = get_tokens_for_user(user)
+                data = {
+                    "access_token": access_token,
+                    "refresh_token": refresh_token,
                 }

-                return Response(response)
+                return Response(data, status=status.HTTP_200_OK)
             return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

         except DjangoUnicodeDecodeError as indentifier:
@@ -128,7 +174,6 @@ class ResetPasswordEndpoint(BaseAPIView):
 class ChangePasswordEndpoint(BaseAPIView):
     def post(self, request):
         serializer = ChangePasswordSerializer(data=request.data)
-
         user = User.objects.get(pk=request.user.id)
         if serializer.is_valid():
             if not user.check_password(serializer.data.get("old_password")):
@@ -138,6 +183,207 @@ class ChangePasswordEndpoint(BaseAPIView):
                 )
             # set_password also hashes the password that the user will get
             user.set_password(serializer.data.get("new_password"))
+            user.is_password_autoset = False
             user.save()
-            return Response({"message": "Password updated successfully"}, status=status.HTTP_200_OK)
+            return Response(
+                {"message": "Password updated successfully"}, status=status.HTTP_200_OK
+            )
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)


+class SetUserPasswordEndpoint(BaseAPIView):
+    def post(self, request):
+        user = User.objects.get(pk=request.user.id)
+        password = request.data.get("password", False)
+
+        # If the user password is not autoset then return error
+        if not user.is_password_autoset:
+            return Response(
+                {
+                    "error": "Your password is already set please change your password from profile"
+                },
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        # Check password validation
+        if not password and len(str(password)) < 8:
+            return Response(
+                {"error": "Password is not valid"}, status=status.HTTP_400_BAD_REQUEST
+            )
+
+        # Set the user password
+        user.set_password(password)
+        user.is_password_autoset = False
+        user.save()
+        serializer = UserSerializer(user)
+        return Response(serializer.data, status=status.HTTP_200_OK)
+
+
+class EmailCheckEndpoint(BaseAPIView):
+    permission_classes = [
+        AllowAny,
+    ]
+
+    def post(self, request):
+        # Check the instance registration
+        instance = Instance.objects.first()
+        if instance is None or not instance.is_setup_done:
+            return Response(
+                {"error": "Instance is not configured"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+        # Get the configurations
+        instance_configuration = InstanceConfiguration.objects.values("key", "value")
+
+        email = request.data.get("email", False)
+        type = request.data.get("type", "magic_code")
+
+        if not email:
+            return Response({"error": "Email is required"}, status=status.HTTP_400_BAD_REQUEST)
+
+        # validate the email
+        try:
+            validate_email(email)
+        except ValidationError:
+            return Response({"error": "Email is not valid"}, status=status.HTTP_400_BAD_REQUEST)
+
+        # Check if the user exists
+        user = User.objects.filter(email=email).first()
+        current_site = request.META.get("HTTP_ORIGIN")
+
+        # If new user
+        if user is None:
+            # Create the user
+            if (
+                get_configuration_value(
+                    instance_configuration,
+                    "ENABLE_SIGNUP",
+                    os.environ.get("ENABLE_SIGNUP", "0"),
+                )
+                == "0"
+                and not WorkspaceMemberInvite.objects.filter(
+                    email=email,
+                ).exists()
+            ):
+                return Response(
+                    {
+                        "error": "New account creation is disabled. Please contact your site administrator"
+                    },
+                    status=status.HTTP_400_BAD_REQUEST,
+                )
+
+            # Create the user with default values
+            user = User.objects.create(
+                email=email,
+                username=uuid.uuid4().hex,
+                password=make_password(uuid.uuid4().hex),
+                is_password_autoset=True,
+            )
+
+            # Update instance user count
+            update_user_instance_user_count.delay()
+
+            # Case when the user selects magic code
+            if type == "magic_code":
+                if not bool(get_configuration_value(
+                    instance_configuration,
+                    "ENABLE_MAGIC_LINK_LOGIN",
+                    os.environ.get("ENABLE_MAGIC_LINK_LOGIN")),
+                ):
+                    return Response(
+                        {"error": "Magic link sign in is disabled."},
+                        status=status.HTTP_400_BAD_REQUEST,
+                    )
+
+                # Send event
+                if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
+                    auth_events.delay(
+                        user=user.id,
+                        email=email,
+                        user_agent=request.META.get("HTTP_USER_AGENT"),
+                        ip=request.META.get("REMOTE_ADDR"),
+                        event_name="SIGN_IN",
+                        medium="MAGIC_LINK",
+                        first_time=True,
+                    )
+                key, token, current_attempt = generate_magic_token(email=email)
+                if not current_attempt:
+                    return Response({"error": "Max attempts exhausted. Please try again later."}, status=status.HTTP_400_BAD_REQUEST)
+                # Trigger the email
+                magic_link.delay(email, "magic_" + str(email), token, current_site)
+                return Response({"is_password_autoset": user.is_password_autoset}, status=status.HTTP_200_OK)
+            else:
+                # Get the uidb64 and token for the user
+                uidb64, token = generate_password_token(user=user)
+                forgot_password.delay(
+                    user.first_name, user.email, uidb64, token, current_site
+                )
+                # Send event
+                if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
+                    auth_events.delay(
+                        user=user.id,
+                        email=email,
+                        user_agent=request.META.get("HTTP_USER_AGENT"),
+                        ip=request.META.get("REMOTE_ADDR"),
+                        event_name="SIGN_IN",
+                        medium="EMAIL",
+                        first_time=True,
+                    )
+                # Automatically send the email
+                return Response({"is_password_autoset": user.is_password_autoset}, status=status.HTTP_200_OK)
+        # Existing user
+        else:
+            if type == "magic_code":
+                ## Generate a random token
+                if not bool(get_configuration_value(
+                    instance_configuration,
+                    "ENABLE_MAGIC_LINK_LOGIN",
+                    os.environ.get("ENABLE_MAGIC_LINK_LOGIN")),
+                ):
+                    return Response(
+                        {"error": "Magic link sign in is disabled."},
+                        status=status.HTTP_400_BAD_REQUEST,
+                    )
+
+                if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
+                    auth_events.delay(
+                        user=user.id,
+                        email=email,
+                        user_agent=request.META.get("HTTP_USER_AGENT"),
+                        ip=request.META.get("REMOTE_ADDR"),
+                        event_name="SIGN_IN",
+                        medium="MAGIC_LINK",
+                        first_time=False,
+                    )
+
+                # Generate magic token
+                key, token, current_attempt = generate_magic_token(email=email)
+                if not current_attempt:
+                    return Response({"error": "Max attempts exhausted. Please try again later."}, status=status.HTTP_400_BAD_REQUEST)
+
+                # Trigger the email
+                magic_link.delay(email, key, token, current_site)
+                return Response({"is_password_autoset": user.is_password_autoset}, status=status.HTTP_200_OK)
+            else:
+                if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
+                    auth_events.delay(
+                        user=user.id,
+                        email=email,
+                        user_agent=request.META.get("HTTP_USER_AGENT"),
+                        ip=request.META.get("REMOTE_ADDR"),
+                        event_name="SIGN_IN",
+                        medium="EMAIL",
+                        first_time=False,
+                    )
+
+                if user.is_password_autoset:
+                    # send email
+                    uidb64, token = generate_password_token(user=user)
+                    forgot_password.delay(
+                        user.first_name, user.email, uidb64, token, current_site
+                    )
+                    return Response({"is_password_autoset": user.is_password_autoset}, status=status.HTTP_200_OK)
+                else:
+                    # User should enter password to login
+                    return Response({"is_password_autoset": user.is_password_autoset}, status=status.HTTP_200_OK)
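The generate_magic_token helper above caps magic-code resends at three attempts per e-mail within a 600-second TTL. A condensed sketch of that pattern using a plain redis-py client (the connection details and helper name are assumptions; the project wraps its connection in redis_instance()):

```python
import json
import random
import string

import redis

r = redis.Redis()  # assumed local instance

def issue_magic_code(email: str, ttl: int = 600):
    key = f"magic_{email}"
    token = "-".join(
        "".join(random.choices(string.ascii_lowercase, k=4)) for _ in range(3)
    )
    raw = r.get(key)
    if raw:
        data = json.loads(raw)
        if data["current_attempt"] > 2:   # fourth request within the TTL
            return key, token, False      # caller answers "Max attempts exhausted"
        attempt = data["current_attempt"] + 1
    else:
        attempt = 0
    r.set(key, json.dumps({"current_attempt": attempt, "email": email,
                           "token": token}), ex=ttl)
    return key, token, True
```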
@@ -4,23 +4,19 @@ import uuid
|
||||
import random
|
||||
import string
|
||||
import json
|
||||
import requests
|
||||
from requests.exceptions import RequestException
|
||||
|
||||
# Django imports
|
||||
from django.utils import timezone
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.validators import validate_email
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.hashers import make_password
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework import status
|
||||
from rest_framework_simplejwt.tokens import RefreshToken
|
||||
|
||||
from sentry_sdk import capture_exception, capture_message
|
||||
from sentry_sdk import capture_message
|
||||
|
||||
# Module imports
|
||||
from . import BaseAPIView
|
||||
@@ -32,10 +28,11 @@ from plane.db.models import (
|
||||
ProjectMember,
|
||||
)
|
||||
from plane.settings.redis import redis_instance
|
||||
from plane.bgtasks.magic_link_code_task import magic_link
|
||||
from plane.license.models import InstanceConfiguration
|
||||
from plane.license.models import InstanceConfiguration, Instance
|
||||
from plane.license.utils.instance_value import get_configuration_value
|
||||
from plane.bgtasks.event_tracking_task import auth_events
|
||||
from plane.bgtasks.user_count_task import update_user_instance_user_count
|
||||
|
||||
|
||||
def get_tokens_for_user(user):
|
||||
refresh = RefreshToken.for_user(user)
|
||||
@@ -49,24 +46,16 @@ class SignUpEndpoint(BaseAPIView):
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
def post(self, request):
|
||||
instance_configuration = InstanceConfiguration.objects.values("key", "value")
|
||||
if (
|
||||
not get_configuration_value(
|
||||
instance_configuration,
|
||||
"ENABLE_SIGNUP",
|
||||
os.environ.get("ENABLE_SIGNUP", "0"),
|
||||
)
|
||||
and not WorkspaceMemberInvite.objects.filter(
|
||||
email=request.user.email
|
||||
).exists()
|
||||
):
|
||||
# Check if the instance configuration is done
|
||||
instance = Instance.objects.first()
|
||||
if instance is None or not instance.is_setup_done:
|
||||
return Response(
|
||||
{
|
||||
"error": "New account creation is disabled. Please contact your site administrator"
|
||||
},
|
||||
{"error": "Instance is not configured"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
instance_configuration = InstanceConfiguration.objects.values("key", "value")
|
||||
|
||||
email = request.data.get("email", False)
|
||||
password = request.data.get("password", False)
|
||||
|
||||
@@ -87,6 +76,25 @@ class SignUpEndpoint(BaseAPIView):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# If the sign up is not enabled and the user does not have invite disallow him from creating the account
|
||||
if (
|
||||
get_configuration_value(
|
||||
instance_configuration,
|
||||
"ENABLE_SIGNUP",
|
||||
os.environ.get("ENABLE_SIGNUP", "0"),
|
||||
)
|
||||
== "0"
|
||||
and not WorkspaceMemberInvite.objects.filter(
|
||||
email=email,
|
||||
).exists()
|
||||
):
|
||||
return Response(
|
||||
{
|
||||
"error": "New account creation is disabled. Please contact your site administrator"
|
||||
},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Check if the user already exists
|
||||
if User.objects.filter(email=email).exists():
|
||||
return Response(
|
||||
@@ -105,81 +113,16 @@ class SignUpEndpoint(BaseAPIView):
|
||||
user.token_updated_at = timezone.now()
|
||||
user.save()
|
||||
|
||||
# Check if user has any accepted invites for workspace and add them to workspace
|
||||
workspace_member_invites = WorkspaceMemberInvite.objects.filter(
|
||||
email=user.email, accepted=True
|
||||
)
|
||||
|
||||
WorkspaceMember.objects.bulk_create(
|
||||
[
|
||||
WorkspaceMember(
|
||||
workspace_id=workspace_member_invite.workspace_id,
|
||||
member=user,
|
||||
role=workspace_member_invite.role,
|
||||
)
|
||||
for workspace_member_invite in workspace_member_invites
|
||||
],
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
# Check if user has any project invites
|
||||
project_member_invites = ProjectMemberInvite.objects.filter(
|
||||
email=user.email, accepted=True
|
||||
)
|
||||
|
||||
# Add user to workspace
|
||||
WorkspaceMember.objects.bulk_create(
|
||||
[
|
||||
WorkspaceMember(
|
||||
workspace_id=project_member_invite.workspace_id,
|
||||
role=project_member_invite.role
|
||||
if project_member_invite.role in [5, 10, 15]
|
||||
else 15,
|
||||
member=user,
|
||||
created_by_id=project_member_invite.created_by_id,
|
||||
)
|
||||
for project_member_invite in project_member_invites
|
||||
],
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
|
||||
# Now add the users to project
|
||||
ProjectMember.objects.bulk_create(
|
||||
[
|
||||
ProjectMember(
|
||||
workspace_id=project_member_invite.workspace_id,
|
||||
role=project_member_invite.role
|
||||
if project_member_invite.role in [5, 10, 15]
|
||||
else 15,
|
||||
member=user,
|
||||
created_by_id=project_member_invite.created_by_id,
|
||||
)
|
||||
for project_member_invite in project_member_invites
|
||||
],
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
# Delete all the invites
|
||||
workspace_member_invites.delete()
|
||||
project_member_invites.delete()
|
||||
|
||||
# Send event
|
||||
if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
|
||||
auth_events.delay(
|
||||
user=user.id,
|
||||
email=email,
|
||||
user_agent=request.META.get("HTTP_USER_AGENT"),
|
||||
ip=request.META.get("REMOTE_ADDR"),
|
||||
event_name="SIGN_IN",
|
||||
medium="EMAIL",
|
||||
first_time=True
|
||||
)
|
||||
|
||||
access_token, refresh_token = get_tokens_for_user(user)
|
||||
|
||||
data = {
|
||||
"access_token": access_token,
|
||||
"refresh_token": refresh_token,
|
||||
}
|
||||
|
||||
# Update instance user count
|
||||
update_user_instance_user_count.delay()
|
||||
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@@ -187,6 +130,14 @@ class SignInEndpoint(BaseAPIView):
    permission_classes = (AllowAny,)

    def post(self, request):
        # Check if the instance configuration is done
        instance = Instance.objects.first()
        if instance is None or not instance.is_setup_done:
            return Response(
                {"error": "Instance is not configured"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        email = request.data.get("email", False)
        password = request.data.get("password", False)

@@ -207,8 +158,10 @@ class SignInEndpoint(BaseAPIView):
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Get the user
        user = User.objects.filter(email=email).first()

        # User is not present in db
        if user is None:
            return Response(
                {
@@ -217,7 +170,7 @@ class SignInEndpoint(BaseAPIView):
                status=status.HTTP_403_FORBIDDEN,
            )

        # Sign up Process
        # Check user password
        if not user.check_password(password):
            return Response(
                {
@@ -292,7 +245,7 @@ class SignInEndpoint(BaseAPIView):
        # Delete all the invites
        workspace_member_invites.delete()
        project_member_invites.delete()
        # Send event
        # Send event
        if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
            auth_events.delay(
                user=user.id,
@@ -301,7 +254,7 @@ class SignInEndpoint(BaseAPIView):
                ip=request.META.get("REMOTE_ADDR"),
                event_name="SIGN_IN",
                medium="EMAIL",
                first_time=False
                first_time=False,
            )

        access_token, refresh_token = get_tokens_for_user(user)
@@ -335,101 +288,20 @@ class SignOutEndpoint(BaseAPIView):
        return Response({"message": "success"}, status=status.HTTP_200_OK)


class MagicSignInGenerateEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def post(self, request):
        email = request.data.get("email", False)

        instance_configuration = InstanceConfiguration.objects.values("key", "value")
        if (
            not get_configuration_value(
                instance_configuration,
                "ENABLE_MAGIC_LINK_LOGIN",
                os.environ.get("ENABLE_MAGIC_LINK_LOGIN"),
            )
            and not (
                get_configuration_value(
                    instance_configuration,
                    "ENABLE_SIGNUP",
                    os.environ.get("ENABLE_SIGNUP", "0"),
                )
            )
            and not WorkspaceMemberInvite.objects.filter(
                email=request.user.email
            ).exists()
        ):
            return Response(
                {
                    "error": "New account creation is disabled. Please contact your site administrator"
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        if not email:
            return Response(
                {"error": "Please provide a valid email address"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Clean up
        email = email.strip().lower()
        validate_email(email)

        ## Generate a random token
        token = (
            "".join(random.choices(string.ascii_lowercase, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase, k=4))
        )

        ri = redis_instance()

        key = "magic_" + str(email)

        # Check if the key already exists in python
        if ri.exists(key):
            data = json.loads(ri.get(key))

            current_attempt = data["current_attempt"] + 1

            if data["current_attempt"] > 2:
                return Response(
                    {"error": "Max attempts exhausted. Please try again later."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            value = {
                "current_attempt": current_attempt,
                "email": email,
                "token": token,
            }
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

        else:
            value = {"current_attempt": 0, "email": email, "token": token}
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

        current_site = request.META.get("HTTP_ORIGIN")
        magic_link.delay(email, key, token, current_site)

        return Response({"key": key}, status=status.HTTP_200_OK)

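The endpoint above generates a lowercase `xxxx-xxxx-xxxx` code, stores it in Redis under `magic_<email>` with a 600-second expiry, and rejects more than three requests inside that window. A self-contained sketch of the same scheme, assuming a `redis-py` client (the function name is ours):

```python
import json
import random
import string

import redis  # assumes the redis-py client


def request_magic_code(r: redis.Redis, email: str, ttl: int = 600) -> str:
    """Generate an xxxx-xxxx-xxxx code, rate-limited per email via Redis."""
    token = "-".join(
        "".join(random.choices(string.ascii_lowercase, k=4)) for _ in range(3)
    )
    key = f"magic_{email}"
    stored = r.get(key)
    if stored:
        state = json.loads(stored)
        if state["current_attempt"] > 2:  # same threshold as the endpoint above
            raise RuntimeError("Max attempts exhausted. Please try again later.")
        state = {"current_attempt": state["current_attempt"] + 1, "email": email, "token": token}
    else:
        state = {"current_attempt": 0, "email": email, "token": token}
    r.set(key, json.dumps(state), ex=ttl)  # TTL caps the rate-limit window
    return token
```

Note that `random.choices` is not a cryptographic source; `secrets.choice` would be the hardened pick for login codes.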
class MagicSignInEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def post(self, request):
        # Check if the instance configuration is done
        instance = Instance.objects.first()
        if instance is None or not instance.is_setup_done:
            return Response(
                {"error": "Instance is not configured"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        user_token = request.data.get("token", "").strip()
        key = request.data.get("key", False).strip().lower()

@@ -448,48 +320,28 @@ class MagicSignInEndpoint(BaseAPIView):
        email = data["email"]

        if str(token) == str(user_token):
            if User.objects.filter(email=email).exists():
                user = User.objects.get(email=email)
                if not user.is_active:
                    return Response(
                        {
                            "error": "Your account has been deactivated. Please contact your site administrator."
                        },
                        status=status.HTTP_403_FORBIDDEN,
                    )
                # Send event
                if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
                    auth_events.delay(
                        user=user.id,
                        email=email,
                        user_agent=request.META.get("HTTP_USER_AGENT"),
                        ip=request.META.get("REMOTE_ADDR"),
                        event_name="SIGN_IN",
                        medium="MAGIC_LINK",
                        first_time=False
                    )

            else:
                user = User.objects.create(
                user = User.objects.get(email=email)
                if not user.is_active:
                    return Response(
                        {
                            "error": "Your account has been deactivated. Please contact your site administrator."
                        },
                        status=status.HTTP_403_FORBIDDEN,
                    )
                # Send event
                if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
                    auth_events.delay(
                        user=user.id,
                        email=email,
                    username=uuid.uuid4().hex,
                    password=make_password(uuid.uuid4().hex),
                    is_password_autoset=True,
                        user_agent=request.META.get("HTTP_USER_AGENT"),
                        ip=request.META.get("REMOTE_ADDR"),
                        event_name="SIGN_IN",
                        medium="MAGIC_LINK",
                        first_time=False,
                    )

                # Send event
                if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
                    auth_events.delay(
                        user=user.id,
                        email=email,
                        user_agent=request.META.get("HTTP_USER_AGENT"),
                        ip=request.META.get("REMOTE_ADDR"),
                        event_name="SIGN_IN",
                        medium="MAGIC_LINK",
                        first_time=True
                    )

                user.is_active = True
                user.is_email_verified = True
                user.last_active = timezone.now()
                user.last_login_time = timezone.now()
                user.last_login_ip = request.META.get("REMOTE_ADDR")

@@ -539,9 +539,8 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):

    @method_decorator(gzip_page)
    def list(self, request, slug, project_id, cycle_id):
        fields = [field for field in request.GET.get("fields", "").split(",") if field]
        order_by = request.GET.get("order_by", "created_at")
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)
        filters = issue_filters(request.query_params, "GET")
        issues = (
            Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
@@ -576,24 +575,9 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
            )
        )

        issues_data = IssueStateSerializer(issues, many=True).data

        if sub_group_by and sub_group_by == group_by:
            return Response(
                {"error": "Group by and sub group by cannot be same"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if group_by:
            grouped_results = group_results(issues_data, group_by, sub_group_by)
            return Response(
                grouped_results,
                status=status.HTTP_200_OK,
            )

        return Response(
            issues_data, status=status.HTTP_200_OK
        )
        issues = IssueStateSerializer(issues, many=True, fields=fields if fields else None).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        return Response(issue_dict, status=status.HTTP_200_OK)

    def create(self, request, slug, project_id, cycle_id):
        issues = request.data.get("issues", [])
@@ -709,56 +693,6 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
        return Response(status=status.HTTP_204_NO_CONTENT)


class CycleIssueGroupedEndpoint(BaseAPIView):

    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id, cycle_id):
        filters = issue_filters(request.query_params, "GET")
        fields = [field for field in request.GET.get("fields", "").split(",") if field]

        issues = (
            Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(bridge_id=F("issue_cycle__id"))
            .filter(project_id=project_id)
            .filter(workspace__slug=slug)
            .select_related("project")
            .select_related("workspace")
            .select_related("state")
            .select_related("parent")
            .prefetch_related("assignees")
            .prefetch_related("labels")
            .filter(**filters)
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
        )

        issues = IssueStateSerializer(issues, many=True, fields=fields if fields else None).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        return Response(
            issue_dict,
            status=status.HTTP_200_OK,
        )

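The pattern repeated in these new `*GroupedEndpoint` views (and in the slimmed-down `list()` methods above): the server no longer calls `group_results`, it returns a flat `{issue_id: issue}` dict and leaves grouping to the client. A rough sketch of what client-side grouping over that payload could look like (field names are illustrative):

```python
from collections import defaultdict


def group_issues(issue_dict: dict, group_by: str) -> dict:
    """Group the {issue_id: issue} payload by an issue field, e.g. 'priority'."""
    grouped = defaultdict(list)
    for issue_id, issue in issue_dict.items():
        grouped[str(issue.get(group_by))].append(issue_id)
    return dict(grouped)

# e.g. group_issues(response.json(), "priority")
# -> {"urgent": [...ids...], "high": [...], ...}
```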
class CycleDateCheckEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,

@@ -4,6 +4,7 @@ import random
from itertools import chain

# Django imports
from django.db import models
from django.utils import timezone
from django.db.models import (
    Prefetch,
@@ -132,6 +133,7 @@ class IssueViewSet(WebhookMixin, BaseViewSet):

    @method_decorator(gzip_page)
    def list(self, request, slug, project_id):
        fields = [field for field in request.GET.get("fields", "").split(",") if field]
        filters = issue_filters(request.query_params, "GET")

        # Custom ordering for priority and state
@@ -215,25 +217,9 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)

        issues = IssueLiteSerializer(issue_queryset, many=True).data

        ## Grouping the results
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)
        if sub_group_by and sub_group_by == group_by:
            return Response(
                {"error": "Group by and sub group by cannot be same"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if group_by:
            grouped_results = group_results(issues, group_by, sub_group_by)
            return Response(
                grouped_results,
                status=status.HTTP_200_OK,
            )

        return Response(issues, status=status.HTTP_200_OK)
        issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        return Response(issue_dict, status=status.HTTP_200_OK)

    def create(self, request, slug, project_id):
        project = Project.objects.get(pk=project_id)
@@ -311,104 +297,6 @@ class IssueViewSet(WebhookMixin, BaseViewSet):
        return Response(status=status.HTTP_204_NO_CONTENT)


class IssueListEndpoint(BaseAPIView):
    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id):
        fields = [field for field in request.GET.get("fields", "").split(",") if field]
        filters = issue_filters(request.query_params, "GET")

        issue_queryset = (
            Issue.objects.filter(workspace__slug=slug, project_id=project_id)
            .select_related("project")
            .select_related("workspace")
            .select_related("state")
            .select_related("parent")
            .prefetch_related("assignees")
            .prefetch_related("labels")
            .prefetch_related(
                Prefetch(
                    "issue_reactions",
                    queryset=IssueReaction.objects.select_related("actor"),
                )
            )
            .filter(**filters)
            .annotate(cycle_id=F("issue_cycle__cycle_id"))
            .annotate(module_id=F("issue_module__module_id"))
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .distinct()
        )

        serializer = IssueLiteSerializer(
            issue_queryset, many=True, fields=fields if fields else None
        )

        return Response(serializer.data, status=status.HTTP_200_OK)


class IssueListGroupedEndpoint(BaseAPIView):

    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id):
        filters = issue_filters(request.query_params, "GET")
        fields = [field for field in request.GET.get("fields", "").split(",") if field]

        issue_queryset = (
            Issue.objects.filter(workspace__slug=slug, project_id=project_id)
            .select_related("project")
            .select_related("workspace")
            .select_related("state")
            .select_related("parent")
            .prefetch_related("assignees")
            .prefetch_related("labels")
            .prefetch_related(
                Prefetch(
                    "issue_reactions",
                    queryset=IssueReaction.objects.select_related("actor"),
                )
            )
            .filter(**filters)
            .annotate(cycle_id=F("issue_cycle__cycle_id"))
            .annotate(module_id=F("issue_module__module_id"))
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .distinct()
        )

        issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        return Response(
            issue_dict,
            status=status.HTTP_200_OK,
        )


class UserWorkSpaceIssues(BaseAPIView):
    @method_decorator(gzip_page)
    def get(self, request, slug):
@@ -1083,6 +971,7 @@ class IssueArchiveViewSet(BaseViewSet):

    @method_decorator(gzip_page)
    def list(self, request, slug, project_id):
        fields = [field for field in request.GET.get("fields", "").split(",") if field]
        filters = issue_filters(request.query_params, "GET")
        show_sub_issues = request.GET.get("show_sub_issues", "true")

@@ -1173,14 +1062,9 @@ class IssueArchiveViewSet(BaseViewSet):
            else issue_queryset.filter(parent__isnull=True)
        )

        issues = IssueLiteSerializer(issue_queryset, many=True).data

        ## Grouping the results
        group_by = request.GET.get("group_by", False)
        if group_by:
            return Response(group_results(issues, group_by), status=status.HTTP_200_OK)

        return Response(issues, status=status.HTTP_200_OK)
        issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        return Response(issue_dict, status=status.HTTP_200_OK)

    def retrieve(self, request, slug, project_id, pk=None):
        issue = Issue.objects.get(
@@ -1580,6 +1464,7 @@ class IssueDraftViewSet(BaseViewSet):
    @method_decorator(gzip_page)
    def list(self, request, slug, project_id):
        filters = issue_filters(request.query_params, "GET")
        fields = [field for field in request.GET.get("fields", "").split(",") if field]

        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
@@ -1662,18 +1547,9 @@ class IssueDraftViewSet(BaseViewSet):
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)

        issues = IssueLiteSerializer(issue_queryset, many=True).data

        ## Grouping the results
        group_by = request.GET.get("group_by", False)
        if group_by:
            grouped_results = group_results(issues, group_by)
            return Response(
                grouped_results,
                status=status.HTTP_200_OK,
            )

        return Response(issues, status=status.HTTP_200_OK)
        issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        return Response(issue_dict, status=status.HTTP_200_OK)

    def create(self, request, slug, project_id):
        project = Project.objects.get(pk=project_id)

@@ -324,9 +324,8 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):

    @method_decorator(gzip_page)
    def list(self, request, slug, project_id, module_id):
        fields = [field for field in request.GET.get("fields", "").split(",") if field]
        order_by = request.GET.get("order_by", "created_at")
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)
        filters = issue_filters(request.query_params, "GET")
        issues = (
            Issue.issue_objects.filter(issue_module__module_id=module_id)
@@ -360,24 +359,9 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
                .values("count")
            )
        )
        issues_data = IssueStateSerializer(issues, many=True).data

        if sub_group_by and sub_group_by == group_by:
            return Response(
                {"error": "Group by and sub group by cannot be same"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if group_by:
            grouped_results = group_results(issues_data, group_by, sub_group_by)
            return Response(
                grouped_results,
                status=status.HTTP_200_OK,
            )

        return Response(
            issues_data, status=status.HTTP_200_OK
        )
        issues = IssueStateSerializer(issues, many=True, fields=fields if fields else None).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        return Response(issue_dict, status=status.HTTP_200_OK)

    def create(self, request, slug, project_id, module_id):
        issues = request.data.get("issues", [])
@@ -482,55 +466,6 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
        return Response(status=status.HTTP_204_NO_CONTENT)


class ModuleIssueGroupedEndpoint(BaseAPIView):

    permission_classes = [
        ProjectEntityPermission,
    ]

    def get(self, request, slug, project_id, module_id):
        filters = issue_filters(request.query_params, "GET")
        fields = [field for field in request.GET.get("fields", "").split(",") if field]

        issues = (
            Issue.issue_objects.filter(issue_module__module_id=module_id)
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(bridge_id=F("issue_module__id"))
            .filter(project_id=project_id)
            .filter(workspace__slug=slug)
            .select_related("project")
            .select_related("workspace")
            .select_related("state")
            .select_related("parent")
            .prefetch_related("assignees")
            .prefetch_related("labels")
            .filter(**filters)
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
        )

        issues = IssueStateSerializer(issues, many=True, fields=fields if fields else None).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        return Response(
            issue_dict,
            status=status.HTTP_200_OK,
        )

class ModuleLinkViewSet(BaseViewSet):
    permission_classes = [
        ProjectEntityPermission,

@@ -2,7 +2,6 @@
import uuid
import requests
import os
from requests.exceptions import RequestException

# Django imports
from django.utils import timezone
@@ -31,8 +30,9 @@ from plane.db.models import (
)
from plane.bgtasks.event_tracking_task import auth_events
from .base import BaseAPIView
from plane.license.models import InstanceConfiguration
from plane.license.models import InstanceConfiguration, Instance
from plane.license.utils.instance_value import get_configuration_value
from plane.bgtasks.user_count_task import update_user_instance_user_count


def get_tokens_for_user(user):
@@ -136,6 +136,14 @@ class OauthEndpoint(BaseAPIView):

    def post(self, request):
        try:
            # Check if instance is registered or not
            instance = Instance.objects.first()
            if instance is None and not instance.is_setup_done:
                return Response(
                    {"error": "Instance is not configured"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            medium = request.data.get("medium", False)
            id_token = request.data.get("credential", False)
            client_id = request.data.get("clientId", False)
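One caveat in the hunk above: the guard reads `if instance is None and not instance.is_setup_done:`. With `and`, the attribute access is evaluated exactly when `instance` is `None`, raising `AttributeError`, and when an instance exists the branch can never fire. The sibling `SignInEndpoint` and `MagicSignInEndpoint` in this same change set write the check with `or`, which is presumably the intent here too:

```python
# The check as the sibling endpoints in this change set write it:
instance = Instance.objects.first()
if instance is None or not instance.is_setup_done:
    return Response(
        {"error": "Instance is not configured"},
        status=status.HTTP_400_BAD_REQUEST,
    )
```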
@@ -143,34 +151,17 @@ class OauthEndpoint(BaseAPIView):
            instance_configuration = InstanceConfiguration.objects.values(
                "key", "value"
            )
            if (
                (
                    not get_configuration_value(
                        instance_configuration,
                        "GOOGLE_CLIENT_ID",
                        os.environ.get("GOOGLE_CLIENT_ID"),
                    )
                    or not get_configuration_value(
                        instance_configuration,
                        "GITHUB_CLIENT_ID",
                        os.environ.get("GITHUB_CLIENT_ID"),
                    )
                )
                and not (
                    get_configuration_value(
                        instance_configuration,
                        "ENABLE_SIGNUP",
                        os.environ.get("ENABLE_SIGNUP", "0"),
                    )
                )
                and not WorkspaceMemberInvite.objects.filter(
                    email=request.user.email
                ).exists()
            if not get_configuration_value(
                instance_configuration,
                "GOOGLE_CLIENT_ID",
                os.environ.get("GOOGLE_CLIENT_ID"),
            ) or not get_configuration_value(
                instance_configuration,
                "GITHUB_CLIENT_ID",
                os.environ.get("GITHUB_CLIENT_ID"),
            ):
                return Response(
                    {
                        "error": "New account creation is disabled. Please contact your site administrator"
                    },
                    {"error": "Github or Google login is not configured"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

@@ -286,8 +277,8 @@ class OauthEndpoint(BaseAPIView):
                    "last_login_at": timezone.now(),
                },
            )

            # Send event

            # Send event
            if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
                auth_events.delay(
                    user=user.id,
@@ -295,8 +286,8 @@ class OauthEndpoint(BaseAPIView):
                    user_agent=request.META.get("HTTP_USER_AGENT"),
                    ip=request.META.get("REMOTE_ADDR"),
                    event_name="SIGN_IN",
                    medium=medium.upper(),
                    first_time=False
                    medium=medium.upper(),
                    first_time=False,
                )

            access_token, refresh_token = get_tokens_for_user(user)
@@ -309,15 +300,18 @@ class OauthEndpoint(BaseAPIView):

        except User.DoesNotExist:
            ## Signup Case

            instance_configuration = InstanceConfiguration.objects.values(
                "key", "value"
            )
            if (
                get_configuration_value(
                    instance_configuration,
                    "ENABLE_SIGNUP",
                    os.environ.get("ENABLE_SIGNUP", "0"),
                )
                == "0"
                and not WorkspaceMemberInvite.objects.filter(
                    email=request.user.email
                    email=email,
                ).exists()
            ):
                return Response(
@@ -341,7 +335,7 @@ class OauthEndpoint(BaseAPIView):
                    status=status.HTTP_400_BAD_REQUEST,
                )

            user = User(
            user = User.objects.create(
                username=username,
                email=email,
                mobile_number=mobile_number,
@@ -352,7 +346,6 @@ class OauthEndpoint(BaseAPIView):
            )

            user.set_password(uuid.uuid4().hex)
            user.is_password_autoset = True
            user.last_active = timezone.now()
            user.last_login_time = timezone.now()
            user.last_login_ip = request.META.get("REMOTE_ADDR")
@@ -418,7 +411,7 @@ class OauthEndpoint(BaseAPIView):
            workspace_member_invites.delete()
            project_member_invites.delete()

            # Send event
            # Send event
            if settings.POSTHOG_API_KEY and settings.POSTHOG_HOST:
                auth_events.delay(
                    user=user.id,
@@ -427,7 +420,7 @@ class OauthEndpoint(BaseAPIView):
                    ip=request.META.get("REMOTE_ADDR"),
                    event_name="SIGN_IN",
                    medium=medium.upper(),
                    first_time=True
                    first_time=True,
                )

            SocialLoginConnection.objects.update_or_create(
@@ -445,4 +438,7 @@ class OauthEndpoint(BaseAPIView):
                "access_token": access_token,
                "refresh_token": refresh_token,
            }

            # Update the user count
            update_user_instance_user_count.delay()
            return Response(data, status=status.HTTP_201_CREATED)

@@ -17,7 +17,7 @@ from plane.license.models import Instance, InstanceAdmin
from plane.utils.paginator import BasePaginator


from django.db.models import Q, F, Count, Case, When, Value, IntegerField
from django.db.models import Q, F, Count, Case, When, IntegerField


class UserEndpoint(BaseViewSet):
@@ -52,7 +52,6 @@ class UserEndpoint(BaseViewSet):
        projects_to_deactivate = []
        workspaces_to_deactivate = []


        projects = ProjectMember.objects.filter(
            member=request.user, is_active=True
        ).annotate(
@@ -113,6 +112,15 @@ class UserEndpoint(BaseViewSet):

        # Deactivate the user
        user.is_active = False
        user.last_workspace_id = None
        user.is_tour_completed = False
        user.is_onboarded = False
        user.onboarding_step = {
            "workspace_join": False,
            "profile_complete": False,
            "workspace_create": False,
            "workspace_invite": False,
        }
        user.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

@@ -135,9 +143,9 @@ class UpdateUserTourCompletedEndpoint(BaseAPIView):

class UserActivityEndpoint(BaseAPIView, BasePaginator):
    def get(self, request):
        queryset = IssueActivity.objects.filter(
            actor=request.user
        ).select_related("actor", "workspace", "issue", "project")
        queryset = IssueActivity.objects.filter(actor=request.user).select_related(
            "actor", "workspace", "issue", "project"
        )

        return self.paginate(
            request=request,
@@ -146,3 +154,4 @@ class UserActivityEndpoint(BaseAPIView, BasePaginator):
                issue_activities, many=True
            ).data,
        )


@@ -20,7 +20,7 @@ from rest_framework.response import Response
from rest_framework import status

# Module imports
from . import BaseViewSet
from . import BaseViewSet, BaseAPIView
from plane.app.serializers import (
    GlobalViewSerializer,
    IssueViewSerializer,
@@ -95,6 +95,7 @@ class GlobalViewIssuesViewSet(BaseViewSet):
    @method_decorator(gzip_page)
    def list(self, request, slug):
        filters = issue_filters(request.query_params, "GET")
        fields = [field for field in request.GET.get("fields", "").split(",") if field]

        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
@@ -177,25 +178,13 @@ class GlobalViewIssuesViewSet(BaseViewSet):
            )
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)
        issues = IssueLiteSerializer(issue_queryset, many=True).data

        ## Grouping the results
        group_by = request.GET.get("group_by", False)
        sub_group_by = request.GET.get("sub_group_by", False)
        if sub_group_by and sub_group_by == group_by:
            return Response(
                {"error": "Group by and sub group by cannot be same"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if group_by:
            grouped_results = group_results(issues, group_by, sub_group_by)
            return Response(
                grouped_results,
                status=status.HTTP_200_OK,
            )

        return Response(issues, status=status.HTTP_200_OK)
        issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        return Response(
            issue_dict,
            status=status.HTTP_200_OK,
        )


class IssueViewViewSet(BaseViewSet):

@@ -113,7 +113,10 @@ class WorkSpaceViewSet(BaseViewSet):
        return (
            self.filter_queryset(super().get_queryset().select_related("owner"))
            .order_by("name")
            .filter(workspace_member__member=self.request.user)
            .filter(
                workspace_member__member=self.request.user,
                workspace_member__is_active=True,
            )
            .annotate(total_members=member_count)
            .annotate(total_issues=issue_count)
            .select_related("owner")
@@ -189,17 +192,21 @@ class UserWorkSpacesEndpoint(BaseAPIView):
        )

        workspace = (
            (
                Workspace.objects.prefetch_related(
                    Prefetch("workspace_member", queryset=WorkspaceMember.objects.all())
            Workspace.objects.prefetch_related(
                Prefetch(
                    "workspace_member",
                    queryset=WorkspaceMember.objects.filter(
                        member=request.user, is_active=True
                    ),
                )
                .filter(
                    workspace_member__member=request.user,
                )
                .select_related("owner")
            )
            .select_related("owner")
            .annotate(total_members=member_count)
            .annotate(total_issues=issue_count)
            .filter(
                workspace_member__member=request.user, workspace_member__is_active=True
            )
            .distinct()
        )

        serializer = WorkSpaceSerializer(self.filter_queryset(workspace), many=True)
@@ -319,7 +326,7 @@ class WorkspaceInvitationsViewset(BaseViewSet):
            workspace_invitations, batch_size=10, ignore_conflicts=True
        )

        current_site = request.META.get('HTTP_ORIGIN')
        current_site = request.META.get("HTTP_ORIGIN")

        # Send invitations
        for invitation in workspace_invitations:
@@ -418,7 +425,9 @@ class WorkspaceJoinEndpoint(BaseAPIView):
        )

    def get(self, request, slug, pk):
        workspace_invitation = WorkspaceMemberInvite.objects.get(workspace__slug=slug, pk=pk)
        workspace_invitation = WorkspaceMemberInvite.objects.get(
            workspace__slug=slug, pk=pk
        )
        serializer = WorkSpaceMemberInviteSerializer(workspace_invitation)
        return Response(serializer.data, status=status.HTTP_200_OK)

@@ -1198,6 +1207,7 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
    ]

    def get(self, request, slug, user_id):
        fields = [field for field in request.GET.get("fields", "").split(",") if field]
        filters = issue_filters(request.query_params, "GET")

        # Custom ordering for priority and state
@@ -1299,75 +1309,12 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)

        issues = IssueLiteSerializer(issue_queryset, many=True).data

        ## Grouping the results
        group_by = request.GET.get("group_by", False)
        if group_by:
            grouped_results = group_results(issues, group_by)
            return Response(
                grouped_results,
                status=status.HTTP_200_OK,
            )

        return Response(issues, status=status.HTTP_200_OK)



class WorkspaceUserProfileIssuesGroupedEndpoint(BaseAPIView):

    permission_classes = [
        WorkspaceViewerPermission,
    ]

    def get(self, request, slug, user_id):
        filters = issue_filters(request.query_params, "GET")
        fields = [field for field in request.GET.get("fields", "").split(",") if field]

        issue_queryset = (
            Issue.issue_objects.filter(
                Q(assignees__in=[user_id])
                | Q(created_by_id=user_id)
                | Q(issue_subscribers__subscriber_id=user_id),
                workspace__slug=slug,
                project__project_projectmember__member=request.user,
            )
            .filter(**filters)
            .annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .select_related("project", "workspace", "state", "parent")
            .prefetch_related("assignees", "labels")
            .prefetch_related(
                Prefetch(
                    "issue_reactions",
                    queryset=IssueReaction.objects.select_related("actor"),
                )
            )
            .order_by("-created_at")
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
        ).distinct()

        issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data
        issues = IssueLiteSerializer(
            issue_queryset, many=True, fields=fields if fields else None
        ).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        return Response(
            issue_dict,
            status=status.HTTP_200_OK,
        )
        return Response(issue_dict, status=status.HTTP_200_OK)


class WorkspaceLabelsEndpoint(BaseAPIView):
    permission_classes = [

@@ -1,7 +1,8 @@
# Python imports
import csv
import io
import os
import requests
import json

# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
@@ -17,8 +18,8 @@ from sentry_sdk import capture_exception
from plane.db.models import Issue
from plane.utils.analytics_plot import build_graph_plot
from plane.utils.issue_filters import issue_filters
from plane.license.models import InstanceConfiguration
from plane.license.utils.instance_value import get_configuration_value
from plane.license.models import InstanceConfiguration, Instance
from plane.license.utils.instance_value import get_email_configuration

row_mapping = {
    "state__name": "State",
@@ -43,7 +44,7 @@ CYCLE_ID = "issue_cycle__cycle_id"
MODULE_ID = "issue_module__module_id"


def send_export_email(email, slug, csv_buffer):
def send_export_email(email, slug, csv_buffer, rows):
    """Helper function to send export email."""
    subject = "Your Export is ready"
    html_content = render_to_string("emails/exports/analytics.html", {})
@@ -55,47 +56,58 @@ def send_export_email(email, slug, csv_buffer):
    instance_configuration = InstanceConfiguration.objects.filter(
        key__startswith="EMAIL_"
    ).values("key", "value")

    (
        EMAIL_HOST,
        EMAIL_HOST_USER,
        EMAIL_HOST_PASSWORD,
        EMAIL_PORT,
        EMAIL_USE_TLS,
        EMAIL_FROM,
    ) = get_email_configuration(instance_configuration=instance_configuration)

    # Send the email if the users don't have smtp configured
    if EMAIL_HOST and EMAIL_HOST_USER and EMAIL_HOST_PASSWORD:
        # Check the instance registration
        instance = Instance.objects.first()

        headers = {
            "Content-Type": "application/json",
            "x-instance-id": instance.instance_id,
            "x-api-key": instance.api_key,
        }

        payload = {
            "email": email,
            "slug": slug,
            "rows": rows,
        }

        _ = requests.post(
            f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/users/analytics/",
            headers=headers,
            json=payload,
        )
        return

    connection = get_connection(
        host=get_configuration_value(
            instance_configuration, "EMAIL_HOST", os.environ.get("EMAIL_HOST")
        ),
        port=int(
            get_configuration_value(
                instance_configuration, "EMAIL_PORT", os.environ.get("EMAIL_PORT")
            )
        ),
        username=get_configuration_value(
            instance_configuration,
            "EMAIL_HOST_USER",
            os.environ.get("EMAIL_HOST_USER"),
        ),
        password=get_configuration_value(
            instance_configuration,
            "EMAIL_HOST_PASSWORD",
            os.environ.get("EMAIL_HOST_PASSWORD"),
        ),
        use_tls=bool(
            get_configuration_value(
                instance_configuration,
                "EMAIL_USE_TLS",
                os.environ.get("EMAIL_USE_TLS", "1"),
            )
        ),
        host=EMAIL_HOST,
        port=int(EMAIL_PORT),
        username=EMAIL_HOST_USER,
        password=EMAIL_HOST_PASSWORD,
        use_tls=bool(EMAIL_USE_TLS),
    )

    msg = EmailMultiAlternatives(
        subject=subject,
        body=text_content,
        from_email=get_configuration_value(
            instance_configuration,
            "EMAIL_FROM",
            os.environ.get("EMAIL_FROM", "Team Plane <team@mailer.plane.so>"),
        ),
        from_email=EMAIL_FROM,
        to=[email],
        connection=connection,
    )
    msg.attach(f"{slug}-analytics.csv", csv_buffer.getvalue())
    msg.send(fail_silently=False)
    return


def get_assignee_details(slug, filters):
@@ -463,8 +475,11 @@ def analytic_export_task(email, data, slug):
        )

        csv_buffer = generate_csv_from_rows(rows)
        send_export_email(email, slug, csv_buffer)
        send_export_email(email, slug, csv_buffer, rows)
        return
    except Exception as e:
        print(e)
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return

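Several tasks in this change set replace repeated `get_configuration_value` calls with a single `get_email_configuration` helper that returns a 6-tuple. Its implementation is not part of this diff; based on how the tuple is consumed (database-stored value first, environment variable as fallback), a plausible reading might be:

```python
import os

# Hypothetical reconstruction -- the real helper lives in
# plane/license/utils/instance_value.py and may differ in detail.
def get_email_configuration(instance_configuration):
    def value(key, default=None):
        for item in instance_configuration:  # rows of {"key": ..., "value": ...}
            if item["key"] == key and item["value"]:
                return item["value"]
        return os.environ.get(key, default)

    return (
        value("EMAIL_HOST"),
        value("EMAIL_HOST_USER"),
        value("EMAIL_HOST_PASSWORD"),
        value("EMAIL_PORT", "587"),  # the default port is our assumption
        value("EMAIL_USE_TLS", "1"),
        value("EMAIL_FROM", "Team Plane <team@mailer.plane.so>"),
    )
```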
@@ -1,90 +0,0 @@
# Python imports
import os

# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings

# Third party imports
from celery import shared_task


from sentry_sdk import capture_exception

# Module imports
from plane.license.models import InstanceConfiguration
from plane.license.utils.instance_value import get_configuration_value


@shared_task
def email_verification(first_name, email, token, current_site):
    try:
        realtivelink = "/request-email-verification/" + "?token=" + str(token)
        abs_url = current_site + realtivelink

        subject = "Verify your Email!"

        context = {
            "first_name": first_name,
            "verification_url": abs_url,
        }

        html_content = render_to_string("emails/auth/email_verification.html", context)

        text_content = strip_tags(html_content)

        # Configure email connection from the database
        instance_configuration = InstanceConfiguration.objects.filter(
            key__startswith="EMAIL_"
        ).values("key", "value")
        connection = get_connection(
            host=get_configuration_value(
                instance_configuration, "EMAIL_HOST", os.environ.get("EMAIL_HOST")
            ),
            port=int(
                get_configuration_value(
                    instance_configuration, "EMAIL_PORT", os.environ.get("EMAIL_PORT")
                )
            ),
            username=get_configuration_value(
                instance_configuration,
                "EMAIL_HOST_USER",
                os.environ.get("EMAIL_HOST_USER"),
            ),
            password=get_configuration_value(
                instance_configuration,
                "EMAIL_HOST_PASSWORD",
                os.environ.get("EMAIL_HOST_PASSWORD"),
            ),
            use_tls=bool(
                get_configuration_value(
                    instance_configuration,
                    "EMAIL_USE_TLS",
                    os.environ.get("EMAIL_USE_TLS", "1"),
                )
            ),
        )

        # Initiate email alternatives
        msg = EmailMultiAlternatives(
            subject=subject,
            body=text_content,
            from_email=get_configuration_value(
                instance_configuration,
                "EMAIL_FROM",
                os.environ.get("EMAIL_FROM", "Team Plane <team@mailer.plane.so>"),
            ),
            to=[email],
            connection=connection,
        )
        msg.attach_alternative(html_content, "text/html")
        msg.send()
        return
    except Exception as e:
        # Print logs if in DEBUG mode
        if settings.DEBUG:
            print(e)
        capture_exception(e)
        return

@@ -10,7 +10,6 @@ from sentry_sdk import capture_exception

@shared_task
def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
    print(user, email, user_agent, ip, event_name, medium, first_time)
    try:
        posthog = Posthog(settings.POSTHOG_API_KEY, host=settings.POSTHOG_HOST)
        posthog.capture(

@@ -1,5 +1,7 @@
# Python import
import os
import requests
import json

# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
@@ -12,64 +14,84 @@ from celery import shared_task
from sentry_sdk import capture_exception

# Module imports
from plane.license.models import InstanceConfiguration
from plane.license.utils.instance_value import get_configuration_value
from plane.license.models import InstanceConfiguration, Instance
from plane.license.utils.instance_value import get_email_configuration


@shared_task
def forgot_password(first_name, email, uidb64, token, current_site):

    try:
        realtivelink = f"/accounts/reset-password/?uidb64={uidb64}&token={token}"
        abs_url = current_site + realtivelink
        relative_link = (
            f"/accounts/password/?uidb64={uidb64}&token={token}&email={email}"
        )
        abs_url = str(current_site) + relative_link

        subject = "Reset Your Password - Plane"
        instance_configuration = InstanceConfiguration.objects.filter(
            key__startswith="EMAIL_"
        ).values("key", "value")

        (
            EMAIL_HOST,
            EMAIL_HOST_USER,
            EMAIL_HOST_PASSWORD,
            EMAIL_PORT,
            EMAIL_USE_TLS,
            EMAIL_FROM,
        ) = get_email_configuration(instance_configuration=instance_configuration)

        # Send the email if the users don't have smtp configured
        if not (EMAIL_HOST and EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
            # Check the instance registration
            instance = Instance.objects.first()

            # headers
            headers = {
                "Content-Type": "application/json",
                "x-instance-id": instance.instance_id,
                "x-api-key": instance.api_key,
            }

            payload = {
                "abs_url": abs_url,
                "first_name": first_name,
                "email": email,
            }

            _ = requests.post(
                f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/users/forgot-password/",
                headers=headers,
                data=json.dumps(payload),
            )

            return

        subject = "A new password to your Plane account has been requested"

        context = {
            "first_name": first_name,
            "forgot_password_url": abs_url,
            "email": email,
        }

        html_content = render_to_string("emails/auth/forgot_password.html", context)

        text_content = strip_tags(html_content)

        instance_configuration = InstanceConfiguration.objects.filter(key__startswith='EMAIL_').values("key", "value")
        instance_configuration = InstanceConfiguration.objects.filter(
            key__startswith="EMAIL_"
        ).values("key", "value")
        connection = get_connection(
            host=get_configuration_value(
                instance_configuration, "EMAIL_HOST", os.environ.get("EMAIL_HOST")
            ),
            port=int(
                get_configuration_value(
                    instance_configuration, "EMAIL_PORT", os.environ.get("EMAIL_PORT")
                )
            ),
            username=get_configuration_value(
                instance_configuration,
                "EMAIL_HOST_USER",
                os.environ.get("EMAIL_HOST_USER"),
            ),
            password=get_configuration_value(
                instance_configuration,
                "EMAIL_HOST_PASSWORD",
                os.environ.get("EMAIL_HOST_PASSWORD"),
            ),
            use_tls=bool(
                get_configuration_value(
                    instance_configuration,
                    "EMAIL_USE_TLS",
                    os.environ.get("EMAIL_USE_TLS", "1"),
                )
            ),
            host=EMAIL_HOST,
            port=int(EMAIL_PORT),
            username=EMAIL_HOST_USER,
            password=EMAIL_HOST_PASSWORD,
            use_tls=bool(EMAIL_USE_TLS),
        )

        msg = EmailMultiAlternatives(
            subject=subject,
            body=text_content,
            from_email=get_configuration_value(
                instance_configuration,
                "EMAIL_FROM",
                os.environ.get("EMAIL_FROM", "Team Plane <team@mailer.plane.so>"),
            ),
            from_email=EMAIL_FROM,
            to=[email],
            connection=connection,
        )

@@ -26,6 +26,7 @@ from plane.db.models import (
    IssueProperty,
)
from plane.bgtasks.user_welcome_task import send_welcome_slack
from plane.bgtasks.user_count_task import update_user_instance_user_count


@shared_task
@@ -120,6 +121,9 @@ def service_importer(service, importer_id):
        batch_size=100,
        ignore_conflicts=True,
    )

    # Update instance user count
    update_user_instance_user_count.delay()

    # Check if sync config is on for github importers
    if service == "github" and importer.config.get("sync", False):

@@ -1,5 +1,7 @@
# Python imports
import os
import requests
import json

# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
@@ -12,63 +14,69 @@ from celery import shared_task
from sentry_sdk import capture_exception

# Module imports
from plane.license.models import InstanceConfiguration
from plane.license.utils.instance_value import get_configuration_value
from plane.license.models import InstanceConfiguration, Instance
from plane.license.utils.instance_value import get_email_configuration


@shared_task
def magic_link(email, key, token, current_site):
    try:
        realtivelink = f"/magic-sign-in/?password={token}&key={key}"
        abs_url = current_site + realtivelink

        subject = "Login for Plane"

        context = {"magic_url": abs_url, "code": token}

        html_content = render_to_string("emails/auth/magic_signin.html", context)

        text_content = strip_tags(html_content)

        instance_configuration = InstanceConfiguration.objects.filter(
            key__startswith="EMAIL_"
        ).values("key", "value")

        (
            EMAIL_HOST,
            EMAIL_HOST_USER,
            EMAIL_HOST_PASSWORD,
            EMAIL_PORT,
            EMAIL_USE_TLS,
            EMAIL_FROM,
        ) = get_email_configuration(instance_configuration=instance_configuration)

        # Send the email if the users don't have smtp configured
        if not (EMAIL_HOST and EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
            # Check the instance registration
            instance = Instance.objects.first()

            headers = {
                "Content-Type": "application/json",
                "x-instance-id": instance.instance_id,
                "x-api-key": instance.api_key,
            }

            payload = {
                "token": token,
                "email": email,
            }

            _ = requests.post(
                f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/users/magic-code/",
                headers=headers,
                data=json.dumps(payload),
            )

            return

        # Send the mail
        subject = f"Your unique Plane login code is {token}"
        context = {"code": token, "email": email}

        html_content = render_to_string("emails/auth/magic_signin.html", context)
        text_content = strip_tags(html_content)

        connection = get_connection(
            host=get_configuration_value(
                instance_configuration, "EMAIL_HOST", os.environ.get("EMAIL_HOST")
            ),
            port=int(
                get_configuration_value(
                    instance_configuration, "EMAIL_PORT", os.environ.get("EMAIL_PORT")
                )
            ),
            username=get_configuration_value(
                instance_configuration,
                "EMAIL_HOST_USER",
                os.environ.get("EMAIL_HOST_USER"),
            ),
            password=get_configuration_value(
                instance_configuration,
                "EMAIL_HOST_PASSWORD",
                os.environ.get("EMAIL_HOST_PASSWORD"),
            ),
            use_tls=bool(
                get_configuration_value(
                    instance_configuration,
                    "EMAIL_USE_TLS",
                    os.environ.get("EMAIL_USE_TLS", "1"),
                )
            ),
            host=EMAIL_HOST,
            port=int(EMAIL_PORT),
            username=EMAIL_HOST_USER,
            password=EMAIL_HOST_PASSWORD,
            use_tls=bool(EMAIL_USE_TLS),
        )

        msg = EmailMultiAlternatives(
            subject=subject,
            body=text_content,
            from_email=get_configuration_value(
                instance_configuration,
                "EMAIL_FROM",
                os.environ.get("EMAIL_FROM", "Team Plane <team@mailer.plane.so>"),
            ),
            from_email=EMAIL_FROM,
            to=[email],
            connection=connection,
        )
@@ -76,6 +84,7 @@ def magic_link(email, key, token, current_site):
        msg.send()
        return
    except Exception as e:
        print(e)
        capture_exception(e)
        # Print logs if in DEBUG mode
        if settings.DEBUG:

45
apiserver/plane/bgtasks/user_count_task.py
Normal file
@@ -0,0 +1,45 @@
# Python imports
import json
import requests
import os

# django imports
from django.conf import settings

# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception

# Module imports
from plane.db.models import User
from plane.license.models import Instance

@shared_task
def update_user_instance_user_count():
    try:
        instance_users = User.objects.filter(is_bot=False).count()
        instance = Instance.objects.update(user_count=instance_users)

        # Update the count in the license engine
        payload = {
            "user_count": User.objects.count(),
        }

        # Save the user in control center
        headers = {
            "Content-Type": "application/json",
            "x-instance-id": instance.instance_id,
            "x-api-key": instance.api_key,
        }

        # Update the license engine
        _ = requests.post(
            f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/",
            headers=headers,
            data=json.dumps(payload),
        )

    except Exception as e:
        if settings.DEBUG:
            print(e)
        capture_exception(e)

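One caveat in this new task: `Instance.objects.update(user_count=...)` is a queryset update that returns the number of rows touched, not a model instance, so the later `instance.instance_id` / `instance.api_key` reads would raise `AttributeError` at runtime (masked here by the broad `except`). A sketch of the presumably intended sequence:

```python
# Update the stored count, then read the registered instance back
# for its credentials (sketch of the apparent intent).
instance_users = User.objects.filter(is_bot=False).count()
Instance.objects.update(user_count=instance_users)

instance = Instance.objects.first()
if instance is None:
    return  # no instance registered yet; nothing to report upstream

headers = {
    "Content-Type": "application/json",
    "x-instance-id": instance.instance_id,
    "x-api-key": instance.api_key,
}
```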
@@ -90,17 +90,6 @@ def webhook_task(self, webhook, slug, event, event_data, action):
        else None
    )

    # Use HMAC for generating signature
    if webhook.secret_key:
        event_data_json = json.dumps(event_data) if event_data is not None else "{}"
        hmac_signature = hmac.new(
            webhook.secret_key.encode("utf-8"),
            event_data_json.encode("utf-8"),
            hashlib.sha256,
        )
        signature = hmac_signature.hexdigest()
        headers["X-Plane-Signature"] = signature

    action = {
        "POST": "create",
        "PATCH": "update",
@@ -116,6 +105,16 @@ def webhook_task(self, webhook, slug, event, event_data, action):
        "data": event_data,
    }

    # Use HMAC for generating signature
    if webhook.secret_key:
        hmac_signature = hmac.new(
            webhook.secret_key.encode("utf-8"),
            json.dumps(payload, sort_keys=True).encode("utf-8"),
            hashlib.sha256,
        )
        signature = hmac_signature.hexdigest()
        headers["X-Plane-Signature"] = signature

    # Send the webhook event
    response = requests.post(
        webhook.url,

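The hunk above moves signing so the HMAC-SHA256 covers the full payload (with `sort_keys=True` for a stable serialization) rather than `event_data` alone. A receiving service would recompute the digest over the raw request body and compare in constant time; a minimal sketch, assuming the body arrives byte-for-byte as signed:

```python
import hashlib
import hmac


def verify_plane_signature(raw_body: bytes, secret_key: str, received: str) -> bool:
    """Recompute the HMAC-SHA256 of the raw body and compare in constant time."""
    expected = hmac.new(secret_key.encode("utf-8"), raw_body, hashlib.sha256).hexdigest()
    return hmac.compare_digest(expected, received)

# e.g. verify_plane_signature(request.body, secret, request.headers["X-Plane-Signature"])
```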
@@ -1,5 +1,7 @@
|
||||
# Python imports
|
||||
import os
|
||||
import requests
|
||||
import json
|
||||
|
||||
# Django imports
|
||||
from django.core.mail import EmailMultiAlternatives, get_connection
|
||||
@@ -15,8 +17,8 @@ from slack_sdk.errors import SlackApiError
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import Workspace, WorkspaceMemberInvite, User
|
||||
from plane.license.models import InstanceConfiguration
|
||||
from plane.license.utils.instance_value import get_configuration_value
|
||||
from plane.license.models import InstanceConfiguration, Instance
|
||||
from plane.license.utils.instance_value import get_email_configuration
|
||||
|
||||
|
||||
@shared_task
|
||||
@@ -33,16 +35,54 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
|
||||
relative_link = f"/workspace-invitations/?invitation_id={workspace_member_invite.id}&email={email}&slug={workspace.slug}"
|
||||
|
||||
# The complete url including the domain
|
||||
abs_url = current_site + relative_link
|
||||
abs_url = str(current_site) + relative_link
|
||||
|
||||
instance_configuration = InstanceConfiguration.objects.filter(
|
||||
key__startswith="EMAIL_"
|
||||
).values("key", "value")
|
||||
|
||||
(
|
||||
EMAIL_HOST,
|
||||
EMAIL_HOST_USER,
|
||||
EMAIL_HOST_PASSWORD,
|
||||
EMAIL_PORT,
|
||||
EMAIL_USE_TLS,
|
||||
EMAIL_FROM,
|
||||
) = get_email_configuration(instance_configuration=instance_configuration)
|
||||
|
||||
# Send the email if the users don't have smtp configured
|
||||
if not (EMAIL_HOST and EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
|
||||
# Check the instance registration
|
||||
instance = Instance.objects.first()
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
"x-instance-id": instance.instance_id,
|
||||
"x-api-key": instance.api_key,
|
||||
}
|
||||
|
||||
payload = {
|
||||
"user": user.first_name or user.display_name or user.email,
|
||||
"workspace_name": workspace.name,
|
||||
"invitation_url": abs_url,
|
||||
"email": email,
|
||||
}
|
||||
_ = requests.post(
|
||||
f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/users/workspace-invitation/",
|
||||
headers=headers,
|
||||
data=json.dumps(payload),
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
# Subject of the email
|
||||
subject = f"{user.first_name or user.display_name or user.email} invited you to join {workspace.name} on Plane"
|
||||
subject = f"{user.first_name or user.display_name or user.email} has invited you to join them in {workspace.name} on Plane"
|
||||
|
||||
context = {
|
||||
"email": email,
|
||||
"first_name": invitor,
|
||||
"first_name": user.first_name or user.display_name or user.email,
|
||||
"workspace_name": workspace.name,
|
||||
"invitation_url": abs_url,
|
||||
"abs_url": abs_url,
|
||||
}
|
||||
|
||||
html_content = render_to_string(
|
||||
@@ -58,41 +98,17 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
            key__startswith="EMAIL_"
        ).values("key", "value")
        connection = get_connection(
            host=get_configuration_value(
                instance_configuration, "EMAIL_HOST", os.environ.get("EMAIL_HOST")
            ),
            port=int(
                get_configuration_value(
                    instance_configuration, "EMAIL_PORT", os.environ.get("EMAIL_PORT")
                )
            ),
            username=get_configuration_value(
                instance_configuration,
                "EMAIL_HOST_USER",
                os.environ.get("EMAIL_HOST_USER"),
            ),
            password=get_configuration_value(
                instance_configuration,
                "EMAIL_HOST_PASSWORD",
                os.environ.get("EMAIL_HOST_PASSWORD"),
            ),
            use_tls=bool(
                get_configuration_value(
                    instance_configuration,
                    "EMAIL_USE_TLS",
                    os.environ.get("EMAIL_USE_TLS", "1"),
                )
            ),
            host=EMAIL_HOST,
            port=int(EMAIL_PORT),
            username=EMAIL_HOST_USER,
            password=EMAIL_HOST_PASSWORD,
            use_tls=bool(EMAIL_USE_TLS),
        )

        msg = EmailMultiAlternatives(
            subject=subject,
            body=text_content,
            from_email=get_configuration_value(
                instance_configuration,
                "EMAIL_FROM",
                os.environ.get("EMAIL_FROM", "Team Plane <team@mailer.plane.so>"),
            ),
            from_email=EMAIL_FROM,
            to=[email],
            connection=connection,
        )
@@ -112,6 +128,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):

        return
    except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist) as e:
        print("Workspace or WorkspaceMember Invite does not exist")
        return
    except Exception as e:
        # Print logs if in DEBUG mode
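The refactored task above resolves every SMTP parameter through get_email_configuration before opening a connection. For reference, a minimal, self-contained sketch of sending through an explicit Django SMTP connection in the same way (all addresses and credentials here are placeholders, not values from the diff):

```python
from django.core.mail import EmailMultiAlternatives, get_connection

# Placeholder credentials; the task pulls these from InstanceConfiguration/env instead
connection = get_connection(
    host="smtp.example.com",
    port=587,
    username="mailer",
    password="s3cret",
    use_tls=True,
)

msg = EmailMultiAlternatives(
    subject="You have been invited to join a workspace on Plane",
    body="Plain-text fallback body",
    from_email="Team Plane <team@mailer.plane.so>",
    to=["invitee@example.com"],
    connection=connection,
)
msg.attach_alternative("<p>HTML body</p>", "text/html")  # HTML variant of the body
msg.send()
```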
@@ -21,7 +21,7 @@ class Migration(migrations.Migration):
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('transaction', models.UUIDField(default=uuid.uuid4)),
                ('entity_identifier', models.UUIDField(null=True)),
                ('entity_name', models.CharField(choices=[('to_do', 'To Do'), ('issue', 'issue'), ('image', 'Image'), ('video', 'Video'), ('file', 'File'), ('link', 'Link'), ('cycle', 'Cycle'), ('module', 'Module'), ('back_link', 'Back Link'), ('forward_link', 'Forward Link'), ('mention', 'Mention')], max_length=30, verbose_name='Transaction Type')),
                ('entity_name', models.CharField(choices=[('to_do', 'To Do'), ('issue', 'issue'), ('image', 'Image'), ('video', 'Video'), ('file', 'File'), ('link', 'Link'), ('cycle', 'Cycle'), ('module', 'Module'), ('back_link', 'Back Link'), ('forward_link', 'Forward Link'), ('page_mention', 'Page Mention'), ('user_mention', 'User Mention')], max_length=30, verbose_name='Transaction Type')),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('page', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='page_log', to='db.page')),
                ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')),
@@ -1,6 +1,11 @@
# Generated by Django 4.2.5 on 2023-11-17 08:48

from django.db import migrations, models
import plane.db.models.workspace


def user_password_autoset(apps, schema_editor):
    User = apps.get_model("db", "User")
    User.objects.update(is_password_autoset=True)


class Migration(migrations.Migration):
@@ -20,4 +25,15 @@ class Migration(migrations.Migration):
            name='organization_size',
            field=models.CharField(blank=True, max_length=20, null=True),
        ),
        migrations.AddField(
            model_name='fileasset',
            name='is_deleted',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='workspace',
            name='slug',
            field=models.SlugField(max_length=48, unique=True, validators=[plane.db.models.workspace.slug_validator]),
        ),
        migrations.RunPython(user_password_autoset),
    ]
@@ -1,18 +0,0 @@
# Generated by Django 4.2.7 on 2023-11-20 08:26

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0050_user_use_case_alter_workspace_organization_size'),
    ]

    operations = [
        migrations.AddField(
            model_name='fileasset',
            name='is_deleted',
            field=models.BooleanField(default=False),
        ),
    ]
@@ -1,19 +0,0 @@
# Generated by Django 4.2.5 on 2023-11-23 14:57

from django.db import migrations, models
import plane.db.models.workspace


class Migration(migrations.Migration):

    dependencies = [
        ('db', '0051_fileasset_is_deleted'),
    ]

    operations = [
        migrations.AlterField(
            model_name='workspace',
            name='slug',
            field=models.SlugField(max_length=48, unique=True, validators=[plane.db.models.workspace.slug_validator]),
        ),
    ]
@@ -7,7 +7,6 @@ from django.db import models
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone
from django.core.validators import MinValueValidator, MaxValueValidator
from django.core.exceptions import ValidationError

@@ -141,8 +140,10 @@ class Issue(ProjectBaseModel):
            )["largest"]
            # aggregate can return None! Check it first.
            # If it isn't None, just use the last ID specified (which should be the greatest) and add one to it
            if last_id is not None:
            if last_id:
                self.sequence_id = last_id + 1
            else:
                self.sequence_id = 1

            largest_sort_order = Issue.objects.filter(
                project=self.project, state=self.state
@@ -57,7 +57,8 @@ class PageLog(ProjectBaseModel):
        ("module", "Module"),
        ("back_link", "Back Link"),
        ("forward_link", "Forward Link"),
        ("mention", "Mention"),
        ("page_mention", "Page Mention"),
        ("user_mention", "User Mention"),
    )
    transaction = models.UUIDField(default=uuid.uuid4)
    page = models.ForeignKey(
@@ -1 +1 @@
from .instance import InstanceOwnerPermission, InstanceAdminPermission
from .instance import InstanceAdminPermission
@@ -5,20 +5,6 @@ from rest_framework.permissions import BasePermission
from plane.license.models import Instance, InstanceAdmin


class InstanceOwnerPermission(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False

        instance = Instance.objects.first()
        return InstanceAdmin.objects.filter(
            role=20,
            instance=instance,
            user=request.user,
        ).exists()


class InstanceAdminPermission(BasePermission):
    def has_permission(self, request, view):
@@ -2,7 +2,7 @@
from plane.license.models import Instance, InstanceAdmin, InstanceConfiguration
from plane.app.serializers import BaseSerializer
from plane.app.serializers import UserAdminLiteSerializer
from plane.license.utils.encryption import decrypt_data


class InstanceSerializer(BaseSerializer):
    primary_owner_details = UserAdminLiteSerializer(source="primary_owner", read_only=True)
@@ -12,14 +12,13 @@ class InstanceSerializer(BaseSerializer):
        fields = "__all__"
        read_only_fields = [
            "id",
            "primary_owner",
            "primary_email",
            "instance_id",
            "license_key",
            "api_key",
            "version",
            "email",
            "last_checked_at",
            "is_setup_done",
        ]
@@ -40,3 +39,11 @@ class InstanceConfigurationSerializer(BaseSerializer):
    class Meta:
        model = InstanceConfiguration
        fields = "__all__"

    def to_representation(self, instance):
        data = super().to_representation(instance)
        # Decrypt secret values
        if instance.key in ["OPENAI_API_KEY", "GITHUB_CLIENT_SECRET", "EMAIL_HOST_PASSWORD", "UNSPLASH_ACESS_KEY"] and instance.value is not None:
            data["value"] = decrypt_data(instance.value)

        return data
@@ -1,6 +1,9 @@
from .instance import (
    InstanceEndpoint,
    TransferPrimaryOwnerEndpoint,
    InstanceAdminEndpoint,
    InstanceConfigurationEndpoint,
    AdminSetupMagicSignInEndpoint,
    SignUpScreenVisitedEndpoint,
    AdminMagicSignInGenerateEndpoint,
    AdminSetUserPasswordEndpoint,
)
@@ -2,13 +2,22 @@
import json
import os
import requests
import uuid
import random
import string

# Django imports
from django.utils import timezone
from django.contrib.auth.hashers import make_password
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from django.conf import settings

# Third party imports
from rest_framework import status
from rest_framework.response import Response
from rest_framework.permissions import AllowAny
from rest_framework_simplejwt.tokens import RefreshToken

# Module imports
from plane.app.views import BaseAPIView
@@ -18,24 +27,25 @@ from plane.license.api.serializers import (
    InstanceAdminSerializer,
    InstanceConfigurationSerializer,
)
from plane.app.serializers import UserSerializer
from plane.license.api.permissions import (
    InstanceOwnerPermission,
    InstanceAdminPermission,
)
from plane.db.models import User
from plane.license.utils.encryption import encrypt_data
from plane.settings.redis import redis_instance
from plane.bgtasks.magic_link_code_task import magic_link


class InstanceEndpoint(BaseAPIView):
    def get_permissions(self):
        if self.request.method in ["POST", "PATCH"]:
            self.permission_classes = [
                InstanceOwnerPermission,
        if self.request.method == "PATCH":
            return [
                InstanceAdminPermission(),
            ]
        else:
            self.permission_classes = [
                InstanceAdminPermission,
            ]
            return super(InstanceEndpoint, self).get_permissions()
        return [
            AllowAny(),
        ]

    def post(self, request):
        # Check if the instance is registered
@@ -47,23 +57,17 @@ class InstanceEndpoint(BaseAPIView):
            # Load JSON content from the file
            data = json.load(file)

            license_engine_base_url = os.environ.get("LICENSE_ENGINE_BASE_URL")

            if not license_engine_base_url:
                return Response(
                    {"error": "LICENSE_ENGINE_BASE_URL is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            headers = {"Content-Type": "application/json"}

            payload = {
                "email": request.user.email,
                "instance_key": settings.INSTANCE_KEY,
                "version": data.get("version", 0.1),
                "machine_signature": os.environ.get("MACHINE_SIGNATURE"),
                "user_count": User.objects.filter(is_bot=False).count(),
            }

            response = requests.post(
                f"{license_engine_base_url}/api/instances",
                f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/",
                headers=headers,
                data=json.dumps(payload),
            )
@@ -77,21 +81,15 @@ class InstanceEndpoint(BaseAPIView):
                    license_key=data.get("license_key"),
                    api_key=data.get("api_key"),
                    version=data.get("version"),
                    primary_email=data.get("email"),
                    primary_owner=request.user,
                    last_checked_at=timezone.now(),
                )
                # Create instance admin
                _ = InstanceAdmin.objects.create(
                    user=request.user,
                    instance=instance,
                    role=20,
                    user_count=data.get("user_count", 0),
                )

                serializer = InstanceSerializer(instance)
                data = serializer.data
                data["is_activated"] = True
                return Response(
                    {
                        "message": f"Instance successfully registered with owner: {instance.primary_owner.email}"
                    },
                    data,
                    status=status.HTTP_201_CREATED,
                )
            return Response(
@@ -100,9 +98,7 @@ class InstanceEndpoint(BaseAPIView):
            )
        else:
            return Response(
                {
                    "message": f"Instance already registered with instance owner: {instance.primary_owner.email}"
                },
                {"message": "Instance already registered"},
                status=status.HTTP_200_OK,
            )
@@ -110,11 +106,15 @@ class InstanceEndpoint(BaseAPIView):
        instance = Instance.objects.first()
        # Get the instance
        if instance is None:
            return Response({"activated": False}, status=status.HTTP_400_BAD_REQUEST)
            return Response(
                {"is_activated": False, "is_setup_done": False},
                status=status.HTTP_200_OK,
            )
        # Return instance
        serializer = InstanceSerializer(instance)
        serializer.data["activated"] = True
        return Response(serializer.data, status=status.HTTP_200_OK)
        data = serializer.data
        data["is_activated"] = True
        return Response(data, status=status.HTTP_200_OK)

    def patch(self, request):
        # Get the instance
@@ -122,62 +122,37 @@ class InstanceEndpoint(BaseAPIView):
        serializer = InstanceSerializer(instance, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            # Save the user in control center
            headers = {
                "Content-Type": "application/json",
                "x-instance-id": instance.instance_id,
                "x-api-key": instance.api_key,
            }
            # Update instance settings in the license engine
            _ = requests.patch(
                f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/",
                headers=headers,
                data=json.dumps(
                    {
                        "is_support_required": serializer.data["is_support_required"],
                        "is_telemetry_enabled": serializer.data["is_telemetry_enabled"],
                        "version": serializer.data["version"],
                    }
                ),
            )
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)


class TransferPrimaryOwnerEndpoint(BaseAPIView):
    permission_classes = [
        InstanceOwnerPermission,
    ]

    # Transfer the owner of the instance
    def post(self, request):
        instance = Instance.objects.first()

        # Get the email of the new user
        email = request.data.get("email", False)
        if not email:
            return Response(
                {"error": "User is required"}, status=status.HTTP_400_BAD_REQUEST
            )

        # Get users
        user = User.objects.get(email=email)

        # Save the instance user
        instance.primary_owner = user
        instance.primary_email = user.email
        instance.save(update_fields=["owner", "email"])

        # Add the user to admin
        _ = InstanceAdmin.objects.get_or_create(
            instance=instance,
            user=user,
            role=20,
        )

        return Response(
            {"message": "Owner successfully updated"}, status=status.HTTP_200_OK
        )


class InstanceAdminEndpoint(BaseAPIView):
    def get_permissions(self):
        if self.request.method in ["POST", "DELETE"]:
            self.permission_classes = [
                InstanceOwnerPermission,
            ]
        else:
            self.permission_classes = [
                InstanceAdminPermission,
            ]
        return super(InstanceAdminEndpoint, self).get_permissions()
    permission_classes = [
        InstanceAdminPermission,
    ]

    # Create an instance admin
    def post(self, request):
        email = request.data.get("email", False)
        role = request.data.get("role", 15)
        role = request.data.get("role", 20)

        if not email:
            return Response(
@@ -230,18 +205,289 @@ class InstanceConfigurationEndpoint(BaseAPIView):
        return Response(serializer.data, status=status.HTTP_200_OK)

    def patch(self, request):
        configurations = InstanceConfiguration.objects.filter(key__in=request.data.keys())
        configurations = InstanceConfiguration.objects.filter(
            key__in=request.data.keys()
        )

        bulk_configurations = []
        for configuration in configurations:
            configuration.value = request.data.get(configuration.key, configuration.value)
            value = request.data.get(configuration.key, configuration.value)
            if value is not None and configuration.key in [
                "OPENAI_API_KEY",
                "GITHUB_CLIENT_SECRET",
                "EMAIL_HOST_PASSWORD",
                "UNSPLASH_ACESS_KEY",
            ]:
                configuration.value = encrypt_data(value)
            else:
                configuration.value = value
            bulk_configurations.append(configuration)

        InstanceConfiguration.objects.bulk_update(
            bulk_configurations,
            ["value"],
            batch_size=100
            bulk_configurations, ["value"], batch_size=100
        )

        serializer = InstanceConfigurationSerializer(configurations, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
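The patch handler above encrypts recognized secret keys before bulk-updating. A hedged sketch of driving it from a client; the /api/licenses/ prefix and the bearer token are assumptions, since only the instances/configurations/ suffix appears in the URL configuration later in this diff:

```python
import requests

# Hypothetical values: base URL, URL prefix, and auth token are assumptions
BASE = "https://plane.example.com"
headers = {"Authorization": "Bearer <admin-access-token>"}

# Secret keys (e.g. EMAIL_HOST_PASSWORD) are encrypted server-side before storage
resp = requests.patch(
    f"{BASE}/api/licenses/instances/configurations/",
    headers=headers,
    json={"EMAIL_HOST": "smtp.example.com", "EMAIL_HOST_PASSWORD": "s3cret"},
)
print(resp.status_code, resp.json())
```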
def get_tokens_for_user(user):
    refresh = RefreshToken.for_user(user)
    return (
        str(refresh.access_token),
        str(refresh),
    )


class AdminMagicSignInGenerateEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def post(self, request):
        email = request.data.get("email", False)

        # Check the instance registration
        instance = Instance.objects.first()
        if instance is None:
            return Response(
                {"error": "Instance is not configured"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if InstanceAdmin.objects.first():
            return Response(
                {"error": "Admin for this instance is already registered"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if not email:
            return Response(
                {"error": "Please provide a valid email address"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Clean up
        email = email.strip().lower()
        validate_email(email)

        # Check if the email exists
        if not User.objects.filter(email=email).exists():
            # Create a user
            _ = User.objects.create(
                email=email,
                username=uuid.uuid4().hex,
                password=make_password(uuid.uuid4().hex),
                is_password_autoset=True,
            )

        ## Generate a random token
        token = (
            "".join(random.choices(string.ascii_lowercase, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase, k=4))
            + "-"
            + "".join(random.choices(string.ascii_lowercase, k=4))
        )

        ri = redis_instance()

        key = "magic_" + str(email)

        # Check if the key already exists in Redis
        if ri.exists(key):
            data = json.loads(ri.get(key))

            current_attempt = data["current_attempt"] + 1

            if data["current_attempt"] > 2:
                return Response(
                    {"error": "Max attempts exhausted. Please try again later."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            value = {
                "current_attempt": current_attempt,
                "email": email,
                "token": token,
            }
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

        else:
            value = {"current_attempt": 0, "email": email, "token": token}
            expiry = 600

            ri.set(key, json.dumps(value), ex=expiry)

        # If SMTP is configured, the magic link is sent from here
        current_site = request.META.get("HTTP_ORIGIN")
        magic_link.delay(email, key, token, current_site)

        return Response({"key": key}, status=status.HTTP_200_OK)
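The attempt counter above caps magic-code requests at three per key before the Redis entry expires. A self-contained sketch of the same pattern, with an in-memory dict standing in for Redis (illustrative only):

```python
import json
import random
import string

store = {}  # stand-in for Redis; real entries expire after 600 seconds

def generate_code():
    # Same xxxx-xxxx-xxxx shape as the endpoint above
    return "-".join(
        "".join(random.choices(string.ascii_lowercase, k=4)) for _ in range(3)
    )

def request_magic_code(email):
    key = "magic_" + email
    if key in store:
        data = json.loads(store[key])
        if data["current_attempt"] > 2:
            raise RuntimeError("Max attempts exhausted. Please try again later.")
        attempt = data["current_attempt"] + 1
    else:
        attempt = 0
    # Each request overwrites the stored code and bumps the attempt counter
    store[key] = json.dumps(
        {"current_attempt": attempt, "email": email, "token": generate_code()}
    )
    return key
```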
class AdminSetupMagicSignInEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def post(self, request):
        user_token = request.data.get("token", "").strip()
        key = request.data.get("key", "").strip().lower()

        if not key or user_token == "":
            return Response(
                {"error": "User token and key are required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if InstanceAdmin.objects.first():
            return Response(
                {"error": "Admin for this instance is already registered"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        ri = redis_instance()

        if ri.exists(key):
            data = json.loads(ri.get(key))

            token = data["token"]
            email = data["email"]

            if str(token) == str(user_token):
                # Get the user
                user = User.objects.get(email=email)
                # Activate the user and record the login
                user.is_active = True
                user.is_email_verified = True
                user.last_active = timezone.now()
                user.last_login_time = timezone.now()
                user.last_login_ip = request.META.get("REMOTE_ADDR")
                user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
                user.token_updated_at = timezone.now()
                user.save()

                access_token, refresh_token = get_tokens_for_user(user)
                data = {
                    "access_token": access_token,
                    "refresh_token": refresh_token,
                }

                return Response(data, status=status.HTTP_200_OK)

            else:
                return Response(
                    {"error": "Your login code was incorrect. Please try again."},
                    status=status.HTTP_400_BAD_REQUEST,
                )

        else:
            return Response(
                {"error": "The magic code/link has expired. Please try again."},
                status=status.HTTP_400_BAD_REQUEST,
            )

class AdminSetUserPasswordEndpoint(BaseAPIView):
    def post(self, request):
        user = User.objects.get(pk=request.user.id)
        password = request.data.get("password", False)

        # If the user password is not autoset then return error
        if not user.is_password_autoset:
            return Response(
                {
                    "error": "Your password is already set. Please change it from your profile."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Check password validation
        if not password or len(str(password)) < 8:
            return Response(
                {"error": "Password is not valid"}, status=status.HTTP_400_BAD_REQUEST
            )

        instance = Instance.objects.first()
        if instance is None:
            return Response(
                {"error": "Instance is not configured"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # Save the user in the control center
        headers = {
            "Content-Type": "application/json",
            "x-instance-id": instance.instance_id,
            "x-api-key": instance.api_key,
        }
        _ = requests.patch(
            f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/",
            headers=headers,
            data=json.dumps({"is_setup_done": True}),
        )

        # Also register the user as admin
        _ = requests.post(
            f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/users/register/",
            headers=headers,
            data=json.dumps(
                {
                    "email": str(user.email),
                    "signup_mode": "MAGIC_CODE",
                    "is_admin": True,
                }
            ),
        )

        # Register the user as an instance admin
        _ = InstanceAdmin.objects.create(
            user=user,
            instance=instance,
        )
        # Make the setup flag True
        instance.is_setup_done = True
        instance.save()

        # Set the user password
        user.set_password(password)
        user.is_password_autoset = False
        user.save()
        serializer = UserSerializer(user)
        return Response(serializer.data, status=status.HTTP_200_OK)

class SignUpScreenVisitedEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def post(self, request):
        instance = Instance.objects.first()

        if instance is None:
            return Response(
                {"error": "Instance is not configured"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if not instance.is_signup_screen_visited:
            instance.is_signup_screen_visited = True
            instance.save()
            # Set the headers
            headers = {
                "Content-Type": "application/json",
                "x-instance-id": instance.instance_id,
                "x-api-key": instance.api_key,
            }
            # Create the payload
            payload = {"is_signup_screen_visited": True}
            _ = requests.patch(
                f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/",
                headers=headers,
                data=json.dumps(payload),
            )
        return Response(status=status.HTTP_204_NO_CONTENT)
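Read together, the three admin endpoints form a first-run flow: generate a magic code, exchange it for JWTs, then replace the autoset password. A hedged end-to-end sketch; the /api/licenses/ URL prefix is an assumption:

```python
import requests

BASE = "https://plane.example.com/api/licenses"  # prefix is an assumption

# 1. Request a magic code for the future instance admin
key = requests.post(
    f"{BASE}/instances/admins/magic-generate/", json={"email": "admin@example.com"}
).json()["key"]

code = input("Enter the code from the email: ")  # e.g. "abcd-efgh-ijkl"

# 2. Exchange code + key for access/refresh tokens
tokens = requests.post(
    f"{BASE}/instances/admins/magic-sign-in/", json={"key": key, "token": code}
).json()

# 3. Replace the autoset password (must be at least 8 characters)
requests.post(
    f"{BASE}/instances/admins/set-password/",
    headers={"Authorization": f"Bearer {tokens['access_token']}"},
    json={"password": "a-strong-password"},
)
```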
@@ -2,48 +2,120 @@
import os

# Django imports
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from django.core.management.base import BaseCommand
from django.conf import settings

# Module imports
from plane.license.models import InstanceConfiguration


class Command(BaseCommand):
    help = "Configure instance variables"

    def handle(self, *args, **options):
        config_keys = {
            # Authentication Settings
            "GOOGLE_CLIENT_ID": os.environ.get("GOOGLE_CLIENT_ID"),
            "GOOGLE_CLIENT_SECRET": os.environ.get("GOOGLE_CLIENT_SECRET"),
            "GITHUB_CLIENT_ID": os.environ.get("GITHUB_CLIENT_ID"),
            "GITHUB_CLIENT_SECRET": os.environ.get("GITHUB_CLIENT_SECRET"),
            "ENABLE_SIGNUP": os.environ.get("ENABLE_SIGNUP", "1"),
            "ENABLE_EMAIL_PASSWORD": os.environ.get("ENABLE_EMAIL_PASSWORD", "1"),
            "ENABLE_MAGIC_LINK_LOGIN": os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0"),
            # Email Settings
            "EMAIL_HOST": os.environ.get("EMAIL_HOST", ""),
            "EMAIL_HOST_USER": os.environ.get("EMAIL_HOST_USER", ""),
            "EMAIL_HOST_PASSWORD": os.environ.get("EMAIL_HOST_PASSWORD"),
            "EMAIL_PORT": os.environ.get("EMAIL_PORT", "587"),
            "EMAIL_FROM": os.environ.get("EMAIL_FROM", ""),
            "EMAIL_USE_TLS": os.environ.get("EMAIL_USE_TLS", "1"),
            "EMAIL_USE_SSL": os.environ.get("EMAIL_USE_SSL", "0"),
            # Open AI Settings
            "OPENAI_API_BASE": os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1"),
            "OPENAI_API_KEY": os.environ.get("OPENAI_API_KEY", ""),
            "GPT_ENGINE": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
            # Unsplash Access Key
            "UNSPLASH_ACCESS_KEY": os.environ.get("UNSPLASH_ACESS_KEY", "")
        }
        from plane.license.utils.encryption import encrypt_data

        for key, value in config_keys.items():
        config_keys = [
            # Authentication Settings
            {
                "key": "ENABLE_SIGNUP",
                "value": os.environ.get("ENABLE_SIGNUP", "1"),
                "category": "AUTHENTICATION",
            },
            {
                "key": "ENABLE_EMAIL_PASSWORD",
                "value": os.environ.get("ENABLE_EMAIL_PASSWORD", "1"),
                "category": "AUTHENTICATION",
            },
            {
                "key": "ENABLE_MAGIC_LINK_LOGIN",
                "value": os.environ.get("ENABLE_MAGIC_LINK_LOGIN", "0"),
                "category": "AUTHENTICATION",
            },
            {
                "key": "GOOGLE_CLIENT_ID",
                "value": os.environ.get("GOOGLE_CLIENT_ID"),
                "category": "GOOGLE",
            },
            {
                "key": "GITHUB_CLIENT_ID",
                "value": os.environ.get("GITHUB_CLIENT_ID"),
                "category": "GITHUB",
            },
            {
                "key": "GITHUB_CLIENT_SECRET",
                "value": encrypt_data(os.environ.get("GITHUB_CLIENT_SECRET"))
                if os.environ.get("GITHUB_CLIENT_SECRET")
                else None,
                "category": "GITHUB",
            },
            {
                "key": "EMAIL_HOST",
                "value": os.environ.get("EMAIL_HOST", ""),
                "category": "SMTP",
            },
            {
                "key": "EMAIL_HOST_USER",
                "value": os.environ.get("EMAIL_HOST_USER", ""),
                "category": "SMTP",
            },
            {
                "key": "EMAIL_HOST_PASSWORD",
                "value": encrypt_data(os.environ.get("EMAIL_HOST_PASSWORD"))
                if os.environ.get("EMAIL_HOST_PASSWORD")
                else None,
                "category": "SMTP",
            },
            {
                "key": "EMAIL_PORT",
                "value": os.environ.get("EMAIL_PORT", "587"),
                "category": "SMTP",
            },
            {
                "key": "EMAIL_FROM",
                "value": os.environ.get("EMAIL_FROM", ""),
                "category": "SMTP",
            },
            {
                "key": "EMAIL_USE_TLS",
                "value": os.environ.get("EMAIL_USE_TLS", "1"),
                "category": "SMTP",
            },
            {
                "key": "OPENAI_API_KEY",
                "value": encrypt_data(os.environ.get("OPENAI_API_KEY"))
                if os.environ.get("OPENAI_API_KEY")
                else None,
                "category": "OPENAI",
            },
            {
                "key": "GPT_ENGINE",
                "value": os.environ.get("GPT_ENGINE", "gpt-3.5-turbo"),
                "category": "SMTP",
            },
            {
                "key": "UNSPLASH_ACCESS_KEY",
                "value": encrypt_data(os.environ.get("UNSPLASH_ACESS_KEY", ""))
                if os.environ.get("UNSPLASH_ACESS_KEY")
                else None,
                "category": "UNSPLASH",
            },
        ]

        for item in config_keys:
            obj, created = InstanceConfiguration.objects.get_or_create(
                key=key
                key=item.get("key")
            )
            if created:
                obj.value = value
                obj.value = item.get("value")
                obj.category = item.get("category")
                obj.save()
                self.stdout.write(self.style.SUCCESS(f"{key} loaded with value from environment variable."))
                self.stdout.write(
                    self.style.SUCCESS(
                        f"{obj.key} loaded with value from environment variable."
                    )
                )
            else:
                self.stdout.write(self.style.WARNING(f"{key} configuration already exists"))
                self.stdout.write(
                    self.style.WARNING(f"{obj.key} configuration already exists")
                )
@@ -2,22 +2,24 @@
import json
import os
import requests
import uuid

# Django imports
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.conf import settings

# Module imports
from plane.license.models import Instance
from plane.db.models import User
from plane.license.models import Instance, InstanceAdmin


class Command(BaseCommand):
    help = "Check if the instance is registered, else register it"

    def add_arguments(self, parser):
        # Positional argument
        parser.add_argument('machine_signature', type=str, help='Machine signature')

    def handle(self, *args, **options):
        # Check if the instance is registered
        instance = Instance.objects.first()
@@ -28,40 +30,23 @@ class Command(BaseCommand):
            # Load JSON content from the file
            data = json.load(file)

            admin_email = os.environ.get("ADMIN_EMAIL")
            machine_signature = options.get("machine_signature", False)

            try:
                validate_email(admin_email)
            except ValidationError:
                raise CommandError(f"{admin_email} is not a valid ADMIN_EMAIL")

            # Raise an exception if the admin email is not provided
            if not admin_email:
                raise CommandError("ADMIN_EMAIL is required")

            # Check if the admin email user exists
            user = User.objects.filter(email=admin_email).first()

            # If the user does not exist, create the user and add them to the database
            if user is None:
                user = User.objects.create(email=admin_email, username=uuid.uuid4().hex)
                user.set_password(admin_email)
                user.save()

            license_engine_base_url = os.environ.get("LICENSE_ENGINE_BASE_URL")

            if not license_engine_base_url:
                raise CommandError("LICENSE_ENGINE_BASE_URL is required")
            if not machine_signature:
                raise CommandError("Machine signature is required")

            headers = {"Content-Type": "application/json"}

            payload = {
                "email": user.email,
                "instance_key": settings.INSTANCE_KEY,
                "version": data.get("version", 0.1),
                "machine_signature": machine_signature,
                "user_count": User.objects.filter(is_bot=False).count(),
            }

            response = requests.post(
                f"{license_engine_base_url}/api/instances/",
                f"{settings.LICENSE_ENGINE_BASE_URL}/api/instances/",
                headers=headers,
                data=json.dumps(payload),
            )
@@ -75,20 +60,13 @@ class Command(BaseCommand):
                license_key=data.get("license_key"),
                api_key=data.get("api_key"),
                version=data.get("version"),
                primary_email=data.get("email"),
                primary_owner=user,
                last_checked_at=timezone.now(),
            )
            # Create instance admin
            _ = InstanceAdmin.objects.create(
                user=user,
                instance=instance,
                role=20,
                user_count=data.get("user_count", 0),
            )

            self.stdout.write(
                self.style.SUCCESS(
                    f"Instance successfully registered with owner: {instance.primary_owner.email}"
                    "Instance successfully registered"
                )
            )
            return
@@ -96,7 +74,7 @@ class Command(BaseCommand):
        else:
            self.stdout.write(
                self.style.SUCCESS(
                    f"Instance already registered with instance owner: {instance.primary_owner.email}"
                    "Instance already registered"
                )
            )
            return
@@ -1,4 +1,4 @@
# Generated by Django 4.2.5 on 2023-11-15 14:22
# Generated by Django 4.2.7 on 2023-11-29 14:39

from django.conf import settings
from django.db import migrations, models
@@ -27,13 +27,14 @@ class Migration(migrations.Migration):
                ('license_key', models.CharField(blank=True, max_length=256, null=True)),
                ('api_key', models.CharField(max_length=16)),
                ('version', models.CharField(max_length=10)),
                ('primary_email', models.CharField(max_length=256)),
                ('last_checked_at', models.DateTimeField()),
                ('namespace', models.CharField(blank=True, max_length=50, null=True)),
                ('is_telemetry_enabled', models.BooleanField(default=True)),
                ('is_support_required', models.BooleanField(default=True)),
                ('is_setup_done', models.BooleanField(default=False)),
                ('is_signup_screen_visited', models.BooleanField(default=False)),
                ('user_count', models.PositiveBigIntegerField(default=0)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('primary_owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='instance_primary_owner', to=settings.AUTH_USER_MODEL)),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
            ],
            options={
@@ -51,6 +52,7 @@ class Migration(migrations.Migration):
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('key', models.CharField(max_length=100, unique=True)),
                ('value', models.TextField(blank=True, default=None, null=True)),
                ('category', models.TextField()),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
            ],
@@ -67,7 +69,7 @@ class Migration(migrations.Migration):
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
                ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
                ('role', models.PositiveIntegerField(choices=[(20, 'Owner'), (15, 'Admin')], default=15)),
                ('role', models.PositiveIntegerField(choices=[(20, 'Admin')], default=20)),
                ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
                ('instance', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='admins', to='license.instance')),
                ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
@@ -78,6 +80,7 @@ class Migration(migrations.Migration):
                'verbose_name_plural': 'Instance Admins',
                'db_table': 'instance_admins',
                'ordering': ('-created_at',),
                'unique_together': {('instance', 'user')},
            },
        ),
    ]
@@ -1,19 +0,0 @@
# Generated by Django 4.2.5 on 2023-11-16 09:45

from django.conf import settings
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('license', '0001_initial'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='instanceadmin',
            unique_together={('instance', 'user')},
        ),
    ]
@@ -4,11 +4,9 @@ from django.conf import settings

# Module imports
from plane.db.models import BaseModel
from plane.db.mixins import AuditModel

ROLE_CHOICES = (
    (20, "Owner"),
    (15, "Admin"),
    (20, "Admin"),
)


@@ -20,20 +18,18 @@ class Instance(BaseModel):
    license_key = models.CharField(max_length=256, null=True, blank=True)
    api_key = models.CharField(max_length=16)
    version = models.CharField(max_length=10)
    # User information
    primary_email = models.CharField(max_length=256)
    primary_owner = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        related_name="instance_primary_owner",
    )
    # Instance specifics
    last_checked_at = models.DateTimeField()
    namespace = models.CharField(max_length=50, blank=True, null=True)
    # telemetry and support
    is_telemetry_enabled = models.BooleanField(default=True)
    is_support_required = models.BooleanField(default=True)
    # is setup done
    is_setup_done = models.BooleanField(default=False)
    # signup screen
    is_signup_screen_visited = models.BooleanField(default=False)
    # users
    user_count = models.PositiveBigIntegerField(default=0)

    class Meta:
        verbose_name = "Instance"
@@ -50,7 +46,7 @@ class InstanceAdmin(BaseModel):
        related_name="instance_owner",
    )
    instance = models.ForeignKey(Instance, on_delete=models.CASCADE, related_name="admins")
    role = models.PositiveIntegerField(choices=ROLE_CHOICES, default=15)
    role = models.PositiveIntegerField(choices=ROLE_CHOICES, default=20)

    class Meta:
        unique_together = ["instance", "user"]
@@ -64,6 +60,7 @@ class InstanceConfiguration(BaseModel):
    # The instance configuration variables
    key = models.CharField(max_length=100, unique=True)
    value = models.TextField(null=True, blank=True, default=None)
    category = models.TextField()

    class Meta:
        verbose_name = "Instance Configuration"
@@ -2,9 +2,12 @@ from django.urls import path

from plane.license.api.views import (
    InstanceEndpoint,
    TransferPrimaryOwnerEndpoint,
    InstanceAdminEndpoint,
    InstanceConfigurationEndpoint,
    AdminMagicSignInGenerateEndpoint,
    AdminSetupMagicSignInEndpoint,
    AdminSetUserPasswordEndpoint,
    SignUpScreenVisitedEndpoint,
)

urlpatterns = [
@@ -13,11 +16,6 @@ urlpatterns = [
        InstanceEndpoint.as_view(),
        name="instance",
    ),
    path(
        "instances/transfer-primary-owner/",
        TransferPrimaryOwnerEndpoint.as_view(),
        name="instance",
    ),
    path(
        "instances/admins/",
        InstanceAdminEndpoint.as_view(),
@@ -33,4 +31,24 @@ urlpatterns = [
        InstanceConfigurationEndpoint.as_view(),
        name="instance-configuration",
    ),
    path(
        "instances/admins/magic-generate/",
        AdminMagicSignInGenerateEndpoint.as_view(),
        name="instance-admins",
    ),
    path(
        "instances/admins/magic-sign-in/",
        AdminSetupMagicSignInEndpoint.as_view(),
        name="instance-admins",
    ),
    path(
        "instances/admins/set-password/",
        AdminSetUserPasswordEndpoint.as_view(),
        name="instance-admins",
    ),
    path(
        "instances/admins/sign-up-screen-visited/",
        SignUpScreenVisitedEndpoint.as_view(),
        name="instance-sign-up",
    ),
]
22 apiserver/plane/license/utils/encryption.py Normal file
@@ -0,0 +1,22 @@
import base64
import hashlib
from django.conf import settings
from cryptography.fernet import Fernet


def derive_key(secret_key):
    # Use a key derivation function to get a suitable encryption key
    dk = hashlib.pbkdf2_hmac('sha256', secret_key.encode(), b'salt', 100000)
    return base64.urlsafe_b64encode(dk)


def encrypt_data(data):
    cipher_suite = Fernet(derive_key(settings.SECRET_KEY))
    encrypted_data = cipher_suite.encrypt(data.encode())
    return encrypted_data.decode()  # Convert bytes to string


def decrypt_data(encrypted_data):
    cipher_suite = Fernet(derive_key(settings.SECRET_KEY))
    decrypted_data = cipher_suite.decrypt(encrypted_data.encode())  # Convert string back to bytes
    return decrypted_data.decode()
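A quick round-trip sketch of these helpers outside Django; the stand-in SECRET_KEY and the secret value are illustrative, and the derivation mirrors derive_key above, including its static b'salt':

```python
import base64
import hashlib

from cryptography.fernet import Fernet

SECRET_KEY = "django-insecure-example"  # stand-in for settings.SECRET_KEY

def derive_key(secret_key):
    # Same PBKDF2-HMAC-SHA256 derivation as the new utility (static salt, 100k iterations)
    dk = hashlib.pbkdf2_hmac("sha256", secret_key.encode(), b"salt", 100000)
    return base64.urlsafe_b64encode(dk)

cipher = Fernet(derive_key(SECRET_KEY))
token = cipher.encrypt("smtp-password".encode()).decode()  # stored as text in the DB
plain = cipher.decrypt(token.encode()).decode()            # decrypted for display
assert plain == "smtp-password"
```

One consequence of the static salt: the derived key depends only on SECRET_KEY, so rotating SECRET_KEY would make previously stored ciphertexts undecryptable.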
@@ -1,6 +1,63 @@
import os


# Helper function to return value from the passed key
def get_configuration_value(query, key, default=None):
    for item in query:
        if item['key'] == key:
        if item["key"] == key:
            return item.get("value", default)
    return default


def get_email_configuration(instance_configuration):
    # Get the configuration variables
    EMAIL_HOST_USER = get_configuration_value(
        instance_configuration,
        "EMAIL_HOST_USER",
        os.environ.get("EMAIL_HOST_USER", None),
    )

    EMAIL_HOST_PASSWORD = get_configuration_value(
        instance_configuration,
        "EMAIL_HOST_PASSWORD",
        os.environ.get("EMAIL_HOST_PASSWORD", None),
    )

    EMAIL_HOST = get_configuration_value(
        instance_configuration,
        "EMAIL_HOST",
        os.environ.get("EMAIL_HOST", None),
    )

    EMAIL_FROM = get_configuration_value(
        instance_configuration,
        "EMAIL_FROM",
        os.environ.get("EMAIL_FROM", None),
    )

    EMAIL_USE_TLS = get_configuration_value(
        instance_configuration,
        "EMAIL_USE_TLS",
        os.environ.get("EMAIL_USE_TLS", "1"),
    )

    EMAIL_PORT = get_configuration_value(
        instance_configuration,
        "EMAIL_PORT",
        587,
    )

    EMAIL_FROM = get_configuration_value(
        instance_configuration,
        "EMAIL_FROM",
        os.environ.get("EMAIL_FROM", "Team Plane <team@mailer.plane.so>"),
    )

    return (
        EMAIL_HOST,
        EMAIL_HOST_USER,
        EMAIL_HOST_PASSWORD,
        EMAIL_PORT,
        EMAIL_USE_TLS,
        EMAIL_FROM,
    )
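get_configuration_value scans the key/value rows and falls back to the supplied default, which in get_email_configuration is the matching environment variable. A small sketch of that resolution order (the row values are illustrative):

```python
rows = [
    {"key": "EMAIL_HOST", "value": "smtp.example.com"},
    {"key": "EMAIL_PORT", "value": None},
]

def get_configuration_value(query, key, default=None):
    for item in query:
        if item["key"] == key:
            return item.get("value", default)
    return default

# A DB row wins when present...
assert get_configuration_value(rows, "EMAIL_HOST", "localhost") == "smtp.example.com"
# ...a missing key falls back to the default (typically os.environ.get(...))
assert get_configuration_value(rows, "EMAIL_USE_TLS", "1") == "1"
# Note: a row whose value is None is returned as None, not replaced by the default
assert get_configuration_value(rows, "EMAIL_PORT", "587") is None
```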
@@ -75,10 +75,6 @@ REST_FRAMEWORK = {
    "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
    "DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",),
    "DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",),
    "DEFAULT_THROTTLE_CLASSES": ("plane.api.rate_limit.ApiKeyRateThrottle",),
    "DEFAULT_THROTTLE_RATES": {
        "api_key": "60/minute",
    },
}

# Django Auth Backend
@@ -189,6 +185,9 @@ AUTH_PASSWORD_VALIDATORS = [
    },
]

# Password reset timeout: the number of seconds the uniquely generated uid will be valid
PASSWORD_RESET_TIMEOUT = 3600

# Static files (CSS, JavaScript, Images)
STATIC_URL = "/static/"
STATIC_ROOT = os.path.join(BASE_DIR, "static-assets", "collected-static")
@@ -240,7 +239,7 @@ if AWS_S3_ENDPOINT_URL:

# JWT Auth Configuration
SIMPLE_JWT = {
    "ACCESS_TOKEN_LIFETIME": timedelta(minutes=10080),
    "ACCESS_TOKEN_LIFETIME": timedelta(minutes=43200),
    "REFRESH_TOKEN_LIFETIME": timedelta(days=43200),
    "ROTATE_REFRESH_TOKENS": False,
    "BLACKLIST_AFTER_ROTATION": False,
@@ -310,7 +309,6 @@ if bool(os.environ.get("SENTRY_DSN", False)):
PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False)  # For External
SLACK_BOT_TOKEN = os.environ.get("SLACK_BOT_TOKEN", False)
FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
ENABLE_SIGNUP = os.environ.get("ENABLE_SIGNUP", "1") == "1"

# Unsplash Access key
UNSPLASH_ACCESS_KEY = os.environ.get("UNSPLASH_ACCESS_KEY")
@@ -326,4 +324,10 @@ USE_MINIO = int(os.environ.get("USE_MINIO", 0)) == 1

# Posthog settings
POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY", False)
POSTHOG_HOST = os.environ.get("POSTHOG_HOST", False)

# License engine base url
LICENSE_ENGINE_BASE_URL = os.environ.get("LICENSE_ENGINE_BASE_URL", "https://control-center.plane.so")

# Instance key
INSTANCE_KEY = os.environ.get("INSTANCE_KEY", "ae6517d563dfc13d8270bd45cf17b08f70b37d989128a9dab46ff687603333c3")
@@ -4,6 +4,7 @@ from django.urls import path
from plane.space.views import (
    ProjectDeployBoardPublicSettingsEndpoint,
    ProjectIssuesPublicEndpoint,
    ProjectIssuesPublicGroupedEndpoint,
)

urlpatterns = [
@@ -17,4 +18,9 @@ urlpatterns = [
        ProjectIssuesPublicEndpoint.as_view(),
        name="project-deploy-board",
    ),
    path(
        "v3/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
        ProjectIssuesPublicGroupedEndpoint.as_view(),
        name="project-deploy-board",
    ),
]
@@ -10,6 +10,7 @@ from .issue import (
    IssueVotePublicViewSet,
    IssueRetrievePublicEndpoint,
    ProjectIssuesPublicEndpoint,
    ProjectIssuesPublicGroupedEndpoint,
)

from .inbox import InboxIssuePublicViewSet
@@ -653,4 +653,165 @@ class ProjectIssuesPublicEndpoint(BaseAPIView):
                "labels": labels,
            },
            status=status.HTTP_200_OK,
        )


class ProjectIssuesPublicGroupedEndpoint(BaseAPIView):
    permission_classes = [
        AllowAny,
    ]

    def get(self, request, slug, project_id):
        project_deploy_board = ProjectDeployBoard.objects.get(
            workspace__slug=slug, project_id=project_id
        )
        filters = issue_filters(request.query_params, "GET")

        # Custom ordering for priority and state
        priority_order = ["urgent", "high", "medium", "low", "none"]
        state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]

        order_by_param = request.GET.get("order_by", "-created_at")

        issue_queryset = (
            Issue.issue_objects.annotate(
                sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .filter(project_id=project_id)
            .filter(workspace__slug=slug)
            .select_related("project", "workspace", "state", "parent")
            .prefetch_related("assignees", "labels")
            .prefetch_related(
                Prefetch(
                    "issue_reactions",
                    queryset=IssueReaction.objects.select_related("actor"),
                )
            )
            .prefetch_related(
                Prefetch(
                    "votes",
                    queryset=IssueVote.objects.select_related("actor"),
                )
            )
            .filter(**filters)
            .annotate(cycle_id=F("issue_cycle__cycle_id"))
            .annotate(module_id=F("issue_module__module_id"))
            .annotate(
                link_count=IssueLink.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
        )

        # Priority Ordering
        if order_by_param == "priority" or order_by_param == "-priority":
            priority_order = (
                priority_order if order_by_param == "priority" else priority_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                priority_order=Case(
                    *[
                        When(priority=p, then=Value(i))
                        for i, p in enumerate(priority_order)
                    ],
                    output_field=CharField(),
                )
            ).order_by("priority_order")

        # State Ordering
        elif order_by_param in [
            "state__name",
            "state__group",
            "-state__name",
            "-state__group",
        ]:
            state_order = (
                state_order
                if order_by_param in ["state__name", "state__group"]
                else state_order[::-1]
            )
            issue_queryset = issue_queryset.annotate(
                state_order=Case(
                    *[
                        When(state__group=state_group, then=Value(i))
                        for i, state_group in enumerate(state_order)
                    ],
                    default=Value(len(state_order)),
                    output_field=CharField(),
                )
            ).order_by("state_order")
        # Assignee and label ordering
        elif order_by_param in [
            "labels__name",
            "-labels__name",
            "assignees__first_name",
            "-assignees__first_name",
        ]:
            issue_queryset = issue_queryset.annotate(
                max_values=Max(
                    order_by_param[1::]
                    if order_by_param.startswith("-")
                    else order_by_param
                )
            ).order_by(
                "-max_values" if order_by_param.startswith("-") else "max_values"
            )
        else:
            issue_queryset = issue_queryset.order_by(order_by_param)

        state_group_order = [
            "backlog",
            "unstarted",
            "started",
            "completed",
            "cancelled",
        ]

        states = (
            State.objects.filter(
                ~Q(name="Triage"),
                workspace__slug=slug,
                project_id=project_id,
            )
            .annotate(
                custom_order=Case(
                    *[
                        When(group=value, then=Value(index))
                        for index, value in enumerate(state_group_order)
                    ],
                    default=Value(len(state_group_order)),
                    output_field=IntegerField(),
                ),
            )
            .values("name", "group", "color", "id")
            .order_by("custom_order", "sequence")
        )

        labels = Label.objects.filter(
            workspace__slug=slug, project_id=project_id
        ).values("id", "name", "color", "parent")

        issues = IssuePublicSerializer(issue_queryset, many=True).data
        issue_dict = {str(issue["id"]): issue for issue in issues}
        state_dict = {str(state["id"]): state for state in states}
        label_dict = {str(label["id"]): label for label in labels}

        return Response(
            {
                "issues": issue_dict,
                "states": state_dict,
                "labels": label_dict,
            },
            status=status.HTTP_200_OK,
        )
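The custom priority/state ordering above maps each label to its rank with a Case/When annotation. A minimal illustration of the same pattern on a bare queryset; the model and its priority field are placeholders, not names from the diff:

```python
from django.db.models import Case, IntegerField, Value, When

PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]

def order_by_priority(queryset, descending=False):
    # Map each priority label to its rank, then sort by that rank
    order = PRIORITY_ORDER[::-1] if descending else PRIORITY_ORDER
    return queryset.annotate(
        priority_rank=Case(
            *[When(priority=p, then=Value(i)) for i, p in enumerate(order)],
            default=Value(len(order)),  # unknown values sort last
            output_field=IntegerField(),
        )
    ).order_by("priority_rank")
```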
@@ -36,4 +36,5 @@ scout-apm==2.26.1
openpyxl==3.1.2
beautifulsoup4==4.12.2
dj-database-url==2.1.0
posthog==3.0.2
cryptography==41.0.5
@@ -1,11 +0,0 @@
<!DOCTYPE html>
<html>
<p>
    Dear {{first_name}},<br /><br />
    Welcome! Your account has been created.
    Verify your email by clicking on the link below <br />
    {{verification_url}}
    successfully.<br /><br />
</p>

</html>
File diff suppressed because it is too large
@@ -1,9 +0,0 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html>
Dear {{username}},<br/>
Your requested Issue's data has been successfully exported from Plane. The export includes all relevant information about issues you requested from your selected projects.</br>
Please find the attachment and download the CSV file. If you have any questions or need further assistance, please don't hesitate to contact our support team at <a href="mailto:engineering@plane.so">engineering@plane.so</a>. We're here to help!</br>
Thank you for using Plane. We hope this export will aid you in effectively managing your projects.</br>
Regards,
Team Plane
</html>
File diff suppressed because it is too large
8 deploy/coolify/README.md Normal file
@@ -0,0 +1,8 @@
## Coolify Setup

Access the `coolify-docker-compose` file [here](https://raw.githubusercontent.com/makeplane/plane/master/deploy/coolify/coolify-docker-compose.yml) or download it using the command below:

```
curl -fsSL -o coolify-docker-compose.yml https://raw.githubusercontent.com/makeplane/plane/master/deploy/coolify/coolify-docker-compose.yml
```
232
deploy/coolify/coolify-docker-compose.yml
Normal file
232
deploy/coolify/coolify-docker-compose.yml
Normal file
@@ -0,0 +1,232 @@
services:
  web:
    container_name: web
    platform: linux/amd64
    image: makeplane/plane-frontend:latest
    restart: always
    command: /usr/local/bin/start.sh web/server.js web
    environment:
      - NEXT_PUBLIC_ENABLE_OAUTH=${NEXT_PUBLIC_ENABLE_OAUTH:-0}
      - NEXT_PUBLIC_DEPLOY_URL=$SERVICE_FQDN_SPACE_8082
    depends_on:
      - api
      - worker

  space:
    container_name: space
    platform: linux/amd64
    image: makeplane/plane-space:latest
    restart: always
    command: /usr/local/bin/start.sh space/server.js space
    environment:
      - SERVICE_FQDN_SPACE_8082=/api
      - NEXT_PUBLIC_ENABLE_OAUTH=${NEXT_PUBLIC_ENABLE_OAUTH:-0}
    depends_on:
      - api
      - worker
      - web

  api:
    container_name: api
    platform: linux/amd64
    image: makeplane/plane-backend:latest
    restart: always
    command: ./bin/takeoff
    environment:
      - DEBUG=${DEBUG:-0}
      - SENTRY_DSN=${SENTRY_DSN:-""}
      - PGUSER=${PGUSER:-plane}
      - PGPASSWORD=${PGPASSWORD:-plane}
      - PGHOST=${PGHOST:-plane-db}
      - PGDATABASE=${PGDATABASE:-plane}
      - DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
      - REDIS_HOST=${REDIS_HOST:-plane-redis}
      - REDIS_PORT=${REDIS_PORT:-6379}
      - REDIS_URL=redis://${REDIS_HOST}:6379/
      - EMAIL_HOST=${EMAIL_HOST:-""}
      - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""}
      - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""}
      - EMAIL_PORT=${EMAIL_PORT:-587}
      - EMAIL_FROM=${EMAIL_FROM:-Team Plane <team@mailer.plane.so>}
      - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1}
      - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0}
      - AWS_REGION=${AWS_REGION:-""}
      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key}
      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key}
      - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
      - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
      - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
      - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1}
      - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-}
      - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo}
      - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
      - DOCKERIZED=${DOCKERIZED:-1}
      - USE_MINIO=${USE_MINIO:-1}
      - NGINX_PORT=${NGINX_PORT:-8082}
      - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so}
      - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123}
      - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1}
      - ENABLE_EMAIL_PASSWORD=${ENABLE_EMAIL_PASSWORD:-1}
      - ENABLE_MAGIC_LINK_LOGIN=${ENABLE_MAGIC_LINK_LOGIN:-0}
      - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
      - WEB_URL=$SERVICE_FQDN_PLANE_8082
    depends_on:
      - plane-db
      - plane-redis

  worker:
    container_name: bgworker
    platform: linux/amd64
    image: makeplane/plane-backend:latest
    restart: always
    command: ./bin/worker
    environment:
      - DEBUG=${DEBUG:-0}
      - SENTRY_DSN=${SENTRY_DSN:-""}
      - PGUSER=${PGUSER:-plane}
      - PGPASSWORD=${PGPASSWORD:-plane}
      - PGHOST=${PGHOST:-plane-db}
      - PGDATABASE=${PGDATABASE:-plane}
      - DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
      - REDIS_HOST=${REDIS_HOST:-plane-redis}
      - REDIS_PORT=${REDIS_PORT:-6379}
      - REDIS_URL=redis://${REDIS_HOST}:6379/
      - EMAIL_HOST=${EMAIL_HOST:-""}
      - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""}
      - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""}
      - EMAIL_PORT=${EMAIL_PORT:-587}
      - EMAIL_FROM=${EMAIL_FROM:-Team Plane <team@mailer.plane.so>}
      - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1}
      - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0}
      - AWS_REGION=${AWS_REGION:-""}
      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key}
      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key}
      - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
      - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
      - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
      - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1}
      - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-}
      - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo}
      - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
      - DOCKERIZED=${DOCKERIZED:-1}
      - USE_MINIO=${USE_MINIO:-1}
      - NGINX_PORT=${NGINX_PORT:-8082}
      - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so}
      - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123}
      - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1}
      - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
    depends_on:
      - api
      - plane-db
      - plane-redis

  beat-worker:
    container_name: beatworker
    platform: linux/amd64
    image: makeplane/plane-backend:latest
    restart: always
    command: ./bin/beat
    environment:
      - DEBUG=${DEBUG:-0}
      - SENTRY_DSN=${SENTRY_DSN:-""}
      - PGUSER=${PGUSER:-plane}
      - PGPASSWORD=${PGPASSWORD:-plane}
      - PGHOST=${PGHOST:-plane-db}
      - PGDATABASE=${PGDATABASE:-plane}
      - DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
      - REDIS_HOST=${REDIS_HOST:-plane-redis}
      - REDIS_PORT=${REDIS_PORT:-6379}
      - REDIS_URL=redis://${REDIS_HOST}:6379/
      - EMAIL_HOST=${EMAIL_HOST:-""}
      - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""}
      - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""}
      - EMAIL_PORT=${EMAIL_PORT:-587}
      - EMAIL_FROM=${EMAIL_FROM:-Team Plane <team@mailer.plane.so>}
      - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1}
      - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0}
      - AWS_REGION=${AWS_REGION:-""}
      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key}
      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key}
      - AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}
      - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
      - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
      - OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1}
      - OPENAI_API_KEY=${OPENAI_API_KEY:-sk-}
      - GPT_ENGINE=${GPT_ENGINE:-gpt-3.5-turbo}
      - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
      - DOCKERIZED=${DOCKERIZED:-1}
      - USE_MINIO=${USE_MINIO:-1}
      - NGINX_PORT=${NGINX_PORT:-8082}
      - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so}
      - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123}
      - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1}
      - SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
    depends_on:
      - api
      - plane-db
      - plane-redis

  plane-db:
    container_name: plane-db
    image: postgres:15.2-alpine
    restart: always
    command: postgres -c 'max_connections=1000'
    volumes:
      - pgdata:/var/lib/postgresql/data
    environment:
      - POSTGRES_USER=${POSTGRES_USER:-plane}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane}
      - POSTGRES_DB=${POSTGRES_DB:-plane}
      - PGDATA=${PGDATA:-/var/lib/postgresql/data}

  plane-redis:
    container_name: plane-redis
    image: redis:6.2.7-alpine
    restart: always
    volumes:
      - redisdata:/data

  plane-minio:
    container_name: plane-minio
    image: minio/minio
    restart: always
    command: server /export --console-address ":9090"
    volumes:
      - uploads:/export
    environment:
      - MINIO_ROOT_USER=${MINIO_ROOT_USER:-access-key}
      - MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-secret-key}

  createbuckets:
    image: minio/mc
    entrypoint: >
      /bin/sh -c " /usr/bin/mc config host add plane-minio http://plane-minio:9000 \$AWS_ACCESS_KEY_ID \$AWS_SECRET_ACCESS_KEY; /usr/bin/mc mb plane-minio/\$AWS_S3_BUCKET_NAME; /usr/bin/mc anonymous set download plane-minio/\$AWS_S3_BUCKET_NAME; exit 0; "
    environment:
      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-access-key}
      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-secret-key}
      - AWS_S3_BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
    depends_on:
      - plane-minio

  # Comment this if you already have a reverse proxy running
  proxy:
    container_name: proxy
    platform: linux/amd64
    image: makeplane/plane-proxy:latest
    ports:
      - 8082:80
    environment:
      - SERVICE_FQDN_PLANE_8082
      - NGINX_PORT=${NGINX_PORT:-8082}
      - FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
      - BUCKET_NAME=${AWS_S3_BUCKET_NAME:-uploads}
    depends_on:
      - web
      - api
      - space

volumes:
  pgdata:
  redisdata:
  uploads:
8 deploy/kubernetes/README.md Normal file
@@ -0,0 +1,8 @@
# Helm Chart

Click the link below to access the Helm chart instructions.

[Artifact Hub](https://artifacthub.io/packages/search?repo=makeplane)
309 deploy/selfhost/README.md Normal file
@@ -0,0 +1,309 @@
# Self Hosting

In this guide, we will walk you through the process of setting up a self-hosted environment. Self-hosting allows you to have full control over your applications and data. It's a great way to ensure privacy, control, and customization.

We will cover two main options for setting up your self-hosted environment: using a cloud server or using your desktop. For the cloud server, we will use an AWS EC2 instance. For the desktop, we will use Docker to create a local environment.

Let's get started!

## Setting up Docker Environment
<details>
<summary>Option 1 - Using Cloud Server</summary>
<p>The best way to start is to create an EC2 machine on AWS. It must be at least a t3.medium/t3a.medium.</p>
<p>Run the below command to download the Docker installation script.</p>

```curl -fsSL https://get.docker.com -o install-docker.sh```
</details>
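The command above only downloads the installer; it still needs to be executed. A minimal sketch, assuming a bash-compatible shell with root (or sudo) access:

```
sh install-docker.sh
```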

---

<details>
<summary>Option 2 - Using Desktop</summary>

#### For Mac
<ol>
<li> Download Docker Desktop for Mac from the <a href="https://hub.docker.com/editions/community/docker-ce-desktop-mac/" target="_blank">Docker Hub</a>. </li>
<li> Double-click the downloaded `.dmg` file and drag the Docker app icon to the Applications folder. </li>
<li>Open Docker Desktop from the Applications folder. You might be asked to provide your system password to install additional software.</li>
</ol>

#### For Windows:
<ol>
<li>Download Docker Desktop for Windows from the <a href="https://hub.docker.com/editions/community/docker-ce-desktop-windows/" target="_blank">Docker Hub</a>.</li>
<li>Run the installer and follow the instructions. You might be asked to enable Hyper-V and "Containers" Windows features.</li>
<li>Open Docker Desktop. You might be asked to log out and log back in, or restart your machine, for changes to take effect.</li>
</ol>

After installation, you can verify the installation by opening a terminal (Command Prompt on Windows, Terminal app on Mac) and running the command `docker --version`. This should display the installed version of Docker.
</details>

---

## Installing Plane

Installing Plane is an easy process with minimal steps.

### Prerequisite
- Docker installed and running
- OS with bash scripting enabled (Ubuntu, Linux AMI, macOS). Windows systems need to have [gitbash](https://git-scm.com/download/win)
- The user context used must have access to the Docker services. In most cases, use `sudo su` to switch to the root user (a quick check is shown after this list)
- Use the terminal (or gitbash) window to run all the steps that follow
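To confirm the current user can actually talk to the Docker daemon, one simple check (assuming Docker is already installed) is:

```
docker info
```

If this fails with a permission error, switch to the root user or add your user to the `docker` group before continuing.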
### Downloading Latest Stable Release

```
mkdir plane-selfhost

cd plane-selfhost

curl -fsSL -o setup.sh https://raw.githubusercontent.com/makeplane/plane/master/deploy/selfhost/install.sh

chmod +x setup.sh
```

<details>
<summary>Downloading Preview Release</summary>

```
mkdir plane-selfhost

cd plane-selfhost

export RELEASE=preview

curl -fsSL https://raw.githubusercontent.com/makeplane/plane/$RELEASE/deploy/selfhost/install.sh | sed 's@BRANCH=master@BRANCH='"$RELEASE"'@' > setup.sh

chmod +x setup.sh
```

</details>

---

### Proceed with setup

The steps above get you ready to install and start the Plane services.

Let's get started by running the `./setup.sh` command.

This will prompt you with the below options.

```
Select a Action you want to perform:
   1) Install
   2) Start
   3) Stop
   4) Restart
   5) Upgrade
   6) Exit

Action [2]: 1
```

For the 1st-time setup, type "1" as the action input.

This will create a folder `plane-app` (or `plane-app-preview` in case of a preview deployment) and download 2 files inside it:
- `docker-compose.yaml`
- `.env`

The `options [1-6]` prompt will pop up again; this time hit `6` to exit.

---

### Continue with setup - Environment Settings

Before proceeding, we suggest you review the `.env` file and set the values.
Below are the most important keys you must refer to. *<span style="color: #fcba03">You can use any text editor to edit this file</span>*.

> `NGINX_PORT` - This is set to `80` by default. Make sure the port you choose to use is not already occupied. (e.g `NGINX_PORT=8080`)

> `WEB_URL` - This is set to `http://localhost` by default. Change this to the FQDN you plan to use, along with NGINX_PORT (eg. `https://plane.example.com:8080` or `http://[IP-ADDRESS]:8080`)

> `CORS_ALLOWED_ORIGINS` - This is set to `http://localhost` by default. Change this to the FQDN you plan to use, along with NGINX_PORT (eg. `https://plane.example.com:8080` or `http://[IP-ADDRESS]:8080`)
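For example, a minimal `.env` fragment for an instance served at `plane.example.com` on port 8080 could look like the below (the hostname is illustrative, not a required value):

```
NGINX_PORT=8080
WEB_URL=http://plane.example.com:8080
CORS_ALLOWED_ORIGINS=http://plane.example.com:8080
```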
There are many other settings you can play with, but we suggest you configure `EMAIL SETTINGS` as it will enable you to invite your teammates onto the platform.

---

### Continue with setup - Start Server

Let's again run the `./setup.sh` command. You will again be prompted with the below options. This time select `2` to start the services.

```
Select a Action you want to perform:
   1) Install
   2) Start
   3) Stop
   4) Restart
   5) Upgrade
   6) Exit

Action [2]: 2
```

Expect something like this.



Be patient, as it might take some time based on download speed and system configuration. If all goes well, you should see something like this:



This confirms that all images were downloaded and the services are up & running.

You have successfully self-hosted a `Plane` instance. Access the application by going to the IP or domain you configured it on (e.g `https://plane.example.com:8080` or `http://[IP-ADDRESS]:8080`).
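To double-check the state of the stack independently of the script, you can list the containers from the `plane-app` folder (a sketch; assumes Docker Compose v2):

```
docker compose ps
```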

---

### Stopping the Server

In case you want to make changes to the `.env` variables, we suggest you stop the services before doing that.

Let's again run the `./setup.sh` command. You will again be prompted with the below options. This time select `3` to stop the services.

```
Select a Action you want to perform:
   1) Install
   2) Start
   3) Stop
   4) Restart
   5) Upgrade
   6) Exit

Action [2]: 3
```

If all goes well, you should see something like this:



---

### Restarting the Server

In case you want to apply changes to the `.env` variables without stopping the server, or you noticed some abnormalities in the services, you can restart the services with the RESTART option.

Let's again run the `./setup.sh` command. You will again be prompted with the below options. This time select `4` to restart the services.

```
Select a Action you want to perform:
   1) Install
   2) Start
   3) Stop
   4) Restart
   5) Upgrade
   6) Exit

Action [2]: 4
```

If all goes well, you should see something like this:



---

### Upgrading Plane Version

It is always advised to keep Plane up to date with the latest release.

Let's again run the `./setup.sh` command. You will again be prompted with the below options. This time select `5` to upgrade the release.

```
Select a Action you want to perform:
   1) Install
   2) Start
   3) Stop
   4) Restart
   5) Upgrade
   6) Exit

Action [2]: 5
```

Choosing this will stop the services and then download the latest `docker-compose.yaml` and `variables-upgrade.env`. The system will not replace your existing `.env` with the new one.

You should expect the below message:



Once done, choose `6` to exit the prompt.

> It is very important for you to compare the 2 files `variables-upgrade.env` and `.env`. Copy the newly added variables from the downloaded file to `.env` and set the expected values.
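A quick way to spot the newly added keys is to diff the two files before editing (assuming a standard `diff` utility is available):

```
diff variables-upgrade.env .env
```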

Once you are done making changes to the `.env` file, jump to `Start Server`.

## Upgrading from v0.13.2 to v0.14.x

This is a one-time activity for users who are upgrading from v0.13.2 to v0.14.0.

As there have been significant changes to the self-hosting process, this step mainly covers the data migration from the current (v0.13.2) Docker volumes to the newly created volumes.

> Before we begin with the migration, make sure your v0.14.0 setup was started and then stopped. This is required to know the newly created Docker volume names.

Begin by downloading the migration script using the below command:

```
curl -fsSL -o migrate.sh https://raw.githubusercontent.com/makeplane/plane/master/deploy/selfhost/migration-0.13-0.14.sh

chmod +x migrate.sh
```
Now run the `./migrate.sh` command and expect instructions like the below:

```
******************************************************************

This script is solely for the migration purpose only.
This is a 1 time migration of volume data from v0.13.2 => v0.14.x

Assumption:
1. Postgres data volume name ends with _pgdata
2. Minio data volume name ends with _uploads
3. Redis data volume name ends with _redisdata

Any changes to this script can break the migration.

Before you proceed, make sure you run the below command
to know the docker volumes

docker volume ls -q | grep -i "_pgdata"
docker volume ls -q | grep -i "_uploads"
docker volume ls -q | grep -i "_redisdata"

*******************************************************

Given below list of REDIS volumes, identify the prefix of source and destination volumes leaving "_redisdata"
---------------------
plane-app_redisdata
v0132_redisdata

Provide the Source Volume Prefix :
```
**Open another terminal window**, and run the 3 commands mentioned above. The output may differ for users who changed the volume names in their previous setup (v0.13.2).

For every command you should see 2 records, similar to the `redisdata` example shown above.

To move forward, you will need the PREFIX of the old and the new setup. As per the above example, `v0132` is the prefix of the v0.13.2 setup and `plane-app` is the prefix of the v0.14.0 setup.

**Back in the original terminal window**, *Provide the Source Volume Prefix* and hit ENTER.

Now you will be prompted to *Provide Destination Volume Prefix*. Provide the value and hit ENTER.

```
Provide the Source Volume Prefix : v0132
Provide the Destination Volume Prefix : plane-app
```

In case the prefixes are wrong or the mentioned volumes are not found, you will receive the error shown below. The image below displays an error for source volumes.



In case of a successful migration, the script exits silently without any error.

Now it's time to restart the v0.14.0 setup.
@@ -5,16 +5,16 @@ x-app-env : &app-env
- NGINX_PORT=${NGINX_PORT:-80}
- WEB_URL=${WEB_URL:-http://localhost}
- DEBUG=${DEBUG:-0}
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-plane.settings.selfhosted} # deprecated
- NEXT_PUBLIC_ENABLE_OAUTH=${NEXT_PUBLIC_ENABLE_OAUTH:-0}
- NEXT_PUBLIC_DEPLOY_URL=${NEXT_PUBLIC_DEPLOY_URL:-http://localhost/spaces}
- DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-plane.settings.production} # deprecated
- NEXT_PUBLIC_ENABLE_OAUTH=${NEXT_PUBLIC_ENABLE_OAUTH:-0} # deprecated
- NEXT_PUBLIC_DEPLOY_URL=${NEXT_PUBLIC_DEPLOY_URL:-http://localhost/spaces} # deprecated
- SENTRY_DSN=${SENTRY_DSN:-""}
- SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT:-"production"}
- GOOGLE_CLIENT_ID=${GOOGLE_CLIENT_ID:-""}
- GITHUB_CLIENT_ID=${GITHUB_CLIENT_ID:-""}
- GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
- DOCKERIZED=${DOCKERIZED:-1} # deprecated
- CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS:-""}
- SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT:-"production"}
- ADMIN_EMAIL=${ADMIN_EMAIL:-""}
- LICENSE_ENGINE_BASE_URL=${LICENSE_ENGINE_BASE_URL:-""}
# Gunicorn Workers
- GUNICORN_WORKERS=${GUNICORN_WORKERS:-2}
#DB SETTINGS

@@ -29,12 +29,12 @@ x-app-env : &app-env
- REDIS_HOST=${REDIS_HOST:-plane-redis}
- REDIS_PORT=${REDIS_PORT:-6379}
- REDIS_URL=${REDIS_URL:-redis://${REDIS_HOST}:6379/}
# EMAIL SETTINGS
# EMAIL SETTINGS - Deprecated can be configured through admin panel
- EMAIL_HOST=${EMAIL_HOST:-""}
- EMAIL_HOST_USER=${EMAIL_HOST_USER:-""}
- EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""}
- EMAIL_PORT=${EMAIL_PORT:-587}
- EMAIL_FROM=${EMAIL_FROM:-"Team Plane <team@mailer.plane.so>"}
- EMAIL_FROM=${EMAIL_FROM:-"Team Plane <team@mailer.plane.so>"}
- EMAIL_USE_TLS=${EMAIL_USE_TLS:-1}
- EMAIL_USE_SSL=${EMAIL_USE_SSL:-0}
- DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so}

@@ -43,10 +43,11 @@ x-app-env : &app-env
- OPENAI_API_BASE=${OPENAI_API_BASE:-https://api.openai.com/v1}
- OPENAI_API_KEY=${OPENAI_API_KEY:-"sk-"}
- GPT_ENGINE=${GPT_ENGINE:-"gpt-3.5-turbo"}
# LOGIN/SIGNUP SETTINGS
# LOGIN/SIGNUP SETTINGS - Deprecated can be configured through admin panel
- ENABLE_SIGNUP=${ENABLE_SIGNUP:-1}
- ENABLE_EMAIL_PASSWORD=${ENABLE_EMAIL_PASSWORD:-1}
- ENABLE_MAGIC_LINK_LOGIN=${ENABLE_MAGIC_LINK_LOGIN:-0}
# Application secret
- SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
# DATA STORE SETTINGS
- USE_MINIO=${USE_MINIO:-1}

@@ -102,7 +103,6 @@ services:

worker:
<<: *app-env
container_name: bgworker
platform: linux/amd64
image: makeplane/plane-backend:${APP_RELEASE:-latest}
restart: unless-stopped

@@ -114,7 +114,6 @@ services:

beat-worker:
<<: *app-env
container_name: beatworker
platform: linux/amd64
image: makeplane/plane-backend:${APP_RELEASE:-latest}
restart: unless-stopped

@@ -126,7 +125,6 @@ services:

plane-db:
<<: *app-env
container_name: plane-db
image: postgres:15.2-alpine
restart: unless-stopped
command: postgres -c 'max_connections=1000'

@@ -135,7 +133,6 @@ services:

plane-redis:
<<: *app-env
container_name: plane-redis
image: redis:6.2.7-alpine
restart: unless-stopped
volumes:

@@ -143,7 +140,6 @@ services:

plane-minio:
<<: *app-env
container_name: plane-minio
image: minio/minio
restart: unless-stopped
command: server /export --console-address ":9090"

@@ -153,7 +149,6 @@ services:
# Comment this if you already have a reverse proxy running
proxy:
<<: *app-env
container_name: proxy
platform: linux/amd64
image: makeplane/plane-proxy:${APP_RELEASE:-latest}
ports:
BIN deploy/selfhost/images/download.png Normal file
Binary file not shown. After: 22 KiB
BIN deploy/selfhost/images/migrate-error.png Normal file
Binary file not shown. After: 14 KiB
BIN deploy/selfhost/images/restart.png Normal file
Binary file not shown. After: 15 KiB
BIN deploy/selfhost/images/started.png Normal file
Binary file not shown. After: 21 KiB
BIN deploy/selfhost/images/stopped.png Normal file
Binary file not shown. After: 16 KiB
BIN deploy/selfhost/images/upgrade.png Normal file
Binary file not shown. After: 54 KiB
125 deploy/selfhost/install-private.sh Executable file
@@ -0,0 +1,125 @@
#!/bin/bash

BRANCH=develop
SCRIPT_DIR=$PWD
PLANE_INSTALL_DIR=$PWD/plane-app-private

function install(){
    echo
    echo "Installing on $PLANE_INSTALL_DIR"
    download
}
function download(){
    cd $SCRIPT_DIR
    TS=$(date +%s)
    if [ -f "$PLANE_INSTALL_DIR/docker-compose.yaml" ]
    then
        mv $PLANE_INSTALL_DIR/docker-compose.yaml $PLANE_INSTALL_DIR/archive/$TS.docker-compose.yaml
    fi

    curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/docker-compose.yaml https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/docker-compose.yml?$(date +%s)
    curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/variables-upgrade.env https://raw.githubusercontent.com/makeplane/plane/$BRANCH/deploy/selfhost/variables.env?$(date +%s)

    if [ -f "$PLANE_INSTALL_DIR/.env" ];
    then
        cp $PLANE_INSTALL_DIR/.env $PLANE_INSTALL_DIR/archive/$TS.env
    else
        mv $PLANE_INSTALL_DIR/variables-upgrade.env $PLANE_INSTALL_DIR/.env
    fi

    cp $PLANE_INSTALL_DIR/docker-compose.yaml $PLANE_INSTALL_DIR/temp.yaml
    sed -e 's@plane-frontend:@plane-frontend-private:@g' \
        -e 's@plane-space:@plane-space-private:@g' \
        -e 's@plane-backend:@plane-backend-private:@g' \
        -e 's@plane-proxy:@plane-proxy-private:@g' \
        -e 's@${APP_RELEASE:-latest}@'"$BRANCH"'@g' \
        $PLANE_INSTALL_DIR/temp.yaml > $PLANE_INSTALL_DIR/docker-compose.yaml

    rm $PLANE_INSTALL_DIR/temp.yaml

    echo ""
    echo "Latest version is now available for you to use"
    echo ""
    echo "In case of Upgrade, your new setting file is available as 'variables-upgrade.env'. Please compare and set the required values in the '.env' file."
    echo ""

}
function startServices(){
    cd $PLANE_INSTALL_DIR
    docker compose up -d
    cd $SCRIPT_DIR
}
function stopServices(){
    cd $PLANE_INSTALL_DIR
    docker compose down
    cd $SCRIPT_DIR
}
function restartServices(){
    cd $PLANE_INSTALL_DIR
    docker compose restart
    cd $SCRIPT_DIR
}
function upgrade(){
    echo "***** STOPPING SERVICES ****"
    stopServices

    echo
    echo "***** DOWNLOADING LATEST VERSION ****"
    download

    echo "***** PLEASE VALIDATE AND START SERVICES ****"

}
function askForAction(){
    echo
    echo "Select a Action you want to perform:"
    echo "   1) Install"
    echo "   2) Start"
    echo "   3) Stop"
    echo "   4) Restart"
    echo "   5) Upgrade"
    echo "   6) Exit"
    echo
    read -p "Action [2]: " ACTION
    until [[ -z "$ACTION" || "$ACTION" =~ ^[1-6]$ ]]; do
        echo "$ACTION: invalid selection."
        read -p "Action [2]: " ACTION
    done
    echo

    if [ "$ACTION" == "1" ]
    then
        install
        askForAction
    elif [ "$ACTION" == "2" ] || [ "$ACTION" == "" ]
    then
        startServices
        askForAction
    elif [ "$ACTION" == "3" ]
    then
        stopServices
        askForAction
    elif [ "$ACTION" == "4" ]
    then
        restartServices
        askForAction
    elif [ "$ACTION" == "5" ]
    then
        upgrade
        askForAction
    elif [ "$ACTION" == "6" ]
    then
        exit 0
    else
        echo "INVALID ACTION SUPPLIED"
    fi
}

if [ "$BRANCH" != "master" ];
then
    PLANE_INSTALL_DIR=$PWD/plane-app-private-$(echo $BRANCH | sed -r 's@(\/|" "|\.)@-@g')
fi
mkdir -p $PLANE_INSTALL_DIR/archive

askForAction
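Like the public installer, this private-build script is interactive. A typical first run, assuming the file above has been saved locally as `install-private.sh` (the filename matches the diff path, but the invocation itself is an illustrative sketch):

```
chmod +x install-private.sh
./install-private.sh
```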
@@ -30,12 +30,7 @@ function download(){
    if [ "$BRANCH" != "master" ];
    then
        cp $PLANE_INSTALL_DIR/docker-compose.yaml $PLANE_INSTALL_DIR/temp.yaml

        sed -e 's@plane-frontend:@plane-frontend-private:@g' \
            -e 's@plane-space:@plane-space-private:@g' \
            -e 's@plane-backend:@plane-backend-private:@g' \
            -e 's@plane-proxy:@plane-proxy-private:@g' \
            -e 's@${APP_RELEASE:-latest}@'"$BRANCH"'@g' \
        sed -e 's@${APP_RELEASE:-latest}@'"$BRANCH"'@g' \
            $PLANE_INSTALL_DIR/temp.yaml > $PLANE_INSTALL_DIR/docker-compose.yaml

        rm $PLANE_INSTALL_DIR/temp.yaml
118 deploy/selfhost/migration-0.13-0.14.sh Executable file
@@ -0,0 +1,118 @@
#!/bin/bash

echo '
******************************************************************

This script is solely for the migration purpose only.
This is a 1 time migration of volume data from v0.13.2 => v0.14.x

Assumption:
1. Postgres data volume name ends with _pgdata
2. Minio data volume name ends with _uploads
3. Redis data volume name ends with _redisdata

Any changes to this script can break the migration.

Before you proceed, make sure you run the below command
to know the docker volumes

docker volume ls -q | grep -i "_pgdata"
docker volume ls -q | grep -i "_uploads"
docker volume ls -q | grep -i "_redisdata"

*******************************************************
'

DOWNLOAD_FOL=./download
rm -rf ${DOWNLOAD_FOL}
mkdir -p ${DOWNLOAD_FOL}

function volumeExists {
    if [ "$(docker volume ls -f name=$1 | awk '{print $NF}' | grep -E '^'$1'$')" ]; then
        return 0
    else
        return 1
    fi
}

function readPrefixes(){
    echo ''
    echo 'Given below list of REDIS volumes, identify the prefix of source and destination volumes leaving "_redisdata" '
    echo '---------------------'
    docker volume ls -q | grep -i "_redisdata"
    echo ''

    read -p "Provide the Source Volume Prefix : " SRC_VOL_PREFIX
    until [ "$SRC_VOL_PREFIX" ]; do
        read -p "Provide the Source Volume Prefix : " SRC_VOL_PREFIX
    done

    read -p "Provide the Destination Volume Prefix : " DEST_VOL_PREFIX
    until [ "$DEST_VOL_PREFIX" ]; do
        # re-prompt for the destination prefix (the original prompt said "Source" here by mistake)
        read -p "Provide the Destination Volume Prefix : " DEST_VOL_PREFIX
    done

    echo ''
    echo 'Prefix Provided '
    echo "  Source : ${SRC_VOL_PREFIX}"
    echo "  Destination : ${DEST_VOL_PREFIX}"
    echo '---------------------------------------'
}

function migrate(){

    SRC_VOLUME=${SRC_VOL_PREFIX}_${VOL_NAME_SUFFIX}
    DEST_VOLUME=${DEST_VOL_PREFIX}_${VOL_NAME_SUFFIX}

    # reset the flag on every invocation so a missing volume cannot
    # inherit a previous successful check
    GOOD_TO_GO=0

    if volumeExists $SRC_VOLUME; then
        if volumeExists $DEST_VOLUME; then
            GOOD_TO_GO=1
        else
            echo "Destination Volume '$DEST_VOLUME' does not exist"
            echo ''
        fi
    else
        echo "Source Volume '$SRC_VOLUME' does not exist"
        echo ''
    fi

    if [ $GOOD_TO_GO = 1 ]; then

        echo "MIGRATING ${VOL_NAME_SUFFIX} FROM ${SRC_VOLUME} => ${DEST_VOLUME}"

        TEMP_CONTAINER=$(docker run -d -v $SRC_VOLUME:$CONTAINER_VOL_FOLDER busybox true)
        docker cp -q $TEMP_CONTAINER:$CONTAINER_VOL_FOLDER ${DOWNLOAD_FOL}/${VOL_NAME_SUFFIX}
        docker rm $TEMP_CONTAINER &> /dev/null

        TEMP_CONTAINER=$(docker run -d -v $DEST_VOLUME:$CONTAINER_VOL_FOLDER busybox true)
        if [ "$VOL_NAME_SUFFIX" = "pgdata" ]; then
            docker cp -q ${DOWNLOAD_FOL}/${VOL_NAME_SUFFIX} $TEMP_CONTAINER:$CONTAINER_VOL_FOLDER/_temp
            docker run --rm -v $DEST_VOLUME:$CONTAINER_VOL_FOLDER \
                -e DATA_FOLDER="${CONTAINER_VOL_FOLDER}" \
                busybox /bin/sh -c 'cp -Rf $DATA_FOLDER/_temp/* $DATA_FOLDER '
        else
            docker cp -q ${DOWNLOAD_FOL}/${VOL_NAME_SUFFIX} $TEMP_CONTAINER:$CONTAINER_VOL_FOLDER
        fi
        docker rm $TEMP_CONTAINER &> /dev/null

        echo ''
    fi
}

readPrefixes

# MIGRATE DB
CONTAINER_VOL_FOLDER=/var/lib/postgresql/data
VOL_NAME_SUFFIX=pgdata
migrate

# MIGRATE REDIS
CONTAINER_VOL_FOLDER=/data
VOL_NAME_SUFFIX=redisdata
migrate

# MIGRATE MINIO
CONTAINER_VOL_FOLDER=/export
VOL_NAME_SUFFIX=uploads
migrate
@@ -7,14 +7,15 @@ API_REPLICAS=1
NGINX_PORT=80
WEB_URL=http://localhost
DEBUG=0
DJANGO_SETTINGS_MODULE=plane.settings.selfhosted # deprecated
NEXT_PUBLIC_ENABLE_OAUTH=0
NEXT_PUBLIC_DEPLOY_URL=http://localhost/spaces
SENTRY_DSN=""
SENTRY_ENVIRONMENT="production"
GOOGLE_CLIENT_ID=""
GITHUB_CLIENT_ID=""
GITHUB_CLIENT_SECRET=""
DOCKERIZED=1 # deprecated
CORS_ALLOWED_ORIGINS=""
SENTRY_ENVIRONMENT="production"
CORS_ALLOWED_ORIGINS="http://localhost"

#DB SETTINGS
PGHOST=plane-db

@@ -35,7 +36,7 @@ EMAIL_HOST=""
EMAIL_HOST_USER=""
EMAIL_HOST_PASSWORD=""
EMAIL_PORT=587
EMAIL_FROM="Team Plane <team@mailer.plane.so>"
EMAIL_FROM="Team Plane <team@mailer.plane.so>"
EMAIL_USE_TLS=1
EMAIL_USE_SSL=0

@@ -64,9 +65,3 @@ FILE_SIZE_LIMIT=5242880

# Gunicorn Workers
GUNICORN_WORKERS=2

# Admin Email
ADMIN_EMAIL=""

# License Engine url
LICENSE_ENGINE_BASE_URL=""
@@ -30,7 +30,7 @@
"turbo": "^1.10.16"
},
"resolutions": {
"@types/react": "18.2.0"
"@types/react": "18.2.39"
},
"packageManager": "yarn@1.22.19"
}
@@ -19,17 +19,17 @@
"scripts": {
"build": "tsup",
"dev": "tsup --watch",
"check-types": "tsc --noEmit"
"check-types": "tsc --noEmit",
"format": "prettier --write \"**/*.{ts,tsx,md}\""
},
"peerDependencies": {
"next": "12.3.2",
"next-themes": "^0.2.1",
"react": "^18.2.0",
"react-dom": "18.2.0"
},
"dependencies": {
"@blueprintjs/popover2": "^2.0.10",
"@tiptap/core": "^2.1.7",
"@plane/editor-types": "*",
"@tiptap/extension-code-block-lowlight": "^2.1.12",
"@tiptap/extension-color": "^2.1.11",
"@tiptap/extension-image": "^2.1.7",

@@ -49,29 +49,24 @@
"@tiptap/react": "^2.1.7",
"@tiptap/starter-kit": "^2.1.10",
"@tiptap/suggestion": "^2.0.4",
"@types/node": "18.15.3",
"@types/react": "^18.2.5",
"@types/react-dom": "18.0.11",
"class-variance-authority": "^0.7.0",
"clsx": "^1.2.1",
"eslint": "8.36.0",
"eslint-config-next": "13.2.4",
"eventsource-parser": "^0.1.0",
"highlight.js": "^11.8.0",
"jsx-dom-cjs": "^8.0.3",
"lowlight": "^3.0.0",
"lucide-react": "^0.244.0",
"prosemirror-async-query": "^0.0.4",
"react-markdown": "^8.0.7",
"react-moveable": "^0.54.2",
"tailwind-merge": "^1.14.0",
"tippy.js": "^6.3.7",
"tiptap-markdown": "^0.8.2",
"use-debounce": "^9.0.4"
"tiptap-markdown": "^0.8.2"
},
"devDependencies": {
"eslint": "^7.32.0",
"postcss": "^8.4.29",
"eslint-config-next": "13.2.4",
"@types/node": "18.15.3",
"@types/react": "^18.2.39",
"@types/react-dom": "18.0.11",
"tailwind-config-custom": "*",
"tsconfig": "*",
"tsup": "^7.2.0",
@@ -1,6 +1,7 @@
import { UploadImage } from "@plane/editor-types";
import { Editor, Range } from "@tiptap/core";
import { UploadImage } from "../types/upload-image";
import { startImageUpload } from "../ui/plugins/upload-image";
import { findTableAncestor } from "./utils";

export const toggleHeadingOne = (editor: Editor, range?: Range) => {
  if (range)

@@ -95,6 +96,15 @@ export const toggleBlockquote = (editor: Editor, range?: Range) => {
};

export const insertTableCommand = (editor: Editor, range?: Range) => {
  if (typeof window !== "undefined") {
    const selection: any = window?.getSelection();
    if (selection.rangeCount !== 0) {
      const range = selection.getRangeAt(0);
      if (findTableAncestor(range.startContainer)) {
        return;
      }
    }
  }
  if (range)
    editor
      .chain()
@@ -1,30 +1,33 @@
import { getNodeType } from '@tiptap/core'
import { NodeType } from '@tiptap/pm/model'
import { EditorState } from '@tiptap/pm/state'
import { getNodeType } from "@tiptap/core";
import { NodeType } from "@tiptap/pm/model";
import { EditorState } from "@tiptap/pm/state";

export const findListItemPos = (typeOrName: string | NodeType, state: EditorState) => {
  const { $from } = state.selection
  const nodeType = getNodeType(typeOrName, state.schema)
export const findListItemPos = (
  typeOrName: string | NodeType,
  state: EditorState,
) => {
  const { $from } = state.selection;
  const nodeType = getNodeType(typeOrName, state.schema);

  let currentNode = null
  let currentDepth = $from.depth
  let currentPos = $from.pos
  let targetDepth: number | null = null
  let currentNode = null;
  let currentDepth = $from.depth;
  let currentPos = $from.pos;
  let targetDepth: number | null = null;

  while (currentDepth > 0 && targetDepth === null) {
    currentNode = $from.node(currentDepth)
    currentNode = $from.node(currentDepth);

    if (currentNode.type === nodeType) {
      targetDepth = currentDepth
      targetDepth = currentDepth;
    } else {
      currentDepth -= 1
      currentPos -= 1
      currentDepth -= 1;
      currentPos -= 1;
    }
  }

  if (targetDepth === null) {
    return null
    return null;
  }

  return { $pos: state.doc.resolve(currentPos), depth: targetDepth }
}
  return { $pos: state.doc.resolve(currentPos), depth: targetDepth };
};
@@ -1,15 +1,19 @@
import { EditorState } from '@tiptap/pm/state'
import { EditorState } from "@tiptap/pm/state";

export const hasListBefore = (editorState: EditorState, name: string, parentListTypes: string[]) => {
  const { $anchor } = editorState.selection
export const hasListBefore = (
  editorState: EditorState,
  name: string,
  parentListTypes: string[],
) => {
  const { $anchor } = editorState.selection;

  const previousNodePos = Math.max(0, $anchor.pos - 2)
  const previousNodePos = Math.max(0, $anchor.pos - 2);

  const previousNode = editorState.doc.resolve(previousNodePos).node()
  const previousNode = editorState.doc.resolve(previousNodePos).node();

  if (!previousNode || !parentListTypes.includes(previousNode.type.name)) {
    return false
    return false;
  }

  return true
}
  return true;
};
@@ -1,17 +1,20 @@
import { EditorState } from '@tiptap/pm/state'
import { EditorState } from "@tiptap/pm/state";

export const hasListItemAfter = (typeOrName: string, state: EditorState): boolean => {
  const { $anchor } = state.selection
export const hasListItemAfter = (
  typeOrName: string,
  state: EditorState,
): boolean => {
  const { $anchor } = state.selection;

  const $targetPos = state.doc.resolve($anchor.pos - $anchor.parentOffset - 2)
  const $targetPos = state.doc.resolve($anchor.pos - $anchor.parentOffset - 2);

  if ($targetPos.index() === $targetPos.parent.childCount - 1) {
    return false
    return false;
  }

  if ($targetPos.nodeAfter?.type.name !== typeOrName) {
    return false
    return false;
  }

  return true
}
  return true;
};
@@ -1,17 +1,20 @@
import { EditorState } from '@tiptap/pm/state'
import { EditorState } from "@tiptap/pm/state";

export const hasListItemBefore = (typeOrName: string, state: EditorState): boolean => {
  const { $anchor } = state.selection
export const hasListItemBefore = (
  typeOrName: string,
  state: EditorState,
): boolean => {
  const { $anchor } = state.selection;

  const $targetPos = state.doc.resolve($anchor.pos - 2)
  const $targetPos = state.doc.resolve($anchor.pos - 2);

  if ($targetPos.index() === 0) {
    return false
    return false;
  }

  if ($targetPos.nodeBefore?.type.name !== typeOrName) {
    return false
    return false;
  }

  return true
}
  return true;
};
@@ -1,19 +1,135 @@
import Image from "@tiptap/extension-image";
import TrackImageDeletionPlugin from "../../plugins/delete-image";
import { EditorState, Plugin, PluginKey, Transaction } from "@tiptap/pm/state";
import { Node as ProseMirrorNode } from "@tiptap/pm/model";
import UploadImagesPlugin from "../../plugins/upload-image";
import { DeleteImage } from "../../../types/delete-image";
import ImageExt from "@tiptap/extension-image";
import { onNodeDeleted, onNodeRestored } from "../../plugins/delete-image";
import { DeleteImage, RestoreImage } from "@plane/editor-types";

interface ImageNode extends ProseMirrorNode {
  attrs: {
    src: string;
    id: string;
  };
}

const deleteKey = new PluginKey("delete-image");
const IMAGE_NODE_TYPE = "image";

const ImageExtension = (
  deleteImage: DeleteImage,
  restoreFile: RestoreImage,
  cancelUploadImage?: () => any,
) =>
  Image.extend({
  ImageExt.extend({
    addProseMirrorPlugins() {
      return [
        UploadImagesPlugin(cancelUploadImage),
        TrackImageDeletionPlugin(deleteImage),
        new Plugin({
          key: deleteKey,
          appendTransaction: (
            transactions: readonly Transaction[],
            oldState: EditorState,
            newState: EditorState,
          ) => {
            const newImageSources = new Set<string>();
            newState.doc.descendants((node) => {
              if (node.type.name === IMAGE_NODE_TYPE) {
                newImageSources.add(node.attrs.src);
              }
            });

            transactions.forEach((transaction) => {
              // transaction could be a selection
              if (!transaction.docChanged) return;

              const removedImages: ImageNode[] = [];

              // iterate through all the nodes in the old state
              oldState.doc.descendants((oldNode, oldPos) => {
                // if the node is not an image, then return as no point in checking
                if (oldNode.type.name !== IMAGE_NODE_TYPE) return;

                // Check if the node has been deleted or replaced
                if (!newImageSources.has(oldNode.attrs.src)) {
                  removedImages.push(oldNode as ImageNode);
                }
              });

              removedImages.forEach(async (node) => {
                const src = node.attrs.src;
                this.storage.images.set(src, true);
                await onNodeDeleted(src, deleteImage);
              });
            });

            return null;
          },
        }),
        new Plugin({
          key: new PluginKey("imageRestoration"),
          appendTransaction: (
            transactions: readonly Transaction[],
            oldState: EditorState,
            newState: EditorState,
          ) => {
            const oldImageSources = new Set<string>();
            oldState.doc.descendants((node) => {
              if (node.type.name === IMAGE_NODE_TYPE) {
                oldImageSources.add(node.attrs.src);
              }
            });

            transactions.forEach((transaction) => {
              if (!transaction.docChanged) return;

              const addedImages: ImageNode[] = [];

              newState.doc.descendants((node, pos) => {
                if (node.type.name !== IMAGE_NODE_TYPE) return;
                if (pos < 0 || pos > newState.doc.content.size) return;
                if (oldImageSources.has(node.attrs.src)) return;
                addedImages.push(node as ImageNode);
              });

              addedImages.forEach(async (image) => {
                const wasDeleted = this.storage.images.get(image.attrs.src);
                if (wasDeleted === undefined) {
                  this.storage.images.set(image.attrs.src, false);
                } else if (wasDeleted === true) {
                  await onNodeRestored(image.attrs.src, restoreFile);
                }
              });
            });
            return null;
          },
        }),
      ];
    },

    onCreate(this) {
      const imageSources = new Set<string>();
      this.editor.state.doc.descendants((node) => {
        if (node.type.name === IMAGE_NODE_TYPE) {
          imageSources.add(node.attrs.src);
        }
      });
      imageSources.forEach(async (src) => {
        try {
          const assetUrlWithWorkspaceId = new URL(src).pathname.substring(1);
          await restoreFile(assetUrlWithWorkspaceId);
        } catch (error) {
          console.error("Error restoring image: ", error);
        }
      });
    },

    // storage to keep track of image states Map<src, isDeleted>
    addStorage() {
      return {
        images: new Map<string, boolean>(),
      };
    },

    addAttributes() {
      return {
        ...this.parent?.(),
@@ -15,14 +15,17 @@ import HorizontalRule from "./horizontal-rule";

import ImageExtension from "./image";

import { DeleteImage } from "../../types/delete-image";
import { isValidHttpUrl } from "../../lib/utils";
import { IMentionSuggestion } from "../../types/mention-suggestion";
import { Mentions } from "../mentions";

import { CustomKeymap } from "./keymap";
import { CustomCodeBlock } from "./code";
import { ListKeymap } from "./custom-list-keymap";
import {
  IMentionSuggestion,
  DeleteImage,
  RestoreImage,
} from "@plane/editor-types";

export const CoreEditorExtensions = (
  mentionConfig: {

@@ -30,6 +33,7 @@ export const CoreEditorExtensions = (
    mentionHighlights: string[];
  },
  deleteFile: DeleteImage,
  restoreFile: RestoreImage,
  cancelUploadImage?: () => any,
) => [
  StarterKit.configure({

@@ -71,7 +75,7 @@ export const CoreEditorExtensions = (
        "text-custom-primary-300 underline underline-offset-[3px] hover:text-custom-primary-500 transition-colors cursor-pointer",
    },
  }),
  ImageExtension(deleteFile, cancelUploadImage).configure({
  ImageExtension(deleteFile, restoreFile, cancelUploadImage).configure({
    HTMLAttributes: {
      class: "rounded-lg border border-custom-border-300",
    },
@@ -1,22 +1,27 @@
import { useEditor as useCustomEditor, Editor } from "@tiptap/react";
import { useImperativeHandle, useRef, MutableRefObject } from "react";
import { DeleteImage } from "../../types/delete-image";
import { CoreEditorProps } from "../props";
import { CoreEditorExtensions } from "../extensions";
import { EditorProps } from "@tiptap/pm/view";
import { getTrimmedHTML } from "../../lib/utils";
import { UploadImage } from "../../types/upload-image";
import { useInitializedContent } from "./useInitializedContent";
import { IMentionSuggestion } from "../../types/mention-suggestion";
import {
  DeleteImage,
  IMentionSuggestion,
  RestoreImage,
  UploadImage,
} from "@plane/editor-types";

interface CustomEditorProps {
  uploadFile: UploadImage;
  restoreFile: RestoreImage;
  deleteFile: DeleteImage;
  cancelUploadImage?: () => any;
  setIsSubmitting?: (
    isSubmitting: "submitting" | "submitted" | "saved",
  ) => void;
  setShouldShowAlert?: (showAlert: boolean) => void;
  value: string;
  deleteFile: DeleteImage;
  debouncedUpdatesEnabled?: boolean;
  onStart?: (json: any, html: string) => void;
  onChange?: (json: any, html: string) => void;

@@ -25,7 +30,6 @@ interface CustomEditorProps {
  forwardedRef?: any;
  mentionHighlights?: string[];
  mentionSuggestions?: IMentionSuggestion[];
  cancelUploadImage?: () => any;
}

export const useEditor = ({

@@ -39,6 +43,7 @@ export const useEditor = ({
  onChange,
  setIsSubmitting,
  forwardedRef,
  restoreFile,
  setShouldShowAlert,
  mentionHighlights,
  mentionSuggestions,

@@ -56,6 +61,7 @@ export const useEditor = ({
      mentionHighlights: mentionHighlights ?? [],
    },
    deleteFile,
    restoreFile,
    cancelUploadImage,
  ),
  ...extensions,

@@ -63,7 +69,7 @@ export const useEditor = ({
  content:
    typeof value === "string" && value.trim() !== "" ? value : "<p></p>",
  onCreate: async ({ editor }) => {
    onStart?.(editor.getJSON(), getTrimmedHTML(editor.getHTML()))
    onStart?.(editor.getJSON(), getTrimmedHTML(editor.getHTML()));
  },
  onUpdate: async ({ editor }) => {
    // for instant feedback loop
@@ -8,7 +8,7 @@ import {
import { CoreReadOnlyEditorExtensions } from "../../ui/read-only/extensions";
import { CoreReadOnlyEditorProps } from "../../ui/read-only/props";
import { EditorProps } from "@tiptap/pm/view";
import { IMentionSuggestion } from "../../types/mention-suggestion";
import { IMentionSuggestion } from "@plane/editor-types";

interface CustomReadOnlyEditorProps {
  value: string;
@@ -1,14 +1,16 @@
"use client";
import * as React from "react";
import { Extension } from "@tiptap/react";
import { UploadImage } from "../types/upload-image";
import { DeleteImage } from "../types/delete-image";
import { getEditorClassNames } from "../lib/utils";
import { EditorProps } from "@tiptap/pm/view";
import { useEditor } from "./hooks/useEditor";
import { EditorContainer } from "../ui/components/editor-container";
import { EditorContentWrapper } from "../ui/components/editor-content";
import { IMentionSuggestion } from "../types/mention-suggestion";
import {
  UploadImage,
  DeleteImage,
  IMentionSuggestion,
} from "@plane/editor-types";

interface ICoreEditor {
  value: string;
@@ -1,3 +1,4 @@
import { IMentionSuggestion } from "@plane/editor-types";
import { Editor } from "@tiptap/react";
import React, {
  forwardRef,

@@ -7,8 +8,6 @@ import React, {
  useState,
} from "react";

import { IMentionSuggestion } from "../../types/mention-suggestion";

interface MentionListProps {
  items: IMentionSuggestion[];
  command: (item: {
@@ -2,7 +2,8 @@ import { Mention, MentionOptions } from "@tiptap/extension-mention";
import { mergeAttributes } from "@tiptap/core";
import { ReactNodeViewRenderer } from "@tiptap/react";
import mentionNodeView from "./mentionNodeView";
import { IMentionHighlight } from "../../types/mention-suggestion";
import { IMentionHighlight } from "@plane/editor-types";

export interface CustomMentionOptions extends MentionOptions {
  mentionHighlights: IMentionHighlight[];
  readonly?: boolean;
Some files were not shown because too many files have changed in this diff