Compare commits


14 Commits

Author SHA1 Message Date
NarayanBavisetti 4ce3fa356a fix: changed migration and added public s3 2023-10-27 12:50:12 +05:30
NarayanBavisetti 2628890068 Merge branch 'develop' of github.com:makeplane/plane into dev/private_bucket_for_attachments 2023-10-27 12:19:33 +05:30
pablohashescobar 0d07ecb337 dev: rearrange migrations and add class calls 2023-09-25 15:08:09 +05:30
pablohashescobar 8745f20ef4 Merge branch 'dev/private_bucket_for_attachments' of github.com:makeplane/plane into dev/private_bucket_for_attachments 2023-09-25 13:40:56 +05:30
pablohashescobar 05578e4729 Merge branch 'develop' of github.com:makeplane/plane into dev/private_bucket_for_attachments 2023-09-25 13:40:15 +05:30
pablohashescobar b3a755a311 dev: fix migration inconsistency 2023-09-25 13:38:42 +05:30
pablohashescobar c12cb26c4d Merge branch 'develop' of github.com:makeplane/plane into dev/private_bucket_for_attachments 2023-09-25 13:20:10 +05:30
pablohashescobar 8d0cd67198 dev: update configuration for the self hosted instance 2023-09-25 13:00:34 +05:30
pablohashescobar ad9ff684e9 Merge branch 'develop' of github.com:makeplane/plane into dev/private_bucket_for_attachments 2023-09-25 12:29:59 +05:30
pablohashescobar 7e6e6531ad dev: self hosted private settings 2023-09-25 12:29:09 +05:30
pablohashescobar c76d1dc8db Merge branch 'develop' of github.com:makeplane/plane into dev/private_bucket_for_attachments 2023-09-22 13:22:14 +05:30
pablohashescobar 5a7b19ae78 dev: update configuration 2023-09-22 13:16:58 +05:30
pablohashescobar dd760b4f38 dev: add overwrite configuration and configuration for self hosted version 2023-09-22 12:57:58 +05:30
pablohashescobar edceef71b4 dev: new private bucket for storing attachments 2023-09-22 12:25:18 +05:30
5806 changed files with 159213 additions and 382935 deletions

View File

@@ -2,17 +2,5 @@
*.pyc
.env
venv
.venv
node_modules/
**/node_modules/
npm-debug.log
.next/
**/.next/
.turbo/
**/.turbo/
build/
**/build/
out/
**/out/
dist/
**/dist/
node_modules
npm-debug.log

View File

@@ -1,19 +1,14 @@
# Database Settings
POSTGRES_USER="plane"
POSTGRES_PASSWORD="plane"
POSTGRES_DB="plane"
PGDATA="/var/lib/postgresql/data"
PGUSER="plane"
PGPASSWORD="plane"
PGHOST="plane-db"
PGDATABASE="plane"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
# RabbitMQ Settings
RABBITMQ_HOST="plane-mq"
RABBITMQ_PORT="5672"
RABBITMQ_USER="plane"
RABBITMQ_PASSWORD="plane"
RABBITMQ_VHOST="plane"
REDIS_URL="redis://${REDIS_HOST}:6379/"
# AWS Settings
AWS_REGION=""
@@ -21,26 +16,21 @@ AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
# Changing this requires change in the nginx.conf for uploads if using minio setup
AWS_S3_BUCKET_NAME="uploads"
AWS_PUBLIC_STORAGE_BUCKET_NAME="uploads"
AWS_PRIVATE_STORAGE_BUCKET_NAME="uploads-private"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
OPENAI_API_KEY="sk-" # deprecated
GPT_ENGINE="gpt-3.5-turbo" # deprecated
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
OPENAI_API_KEY="sk-" # add your openai key here
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
# Settings related to Docker
DOCKERIZED=1 # deprecated
DOCKERIZED=1
# set to 1 If using the pre-configured minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
# Force HTTPS for handling SSL Termination
MINIO_ENDPOINT_SSL=0
# API key rate limit
API_KEY_RATE_LIMIT="60/minute"
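Note on the env change above: the single AWS_S3_BUCKET_NAME is replaced by the AWS_PUBLIC_STORAGE_BUCKET_NAME / AWS_PRIVATE_STORAGE_BUCKET_NAME pair, which is the core of this dev/private_bucket_for_attachments branch. A minimal sketch of how a backend could consume these variables follows; the helper name, call sites, and URL lifetime are illustrative assumptions, not code from this diff — attachments would live in the private bucket and be served through short-lived presigned URLs, while the public bucket keeps openly readable assets.

```python
# Illustrative sketch only -- not Plane's actual storage code.
# Assumes the environment variables introduced in this diff are set.
import os

import boto3  # assumed S3 client dependency of the API server

s3 = boto3.client(
    "s3",
    endpoint_url=os.environ.get("AWS_S3_ENDPOINT_URL") or None,  # MinIO endpoint when USE_MINIO=1
    region_name=os.environ.get("AWS_REGION") or None,
    aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
    aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
)

PUBLIC_BUCKET = os.environ.get("AWS_PUBLIC_STORAGE_BUCKET_NAME", "uploads")
PRIVATE_BUCKET = os.environ.get("AWS_PRIVATE_STORAGE_BUCKET_NAME", "uploads-private")
FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))  # 5242880 bytes = 5 MiB


def presigned_attachment_url(key: str, expires_in: int = 3600) -> str:
    """Return a temporary download URL for an attachment in the private bucket."""
    return s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": PRIVATE_BUCKET, "Key": key},
        ExpiresIn=expires_in,
    )
```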

10
.eslintrc.js Normal file
View File

@@ -0,0 +1,10 @@
module.exports = {
root: true,
// This tells ESLint to load the config from the package `eslint-config-custom`
extends: ["custom"],
settings: {
next: {
rootDir: ["web/", "space/"],
},
},
};

1
.gitattributes vendored
View File

@@ -1 +0,0 @@
*.sh text eol=lf

View File

@@ -1,8 +1,7 @@
name: Bug report
description: Create a bug report to help us improve Plane
title: "[bug]: "
labels: [🐛bug]
assignees: [vihar, pushya22]
labels: [bug, need testing]
body:
- type: markdown
attributes:
@@ -45,7 +44,7 @@ body:
- Deploy preview
validations:
required: true
- type: dropdown
type: dropdown
id: browser
attributes:
label: Browser
@@ -55,19 +54,12 @@ body:
- Safari
- Other
- type: dropdown
id: variant
id: version
attributes:
label: Variant
label: Version
options:
- Cloud
- Self-hosted
- Local
validations:
required: true
- type: input
id: version
attributes:
label: Version
placeholder: v0.17.0-dev
validations:
required: true

View File

@@ -1,8 +1,7 @@
name: Feature request
description: Suggest a feature to improve Plane
title: "[feature]: "
labels: [feature]
assignees: [vihar, pushya22]
labels: [feature]
body:
- type: markdown
attributes:

View File

@@ -1,20 +0,0 @@
### Description
<!-- Provide a detailed description of the changes in this PR -->
### Type of Change
<!-- Put an 'x' in the boxes that apply -->
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] Feature (non-breaking change which adds functionality)
- [ ] Improvement (change that would cause existing functionality to not work as expected)
- [ ] Code refactoring
- [ ] Performance improvements
- [ ] Documentation update
### Screenshots and Media (if applicable)
<!-- Add screenshots to help explain your changes, ideally showcasing before and after -->
### Test Scenarios
<!-- Please describe the tests that you ran to verify your changes -->
### References
<!-- Link related issues if there are any -->

View File

@@ -1,139 +0,0 @@
name: Build AIO Base Image
on:
workflow_dispatch:
inputs:
base_tag_name:
description: 'Base Tag Name'
required: false
default: ''
env:
TARGET_BRANCH: ${{ github.ref_name }}
jobs:
base_build_setup:
name: Build Preparation
runs-on: ubuntu-latest
outputs:
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
image_tag: ${{ steps.set_env_variables.outputs.IMAGE_TAG }}
steps:
- id: set_env_variables
name: Set Environment Variables
run: |
echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
if [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
echo "IMAGE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
echo "IMAGE_TAG=latest" >> $GITHUB_OUTPUT
elif [ "${{ env.TARGET_BRANCH }}" == "preview" ]; then
echo "IMAGE_TAG=preview" >> $GITHUB_OUTPUT
else
echo "IMAGE_TAG=develop" >> $GITHUB_OUTPUT
fi
if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
else
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
fi
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
full_base_build_push:
runs-on: ubuntu-latest
needs: [base_build_setup]
env:
BASE_IMG_TAG: makeplane/plane-aio-base:full-${{ needs.base_build_setup.outputs.image_tag }}
BUILDX_DRIVER: ${{ needs.base_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.base_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.base_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.base_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Check out the repo
uses: actions/checkout@v4
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Build and Push to Docker Hub
uses: docker/build-push-action@v6.9.0
with:
context: ./aio
file: ./aio/Dockerfile-base-full
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.BASE_IMG_TAG }}
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
slim_base_build_push:
runs-on: ubuntu-latest
needs: [base_build_setup]
env:
BASE_IMG_TAG: makeplane/plane-aio-base:slim-${{ needs.base_build_setup.outputs.image_tag }}
BUILDX_DRIVER: ${{ needs.base_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.base_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.base_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.base_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Check out the repo
uses: actions/checkout@v4
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Build and Push to Docker Hub
uses: docker/build-push-action@v6.9.0
with:
context: ./aio
file: ./aio/Dockerfile-base-slim
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.BASE_IMG_TAG }}
push: true
cache-from: type=gha
cache-to: type=gha,mode=max
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

View File

@@ -1,207 +0,0 @@
name: Branch Build AIO
on:
workflow_dispatch:
inputs:
full:
description: 'Run full build'
type: boolean
required: false
default: false
slim:
description: 'Run slim build'
type: boolean
required: false
default: false
base_tag_name:
description: 'Base Tag Name'
required: false
default: ''
release:
types: [released, prereleased]
env:
TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
FULL_BUILD_INPUT: ${{ github.event.inputs.full }}
SLIM_BUILD_INPUT: ${{ github.event.inputs.slim }}
jobs:
branch_build_setup:
name: Build Setup
runs-on: ubuntu-latest
outputs:
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
flat_branch_name: ${{ steps.set_env_variables.outputs.FLAT_BRANCH_NAME }}
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
aio_base_tag: ${{ steps.set_env_variables.outputs.AIO_BASE_TAG }}
do_full_build: ${{ steps.set_env_variables.outputs.DO_FULL_BUILD }}
do_slim_build: ${{ steps.set_env_variables.outputs.DO_SLIM_BUILD }}
steps:
- id: set_env_variables
name: Set Environment Variables
run: |
if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ]; then
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
echo "AIO_BASE_TAG=latest" >> $GITHUB_OUTPUT
else
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
if [ "${{ github.event_name}}" == "workflow_dispatch" ] && [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
echo "AIO_BASE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
elif [ "${{ env.TARGET_BRANCH }}" == "preview" ]; then
echo "AIO_BASE_TAG=preview" >> $GITHUB_OUTPUT
else
echo "AIO_BASE_TAG=develop" >> $GITHUB_OUTPUT
fi
fi
echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
if [ "${{ env.FULL_BUILD_INPUT }}" == "true" ] || [ "${{github.event_name}}" == "push" ] || [ "${{github.event_name}}" == "release" ]; then
echo "DO_FULL_BUILD=true" >> $GITHUB_OUTPUT
else
echo "DO_FULL_BUILD=false" >> $GITHUB_OUTPUT
fi
if [ "${{ env.SLIM_BUILD_INPUT }}" == "true" ] || [ "${{github.event_name}}" == "push" ] || [ "${{github.event_name}}" == "release" ]; then
echo "DO_SLIM_BUILD=true" >> $GITHUB_OUTPUT
else
echo "DO_SLIM_BUILD=false" >> $GITHUB_OUTPUT
fi
FLAT_BRANCH_NAME=$(echo "${{ env.TARGET_BRANCH }}" | sed 's/[^a-zA-Z0-9]/-/g')
echo "FLAT_BRANCH_NAME=$FLAT_BRANCH_NAME" >> $GITHUB_OUTPUT
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
full_build_push:
if: ${{ needs.branch_build_setup.outputs.do_full_build == 'true' }}
runs-on: ubuntu-22.04
needs: [branch_build_setup]
env:
BUILD_TYPE: full
AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
AIO_IMAGE_TAGS: makeplane/plane-aio:full-${{ needs.branch_build_setup.outputs.flat_branch_name }}
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Docker Tag
run: |
if [ "${{ github.event_name }}" == "release" ]; then
TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-stable,makeplane/plane-aio:${{env.BUILD_TYPE}}-${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-latest
else
TAG=${{ env.AIO_IMAGE_TAGS }}
fi
echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push to Docker Hub
uses: docker/build-push-action@v6.9.0
with:
context: .
file: ./aio/Dockerfile-app
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.AIO_IMAGE_TAGS }}
push: true
build-args: |
BASE_TAG=${{ env.AIO_BASE_TAG }}
BUILD_TYPE=${{env.BUILD_TYPE}}
cache-from: type=gha
cache-to: type=gha,mode=max
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
slim_build_push:
if: ${{ needs.branch_build_setup.outputs.do_slim_build == 'true' }}
runs-on: ubuntu-22.04
needs: [branch_build_setup]
env:
BUILD_TYPE: slim
AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
AIO_IMAGE_TAGS: makeplane/plane-aio:slim-${{ needs.branch_build_setup.outputs.flat_branch_name }}
TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Docker Tag
run: |
if [ "${{ github.event_name }}" == "release" ]; then
TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-stable,makeplane/plane-aio:${{env.BUILD_TYPE}}-${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=makeplane/plane-aio:${{env.BUILD_TYPE}}-latest
else
TAG=${{ env.AIO_IMAGE_TAGS }}
fi
echo "AIO_IMAGE_TAGS=${TAG}" >> $GITHUB_ENV
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push to Docker Hub
uses: docker/build-push-action@v6.9.0
with:
context: .
file: ./aio/Dockerfile-app
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.AIO_IMAGE_TAGS }}
push: true
build-args: |
BASE_TAG=${{ env.AIO_BASE_TAG }}
BUILD_TYPE=${{env.BUILD_TYPE}}
cache-from: type=gha
cache-to: type=gha,mode=max
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

View File

@@ -1,295 +0,0 @@
name: Branch Build CE
on:
workflow_dispatch:
inputs:
build_type:
description: "Type of build to run"
required: true
type: choice
default: "Build"
options:
- "Build"
- "Release"
releaseVersion:
description: "Release Version"
type: string
default: v0.0.0
isPrerelease:
description: "Is Pre-release"
type: boolean
default: false
required: true
arm64:
description: "Build for ARM64 architecture"
required: false
default: false
type: boolean
push:
branches:
- preview
- canary
env:
TARGET_BRANCH: ${{ github.ref_name }}
ARM64_BUILD: ${{ github.event.inputs.arm64 }}
BUILD_TYPE: ${{ github.event.inputs.build_type }}
RELEASE_VERSION: ${{ github.event.inputs.releaseVersion }}
IS_PRERELEASE: ${{ github.event.inputs.isPrerelease }}
jobs:
branch_build_setup:
name: Build Setup
runs-on: ubuntu-22.04
outputs:
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
dh_img_web: ${{ steps.set_env_variables.outputs.DH_IMG_WEB }}
dh_img_space: ${{ steps.set_env_variables.outputs.DH_IMG_SPACE }}
dh_img_admin: ${{ steps.set_env_variables.outputs.DH_IMG_ADMIN }}
dh_img_live: ${{ steps.set_env_variables.outputs.DH_IMG_LIVE }}
dh_img_backend: ${{ steps.set_env_variables.outputs.DH_IMG_BACKEND }}
dh_img_proxy: ${{ steps.set_env_variables.outputs.DH_IMG_PROXY }}
build_type: ${{steps.set_env_variables.outputs.BUILD_TYPE}}
build_release: ${{ steps.set_env_variables.outputs.BUILD_RELEASE }}
build_prerelease: ${{ steps.set_env_variables.outputs.BUILD_PRERELEASE }}
release_version: ${{ steps.set_env_variables.outputs.RELEASE_VERSION }}
steps:
- id: set_env_variables
name: Set Environment Variables
run: |
if [ "${{ env.ARM64_BUILD }}" == "true" ] || ([ "${{ env.BUILD_TYPE }}" == "Release" ] && [ "${{ env.IS_PRERELEASE }}" != "true" ]); then
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
else
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
fi
BR_NAME=$( echo "${{ env.TARGET_BRANCH }}" |sed 's/[^a-zA-Z0-9.-]//g')
echo "TARGET_BRANCH=$BR_NAME" >> $GITHUB_OUTPUT
echo "DH_IMG_WEB=plane-frontend" >> $GITHUB_OUTPUT
echo "DH_IMG_SPACE=plane-space" >> $GITHUB_OUTPUT
echo "DH_IMG_ADMIN=plane-admin" >> $GITHUB_OUTPUT
echo "DH_IMG_LIVE=plane-live" >> $GITHUB_OUTPUT
echo "DH_IMG_BACKEND=plane-backend" >> $GITHUB_OUTPUT
echo "DH_IMG_PROXY=plane-proxy" >> $GITHUB_OUTPUT
echo "BUILD_TYPE=${{env.BUILD_TYPE}}" >> $GITHUB_OUTPUT
BUILD_RELEASE=false
BUILD_PRERELEASE=false
RELVERSION="latest"
if [ "${{ env.BUILD_TYPE }}" == "Release" ]; then
FLAT_RELEASE_VERSION=$(echo "${{ env.RELEASE_VERSION }}" | sed 's/[^a-zA-Z0-9.-]//g')
echo "FLAT_RELEASE_VERSION=${FLAT_RELEASE_VERSION}" >> $GITHUB_OUTPUT
semver_regex="^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*)?$"
if [[ ! $FLAT_RELEASE_VERSION =~ $semver_regex ]]; then
echo "Invalid Release Version Format : $FLAT_RELEASE_VERSION"
echo "Please provide a valid SemVer version"
echo "e.g. v1.2.3 or v1.2.3-alpha-1"
echo "Exiting the build process"
exit 1 # Exit with status 1 to fail the step
fi
BUILD_RELEASE=true
RELVERSION=$FLAT_RELEASE_VERSION
if [ "${{ env.IS_PRERELEASE }}" == "true" ]; then
BUILD_PRERELEASE=true
fi
fi
echo "BUILD_RELEASE=${BUILD_RELEASE}" >> $GITHUB_OUTPUT
echo "BUILD_PRERELEASE=${BUILD_PRERELEASE}" >> $GITHUB_OUTPUT
echo "RELEASE_VERSION=${RELVERSION}" >> $GITHUB_OUTPUT
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
branch_build_push_admin:
name: Build-Push Admin Docker Image
runs-on: ubuntu-22.04
needs: [branch_build_setup]
steps:
- name: Admin Build and Push
uses: makeplane/actions/build-push@v1.0.0
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_admin }}
build-context: .
dockerfile-path: ./apps/admin/Dockerfile.admin
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_web:
name: Build-Push Web Docker Image
runs-on: ubuntu-22.04
needs: [branch_build_setup]
steps:
- name: Web Build and Push
uses: makeplane/actions/build-push@v1.0.0
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_web }}
build-context: .
dockerfile-path: ./apps/web/Dockerfile.web
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_space:
name: Build-Push Space Docker Image
runs-on: ubuntu-22.04
needs: [branch_build_setup]
steps:
- name: Space Build and Push
uses: makeplane/actions/build-push@v1.0.0
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_space }}
build-context: .
dockerfile-path: ./apps/space/Dockerfile.space
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_live:
name: Build-Push Live Collaboration Docker Image
runs-on: ubuntu-22.04
needs: [branch_build_setup]
steps:
- name: Live Build and Push
uses: makeplane/actions/build-push@v1.0.0
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_live }}
build-context: .
dockerfile-path: ./apps/live/Dockerfile.live
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_api:
name: Build-Push API Server Docker Image
runs-on: ubuntu-22.04
needs: [branch_build_setup]
steps:
- name: Backend Build and Push
uses: makeplane/actions/build-push@v1.0.0
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_backend }}
build-context: ./apps/api
dockerfile-path: ./apps/api/Dockerfile.api
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
branch_build_push_proxy:
name: Build-Push Proxy Docker Image
runs-on: ubuntu-22.04
needs: [branch_build_setup]
steps:
- name: Proxy Build and Push
uses: makeplane/actions/build-push@v1.0.0
with:
build-release: ${{ needs.branch_build_setup.outputs.build_release }}
build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
release-version: ${{ needs.branch_build_setup.outputs.release_version }}
dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
docker-image-owner: makeplane
docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_proxy }}
build-context: ./apps/proxy
dockerfile-path: ./apps/proxy/Dockerfile.ce
buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
publish_release:
if: ${{ needs.branch_build_setup.outputs.build_type == 'Release' }}
name: Build Release
runs-on: ubuntu-22.04
needs:
[
branch_build_setup,
branch_build_push_admin,
branch_build_push_web,
branch_build_push_space,
branch_build_push_live,
branch_build_push_api,
branch_build_push_proxy,
]
env:
REL_VERSION: ${{ needs.branch_build_setup.outputs.release_version }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Update Assets
run: |
cp ./deploy/selfhost/install.sh deploy/selfhost/setup.sh
sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deploy/selfhost/docker-compose.yml
# sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env
- name: Create Release
id: create_release
uses: softprops/action-gh-release@v2.1.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token
with:
tag_name: ${{ env.REL_VERSION }}
name: ${{ env.REL_VERSION }}
draft: false
prerelease: ${{ env.IS_PRERELEASE }}
generate_release_notes: true
files: |
${{ github.workspace }}/deploy/selfhost/setup.sh
${{ github.workspace }}/deploy/selfhost/swarm.sh
${{ github.workspace }}/deploy/selfhost/restore.sh
${{ github.workspace }}/deploy/selfhost/restore-airgapped.sh
${{ github.workspace }}/deploy/selfhost/docker-compose.yml
${{ github.workspace }}/deploy/selfhost/variables.env

View File

@@ -1,95 +1,48 @@
name: Build and Lint on Pull Request
name: Build Pull Request Contents
on:
workflow_dispatch:
on:
pull_request:
types: ["opened", "synchronize", "ready_for_review"]
types: ["opened", "synchronize"]
jobs:
lint-server:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.x" # Specify the Python version you need
- name: Install Ruff
run: python -m pip install ruff
- name: Install Server Dependencies
run: cd apps/server && pip install -r requirements.txt
- name: Lint apps/server
run: ruff check --fix apps/server
build-pull-request-contents:
name: Build Pull Request Contents
runs-on: ubuntu-20.04
permissions:
pull-requests: read
lint-admin:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
- run: yarn install
- run: yarn lint --filter=admin
- name: Checkout Repository to Actions
uses: actions/checkout@v3.3.0
lint-space:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
- name: Setup Node.js 18.x
uses: actions/setup-node@v2
with:
node-version: 20.x
- run: yarn install
- run: yarn lint --filter=space
node-version: 18.x
cache: 'yarn'
- name: Get changed files
id: changed-files
uses: tj-actions/changed-files@v38
with:
files_yaml: |
apiserver:
- apiserver/**
web:
- web/**
deploy:
- space/**
lint-web:
if: github.event.pull_request.draft == false
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
- run: yarn install
- run: yarn lint --filter=web
- name: Build Plane's Main App
if: steps.changed-files.outputs.web_any_changed == 'true'
run: |
yarn
yarn build --filter=web
build-admin:
needs: lint-admin
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
- run: yarn install
- run: yarn build --filter=admin
- name: Build Plane's Deploy App
if: steps.changed-files.outputs.deploy_any_changed == 'true'
run: |
yarn
yarn build --filter=space
build-space:
needs: lint-space
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
- run: yarn install
- run: yarn build --filter=space
build-web:
needs: lint-web
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20.x
- run: yarn install
- run: yarn build --filter=web

View File

@@ -1,45 +0,0 @@
name: Version Change Before Release
on:
pull_request:
branches:
- master
jobs:
check-version:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ github.head_ref }}
fetch-depth: 0
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: '18'
- name: Get PR Branch version
run: echo "PR_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
- name: Fetch base branch
run: git fetch origin master:master
- name: Get Master Branch version
run: |
git checkout master
echo "MASTER_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
- name: Get master branch version and compare
run: |
echo "Comparing versions: PR version is $PR_VERSION, Master version is $MASTER_VERSION"
if [ "$PR_VERSION" == "$MASTER_VERSION" ]; then
echo "Version in PR branch is the same as in master. Failing the CI."
exit 1
else
echo "Version check passed. Versions are different."
fi
env:
PR_VERSION: ${{ env.PR_VERSION }}
MASTER_VERSION: ${{ env.MASTER_VERSION }}

View File

@@ -1,64 +0,0 @@
name: "CodeQL"
on:
workflow_dispatch:
push:
branches: ["preview", "master"]
pull_request:
branches: ["develop", "preview", "master"]
schedule:
- cron: "53 19 * * 5"
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
language: ["python", "javascript"]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Use only 'java' to analyze code written in Java, Kotlin or both
# Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

79
.github/workflows/create-sync-pr.yml vendored Normal file
View File

@@ -0,0 +1,79 @@
name: Create PR in Plane EE Repository to sync the changes
on:
pull_request:
branches:
- master
types:
- closed
jobs:
create_pr:
# Only run the job when a PR is merged
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
permissions:
pull-requests: write
contents: read
steps:
- name: Check SOURCE_REPO
id: check_repo
env:
SOURCE_REPO: ${{ secrets.SOURCE_REPO_NAME }}
run: |
echo "::set-output name=is_correct_repo::$(if [[ "$SOURCE_REPO" == "makeplane/plane" ]]; then echo 'true'; else echo 'false'; fi)"
- name: Checkout Code
if: steps.check_repo.outputs.is_correct_repo == 'true'
uses: actions/checkout@v2
with:
persist-credentials: false
fetch-depth: 0
- name: Set up Branch Name
if: steps.check_repo.outputs.is_correct_repo == 'true'
run: |
echo "SOURCE_BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV
- name: Setup GH CLI
if: steps.check_repo.outputs.is_correct_repo == 'true'
run: |
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
sudo apt update
sudo apt install gh -y
- name: Create Pull Request
if: steps.check_repo.outputs.is_correct_repo == 'true'
env:
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
run: |
TARGET_REPO="${{ secrets.TARGET_REPO_NAME }}"
TARGET_BRANCH="${{ secrets.TARGET_REPO_BRANCH }}"
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
git checkout $SOURCE_BRANCH
git remote add target "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
git push target $SOURCE_BRANCH:$SOURCE_BRANCH
PR_TITLE="${{ github.event.pull_request.title }}"
PR_BODY="${{ github.event.pull_request.body }}"
# Remove double quotes
PR_TITLE_CLEANED="${PR_TITLE//\"/}"
PR_BODY_CLEANED="${PR_BODY//\"/}"
# Construct PR_BODY_CONTENT using a here-document
PR_BODY_CONTENT=$(cat <<EOF
$PR_BODY_CLEANED
EOF
)
gh pr create \
--base $TARGET_BRANCH \
--head $SOURCE_BRANCH \
--title "[SYNC] $PR_TITLE_CLEANED" \
--body "$PR_BODY_CONTENT" \
--repo $TARGET_REPO

View File

@@ -1,168 +0,0 @@
name: Feature Preview
on:
workflow_dispatch:
inputs:
base_tag_name:
description: 'Base Tag Name'
required: false
default: 'preview'
env:
TARGET_BRANCH: ${{ github.ref_name }}
jobs:
branch_build_setup:
name: Build Setup
runs-on: ubuntu-latest
outputs:
gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
flat_branch_name: ${{ steps.set_env_variables.outputs.FLAT_BRANCH_NAME }}
gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
aio_base_tag: ${{ steps.set_env_variables.outputs.AIO_BASE_TAG }}
do_full_build: ${{ steps.set_env_variables.outputs.DO_FULL_BUILD }}
do_slim_build: ${{ steps.set_env_variables.outputs.DO_SLIM_BUILD }}
steps:
- id: set_env_variables
name: Set Environment Variables
run: |
echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
if [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
echo "AIO_BASE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
else
echo "AIO_BASE_TAG=develop" >> $GITHUB_OUTPUT
fi
echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
FLAT_BRANCH_NAME=$(echo "${{ env.TARGET_BRANCH }}" | sed 's/[^a-zA-Z0-9]/-/g')
echo "FLAT_BRANCH_NAME=$FLAT_BRANCH_NAME" >> $GITHUB_OUTPUT
- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4
full_build_push:
runs-on: ubuntu-22.04
needs: [branch_build_setup]
env:
BUILD_TYPE: full
AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
AIO_IMAGE_TAGS: makeplane/plane-aio-feature:${{ needs.branch_build_setup.outputs.flat_branch_name }}
BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: ${{ env.BUILDX_DRIVER }}
version: ${{ env.BUILDX_VERSION }}
endpoint: ${{ env.BUILDX_ENDPOINT }}
- name: Check out the repo
uses: actions/checkout@v4
- name: Build and Push to Docker Hub
uses: docker/build-push-action@v6.9.0
with:
context: .
file: ./aio/Dockerfile-app
platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.AIO_IMAGE_TAGS }}
push: true
build-args:
BUILD_TAG=${{ env.AIO_BASE_TAG }}
BUILD_TYPE=${{env.BUILD_TYPE}}
# cache-from: type=gha
# cache-to: type=gha,mode=max
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
outputs:
AIO_IMAGE_TAGS: ${{ env.AIO_IMAGE_TAGS }}
feature-deploy:
needs: [branch_build_setup, full_build_push]
name: Feature Deploy
runs-on: ubuntu-latest
env:
KUBE_CONFIG_FILE: ${{ secrets.FEATURE_PREVIEW_KUBE_CONFIG }}
DEPLOYMENT_NAME: ${{ needs.branch_build_setup.outputs.flat_branch_name }}
steps:
- name: Install AWS cli
run: |
sudo apt-get update
sudo apt-get install -y python3-pip
pip3 install awscli
- name: Tailscale
uses: tailscale/github-action@v2
with:
oauth-client-id: ${{ secrets.TAILSCALE_OAUTH_CLIENT_ID }}
oauth-secret: ${{ secrets.TAILSCALE_OAUTH_SECRET }}
tags: tag:ci
- name: Kubectl Setup
run: |
curl -LO "https://dl.k8s.io/release/${{ vars.FEATURE_PREVIEW_KUBE_VERSION }}/bin/linux/amd64/kubectl"
chmod +x kubectl
mkdir -p ~/.kube
echo "$KUBE_CONFIG_FILE" > ~/.kube/config
chmod 600 ~/.kube/config
- name: HELM Setup
run: |
curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3
chmod 700 get_helm.sh
./get_helm.sh
- name: App Deploy
run: |
helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }}
APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}"
helm --kube-insecure-skip-tls-verify uninstall \
${{ env.DEPLOYMENT_NAME }} \
--namespace $APP_NAMESPACE \
--timeout 10m0s \
--wait \
--ignore-not-found
METADATA=$(helm --kube-insecure-skip-tls-verify upgrade \
--install=true \
--namespace $APP_NAMESPACE \
--set dockerhub.loginid=${{ secrets.DOCKERHUB_USERNAME }} \
--set dockerhub.password=${{ secrets.DOCKERHUB_TOKEN_RO}} \
--set config.feature_branch=${{ env.DEPLOYMENT_NAME }} \
--set ingress.primaryDomain=${{vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \
--set ingress.tls_secret=${{vars.FEATURE_PREVIEW_INGRESS_TLS_SECRET || '' }} \
--output json \
--timeout 10m0s \
--wait \
${{ env.DEPLOYMENT_NAME }} feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} )
APP_NAME=$(echo $METADATA | jq -r '.name')
INGRESS_HOSTNAME=$(kubectl get ingress -n $APP_NAMESPACE --insecure-skip-tls-verify \
-o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \
jq -r '.spec.rules[0].host')
echo "****************************************"
echo "APP NAME ::: $APP_NAME"
echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME"
echo "****************************************"

View File

@@ -1,52 +0,0 @@
name: Create PR on Sync
on:
workflow_dispatch:
push:
branches:
- "sync/**"
env:
CURRENT_BRANCH: ${{ github.ref_name }}
TARGET_BRANCH: "preview" # The target branch that you would like to merge changes like develop
GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }} # Personal access token required to modify contents and workflows
ACCOUNT_USER_NAME: ${{ vars.ACCOUNT_USER_NAME }}
ACCOUNT_USER_EMAIL: ${{ vars.ACCOUNT_USER_EMAIL }}
jobs:
create_pull_request:
runs-on: ubuntu-latest
permissions:
pull-requests: write
contents: write
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch all history for all branches and tags
- name: Setup Git
run: |
git config user.name "$ACCOUNT_USER_NAME"
git config user.email "$ACCOUNT_USER_EMAIL"
- name: Setup GH CLI and Git Config
run: |
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
sudo apt update
sudo apt install gh -y
- name: Create PR to Target Branch
run: |
# get all pull requests and check if there is already a PR
PR_EXISTS=$(gh pr list --base $TARGET_BRANCH --head $CURRENT_BRANCH --state open --json number | jq '.[] | .number')
if [ -n "$PR_EXISTS" ]; then
echo "Pull Request already exists: $PR_EXISTS"
else
echo "Creating new pull request"
PR_URL=$(gh pr create --base $TARGET_BRANCH --head $CURRENT_BRANCH --title "${{ vars.SYNC_PR_TITLE }}" --body "")
echo "Pull Request created: $PR_URL"
fi

View File

@@ -1,44 +0,0 @@
name: Sync Repositories
on:
workflow_dispatch:
push:
branches:
- preview
env:
SOURCE_BRANCH_NAME: ${{ github.ref_name }}
jobs:
sync_changes:
runs-on: ubuntu-22.04
permissions:
pull-requests: write
contents: read
steps:
- name: Checkout Code
uses: actions/checkout@v4
with:
persist-credentials: false
fetch-depth: 0
- name: Setup GH CLI
run: |
type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
sudo apt update
sudo apt install gh -y
- name: Push Changes to Target Repo
env:
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
run: |
TARGET_REPO="${{ vars.SYNC_TARGET_REPO }}"
TARGET_BRANCH="${{ vars.SYNC_TARGET_BRANCH_NAME }}"
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
git checkout $SOURCE_BRANCH
git remote add target-origin-a "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
git push target-origin-a $SOURCE_BRANCH:$TARGET_BRANCH

View File

@@ -0,0 +1,107 @@
name: Update Docker Images for Plane on Release
on:
release:
types: [released, prereleased]
jobs:
build_push_backend:
name: Build and Push Api Server Docker Image
runs-on: ubuntu-20.04
steps:
- name: Check out the repo
uses: actions/checkout@v3.3.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.5.0
- name: Login to Docker Hub
uses: docker/login-action@v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
id: metaFrontend
uses: docker/metadata-action@v4.3.0
with:
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend
tags: |
type=ref,event=tag
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
id: metaBackend
uses: docker/metadata-action@v4.3.0
with:
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend
tags: |
type=ref,event=tag
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
id: metaSpace
uses: docker/metadata-action@v4.3.0
with:
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space
tags: |
type=ref,event=tag
- name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
id: metaProxy
uses: docker/metadata-action@v4.3.0
with:
images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy
tags: |
type=ref,event=tag
- name: Build and Push Frontend to Docker Container Registry
uses: docker/build-push-action@v4.0.0
with:
context: .
file: ./web/Dockerfile.web
platforms: linux/amd64
tags: ${{ steps.metaFrontend.outputs.tags }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and Push Backend to Docker Hub
uses: docker/build-push-action@v4.0.0
with:
context: ./apiserver
file: ./apiserver/Dockerfile.api
platforms: linux/amd64
push: true
tags: ${{ steps.metaBackend.outputs.tags }}
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and Push Plane-Deploy to Docker Hub
uses: docker/build-push-action@v4.0.0
with:
context: .
file: ./space/Dockerfile.space
platforms: linux/amd64
push: true
tags: ${{ steps.metaSpace.outputs.tags }}
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and Push Plane-Proxy to Docker Hub
uses: docker/build-push-action@v4.0.0
with:
context: ./nginx
file: ./nginx/Dockerfile
platforms: linux/amd64
push: true
tags: ${{ steps.metaProxy.outputs.tags }}
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

19
.gitignore vendored
View File

@@ -1,6 +1,5 @@
node_modules
.next
.yarn
### NextJS ###
# Dependencies
@@ -17,8 +16,7 @@ node_modules
# Production
/build
dist/
out/
dist
# Misc
.DS_Store
@@ -52,9 +50,6 @@ staticfiles
mediafiles
.env
.DS_Store
logs/
htmlcov/
.coverage
node_modules/
assets/dist/
@@ -79,19 +74,7 @@ pnpm-lock.yaml
pnpm-workspace.yaml
.npmrc
.secrets
tmp/
## packages
dist
.temp/
deploy/selfhost/plane-app/
## Storybook
*storybook.log
output.css
dev-editor
# Redis
*.rdb
*.rdb.gz

View File

@@ -1,16 +0,0 @@
{ pkgs, ... }: {
# Which nixpkgs channel to use.
channel = "stable-23.11"; # or "unstable"
# Use https://search.nixos.org/packages to find packages
packages = [
pkgs.nodejs_20
pkgs.python3
];
services.docker.enable = true;
services.postgres.enable = true;
services.redis.enable = true;
}

View File

@@ -1 +0,0 @@
nodeLinker: node-modules

View File

@@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
squawk@plane.so.
hello@plane.so.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the

View File

@@ -4,57 +4,37 @@ Thank you for showing an interest in contributing to Plane! All kinds of contrib
## Submitting an issue
Before submitting a new issue, please search the [issues](https://github.com/makeplane/plane/issues) tab. Maybe an issue or discussion already exists and might inform you of workarounds. Otherwise, you can give new information.
Before submitting a new issue, please search the [issues](https://github.com/makeplane/plane/issues) tab. Maybe an issue or discussion already exists and might inform you of workarounds. Otherwise, you can give new informplaneation.
While we want to fix all the [issues](https://github.com/makeplane/plane/issues), before fixing a bug we need to be able to reproduce and confirm it. Please provide us with a minimal reproduction scenario using a repository or [Gist](https://gist.github.com/). Having a live, reproducible scenario gives us the information without asking questions back & forth with additional questions like:
- 3rd-party libraries being used and their versions
- a use-case that fails
- 3rd-party libraries being used and their versions
- a use-case that fails
Without said minimal reproduction, we won't be able to investigate all [issues](https://github.com/makeplane/plane/issues), and the issue might not be resolved.
You can open a new issue with this [issue form](https://github.com/makeplane/plane/issues/new).
### Naming conventions for issues
When opening a new issue, please use a clear and concise title that follows this format:
- For bugs: `🐛 Bug: [short description]`
- For features: `🚀 Feature: [short description]`
- For improvements: `🛠️ Improvement: [short description]`
- For documentation: `📘 Docs: [short description]`
**Examples:**
- `🐛 Bug: API token expiry time not saving correctly`
- `📘 Docs: Clarify RAM requirement for local setup`
- `🚀 Feature: Allow custom time selection for token expiration`
This helps us triage and manage issues more efficiently.
## Projects setup and Architecture
### Requirements
- Docker Engine installed and running
- Node.js version 20+ [LTS version](https://nodejs.org/en/about/previous-releases)
- Python version 3.8+
- Postgres version v14
- Redis version v6.2.7
- **Memory**: Minimum **12 GB RAM** recommended
> ⚠️ Running the project on a system with only 8 GB RAM may lead to setup failures or memory crashes (especially during Docker container build/start or dependency install). Use cloud environments like GitHub Codespaces or upgrade local RAM if possible.
- Node.js version v16.18.0
- Python version 3.8+
- Postgres version v14
- Redis version v6.2.7
### Setup the project
The project is a monorepo, with backend api and frontend in a single repo.
The backend is a django project which is kept inside apps/api
The backend is a django project which is kept inside apiserver
1. Clone the repo
```bash
git clone https://github.com/makeplane/plane.git [folder-name]
cd [folder-name]
git clone https://github.com/makeplane/plane
cd plane
chmod +x setup.sh
```
@@ -64,23 +44,34 @@ chmod +x setup.sh
./setup.sh
```
3. Start the containers
3. Define `NEXT_PUBLIC_API_BASE_URL=http://localhost` in **web/.env** and **space/.env** file
```bash
docker compose -f docker-compose-local.yml up
echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./web/.env
```
4. Start web apps:
```bash
echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./space/.env
```
4. Run Docker compose up
```bash
docker compose up -d
```
5. Install dependencies
```bash
yarn install
```
6. Run the web app in development mode
```bash
yarn dev
```
5. Open your browser to http://localhost:3001/god-mode/ and register yourself as instance admin
6. Open up your browser to http://localhost:3000 then log in using the same credentials from the previous step
That's it! You're all set to begin coding. Remember to refresh your browser if changes don't auto-reload. Happy contributing! 🎉
## Missing a Feature?
If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository.
@@ -90,158 +81,20 @@ If you would like to _implement_ it, an issue with your proposal must be submitt
To ensure consistency throughout the source code, please keep these rules in mind as you are working:
- All features or bug fixes must be tested by one or more specs (unit-tests).
- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
## Ways to contribute
- Try Plane Cloud and the self hosting platform and give feedback
- Add new integrations
- Add or update translations
- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
- Share your thoughts and suggestions with us
- Help create tutorials and blog posts
- Request a feature by submitting a proposal
- Report a bug
- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
## Contributing to language support
This guide is designed to help contributors understand how to add or update translations in the application.
### Understanding translation structure
#### File organization
Translations are organized by language in the locales directory. Each language has its own folder containing JSON files for translations. Here's how it looks:
```
packages/i18n/src/locales/
├── en/
│ ├── core.json # Critical translations
│ └── translations.json
├── fr/
│ └── translations.json
└── [language]/
└── translations.json
```
#### Nested structure
To keep translations organized, we use a nested structure for keys. This makes it easier to manage and locate specific translations. For example:
```json
{
"issue": {
"label": "Work item",
"title": {
"label": "Work item title"
}
}
}
```
### Translation formatting guide
We use [IntlMessageFormat](https://formatjs.github.io/docs/intl-messageformat/) to handle dynamic content, such as variables and pluralization. Here's how to format your translations:
#### Examples
- **Simple variables**
```json
{
"greeting": "Hello, {name}!"
}
```
- **Pluralization**
```json
{
"items": "{count, plural, one {Work item} other {Work items}}"
}
```
### Contributing guidelines
#### Updating existing translations
1. Locate the key in `locales/<language>/translations.json`.
2. Update the value while ensuring the key structure remains intact.
3. Preserve any existing ICU formats (e.g., variables, pluralization).
#### Adding new translation keys
1. When introducing a new key, ensure it is added to **all** language files, even if translations are not immediately available. Use English as a placeholder if needed.
2. Keep the nesting structure consistent across all languages.
3. If the new key requires dynamic content (e.g., variables or pluralization), ensure the ICU format is applied uniformly across all languages.
### Adding new languages
Adding a new language involves several steps to ensure it integrates seamlessly with the project. Follow these instructions carefully:
1. **Update type definitions**
Add the new language to the TLanguage type in the language definitions file:
```typescript
// types/language.ts
export type TLanguage = "en" | "fr" | "your-lang";
```
2. **Add language configuration**
Include the new language in the list of supported languages:
```typescript
// constants/language.ts
export const SUPPORTED_LANGUAGES: ILanguageOption[] = [
{ label: "English", value: "en" },
{ label: "Your Language", value: "your-lang" }
];
```
3. **Create translation files**
1. Create a new folder for your language under locales (e.g., `locales/your-lang/`).
2. Add a `translations.json` file inside the folder.
3. Copy the structure from an existing translation file and translate all keys.
4. **Update import logic**
Modify the language import logic to include your new language (a fuller loading sketch follows these steps):
```typescript
private importLanguageFile(language: TLanguage): Promise<any> {
switch (language) {
case "your-lang":
return import("../locales/your-lang/translations.json");
// ...
}
}
```
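Putting the pieces together, loading and caching a language file might look roughly like the sketch below. The `TranslationStore` class and its method names are illustrative assumptions, not the actual implementation:
```typescript
// Illustrative sketch of loading and caching a language file.
// `TLanguage` matches the type shown earlier; the class itself is hypothetical.
type TLanguage = "en" | "fr" | "your-lang";

class TranslationStore {
  private cache = new Map<TLanguage, Record<string, unknown>>();

  async load(language: TLanguage): Promise<Record<string, unknown>> {
    const cached = this.cache.get(language);
    if (cached) return cached;

    // Dynamic imports keep unused languages out of the initial bundle.
    const mod = await this.importLanguageFile(language);
    const translations = (mod.default ?? mod) as Record<string, unknown>;
    this.cache.set(language, translations);
    return translations;
  }

  private importLanguageFile(language: TLanguage): Promise<any> {
    switch (language) {
      case "fr":
        return import("../locales/fr/translations.json");
      case "your-lang":
        return import("../locales/your-lang/translations.json");
      default:
        return import("../locales/en/translations.json");
    }
  }
}
```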
### Quality checklist
Before submitting your contribution, please ensure the following:
- All translation keys exist in every language file (see the consistency-check sketch after this list).
- Nested structures match across all language files.
- ICU message formats are correctly implemented.
- All languages load without errors in the application.
- Dynamic values and pluralization work as expected.
- There are no missing or untranslated keys.
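To help verify the first three items, a small script can compare key sets across locale files. The sketch below is hypothetical; the locales path is assumed from the layout shown earlier:
```typescript
// Hypothetical consistency check: every key present in en/translations.json
// should also exist in every other locale's translations.json.
import * as fs from "fs";
import * as path from "path";

const LOCALES_DIR = "packages/i18n/src/locales"; // assumed from the layout above

function flattenKeys(obj: Record<string, unknown>, prefix = ""): string[] {
  return Object.entries(obj).flatMap(([key, value]) => {
    const full = prefix ? `${prefix}.${key}` : key;
    return value !== null && typeof value === "object"
      ? flattenKeys(value as Record<string, unknown>, full)
      : [full];
  });
}

function loadKeys(language: string): Set<string> {
  const file = path.join(LOCALES_DIR, language, "translations.json");
  return new Set(flattenKeys(JSON.parse(fs.readFileSync(file, "utf8"))));
}

const reference = loadKeys("en");
for (const language of fs.readdirSync(LOCALES_DIR)) {
  if (language === "en") continue;
  const keys = loadKeys(language);
  const missing = [...reference].filter((key) => !keys.has(key));
  if (missing.length > 0) {
    console.warn(`${language} is missing: ${missing.join(", ")}`);
  }
}
```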
#### Pro tips
- When in doubt, refer to the English translations for context.
- Verify pluralization works with different numbers.
- Ensure dynamic values (e.g., `{name}`) are correctly interpolated.
- Double-check that nested key access paths are accurate.
Happy translating! 🌍✨
## Need help? Questions and suggestions
Questions, suggestions, and thoughts are most welcome. We can also be reached in our [Discord Server](https://discord.com/invite/A92xrEGCge).

132
Dockerfile Normal file
@@ -0,0 +1,132 @@
FROM node:18-alpine AS builder
RUN apk add --no-cache libc6-compat
# Set working directory
WORKDIR /app
ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER
RUN yarn global add turbo
RUN apk add tree
COPY . .
RUN turbo prune --scope=app --scope=plane-deploy --docker
CMD tree -I node_modules/
# Add lockfile and package.json's of isolated subworkspace
FROM node:18-alpine AS installer
RUN apk add --no-cache libc6-compat
WORKDIR /app
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
# First install the dependencies (as they change less often)
COPY .gitignore .gitignore
COPY --from=builder /app/out/json/ .
COPY --from=builder /app/out/yarn.lock ./yarn.lock
RUN yarn install
# # Build the project
COPY --from=builder /app/out/full/ .
COPY turbo.json turbo.json
COPY replace-env-vars.sh /usr/local/bin/
USER root
RUN chmod +x /usr/local/bin/replace-env-vars.sh
RUN yarn turbo run build
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL}
FROM python:3.11.1-alpine3.17 AS backend
# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV DJANGO_SETTINGS_MODULE plane.settings.production
ENV DOCKERIZED 1
WORKDIR /code
RUN apk --no-cache add \
"libpq~=15" \
"libxslt~=1.1" \
"nodejs-current~=19" \
"xmlsec~=1.2" \
"nginx" \
"nodejs" \
"npm" \
"supervisor"
COPY apiserver/requirements.txt ./
COPY apiserver/requirements ./requirements
RUN apk add --no-cache libffi-dev
RUN apk add --no-cache --virtual .build-deps \
"bash~=5.2" \
"g++~=12.2" \
"gcc~=12.2" \
"cargo~=1.64" \
"git~=2" \
"make~=4.3" \
"postgresql13-dev~=13" \
"libc-dev" \
"linux-headers" \
&& \
pip install -r requirements.txt --compile --no-cache-dir \
&& \
apk del .build-deps
# Add in Django deps and generate Django's static files
COPY apiserver/manage.py manage.py
COPY apiserver/plane plane/
COPY apiserver/templates templates/
COPY apiserver/gunicorn.config.py ./
RUN apk --no-cache add "bash~=5.2"
COPY apiserver/bin ./bin/
RUN chmod +x ./bin/takeoff ./bin/worker
RUN chmod -R 777 /code
# Expose container port and run entry point script
WORKDIR /app
# Don't run production as root
RUN addgroup --system --gid 1001 plane
RUN adduser --system --uid 1001 captain
COPY --from=installer /app/apps/app/next.config.js .
COPY --from=installer /app/apps/app/package.json .
COPY --from=installer /app/apps/space/next.config.js .
COPY --from=installer /app/apps/space/package.json .
COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
COPY --from=installer --chown=captain:plane /app/apps/space/.next/standalone ./
COPY --from=installer --chown=captain:plane /app/apps/space/.next ./apps/space/.next
ENV NEXT_TELEMETRY_DISABLED 1
# RUN rm /etc/nginx/conf.d/default.conf
#######################################################################
COPY nginx/nginx-single-docker-image.conf /etc/nginx/http.d/default.conf
#######################################################################
COPY nginx/supervisor.conf /code/supervisor.conf
ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
USER root
COPY replace-env-vars.sh /usr/local/bin/
COPY start.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/replace-env-vars.sh
RUN chmod +x /usr/local/bin/start.sh
EXPOSE 80
CMD ["supervisord","-c","/code/supervisor.conf"]

134
ENV_SETUP.md Normal file
@@ -0,0 +1,134 @@
# Environment Variables
Environment variables are distributed across several files. Please refer to them carefully.
## {PROJECT_FOLDER}/.env
This file is available in the project root folder.
```
# Database Settings
PGUSER="plane"
PGPASSWORD="plane"
PGHOST="plane-db"
PGDATABASE="plane"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
# Changing this requires change in the nginx.conf for uploads if using minio setup
AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
OPENAI_API_KEY="sk-" # add your openai key here
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
# Settings related to Docker
DOCKERIZED=1
# set to 1 If using the pre-configured minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
```
## {PROJECT_FOLDER}/web/.env.example
```
# Enable/Disable OAUTH - default 0 for selfhosted instance
NEXT_PUBLIC_ENABLE_OAUTH=0
# Public boards deploy URL
NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
```
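For context, `NEXT_PUBLIC_`-prefixed variables are inlined by Next.js at build time and can be read directly in client code. A minimal, hypothetical usage sketch:
```typescript
// Hypothetical usage in the web app: NEXT_PUBLIC_ variables are replaced
// with their values by Next.js at build time.
const isOAuthEnabled = process.env.NEXT_PUBLIC_ENABLE_OAUTH === "1";
const deployUrl = process.env.NEXT_PUBLIC_DEPLOY_URL ?? "http://localhost/spaces";

export const publicConfig = { isOAuthEnabled, deployUrl };
```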
## {PROJECT_FOLDER}/spaces/.env.example
```
# Flag to toggle OAuth
NEXT_PUBLIC_ENABLE_OAUTH=0
```
## {PROJECT_FOLDER}/apiserver/.env
```
# Backend
# Debug value for api server use it as 0 for production use
DEBUG=0
DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
# Error logs
SENTRY_DSN=""
# Database Settings
PGUSER="plane"
PGPASSWORD="plane"
PGHOST="plane-db"
PGDATABASE="plane"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
# Email Settings
EMAIL_HOST=""
EMAIL_HOST_USER=""
EMAIL_HOST_PASSWORD=""
EMAIL_PORT=587
EMAIL_FROM="Team Plane <team@mailer.plane.so>"
EMAIL_USE_TLS="1"
EMAIL_USE_SSL="0"
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
# Changing this requires change in the nginx.conf for uploads if using minio setup
AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
OPENAI_API_KEY="sk-" # add your openai key here
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
# Github
GITHUB_CLIENT_SECRET="" # For fetching release notes
# Settings related to Docker
DOCKERIZED=1
# set to 1 If using the pre-configured minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
# Default Creds
DEFAULT_EMAIL="captain@plane.so"
DEFAULT_PASSWORD="password123"
# SignUps
ENABLE_SIGNUP="1"
# Email Redirection URL
WEB_URL="http://localhost"
```
## Updates
- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
- The naming convention for containers and images has been updated.
- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
- The image name for Plane deployment has been changed to plane-space.

138
README.md
@@ -5,8 +5,9 @@
<img src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_logo_.webp" alt="Plane Logo" width="70">
</a>
</p>
<h1 align="center"><b>Plane</b></h1>
<p align="center"><b>Open-source project management that unlocks customer value</b></p>
<h3 align="center"><b>Plane</b></h3>
<p align="center"><b>Open-source, self-hosted project planning tool</b></p>
<p align="center">
<a href="https://discord.com/invite/A92xrEGCge">
@@ -15,13 +16,6 @@
<img alt="Commit activity per month" src="https://img.shields.io/github/commit-activity/m/makeplane/plane?style=for-the-badge" />
</p>
<p align="center">
<a href="https://plane.so/"><b>Website</b></a>
<a href="https://github.com/makeplane/plane/releases"><b>Releases</b></a>
<a href="https://twitter.com/planepowers"><b>Twitter</b></a>
<a href="https://docs.plane.so/"><b>Documentation</b></a>
</p>
<p>
<a href="https://app.plane.so/#gh-light-mode-only" target="_blank">
<img
@@ -39,65 +33,60 @@
</a>
</p>
Meet [Plane](https://plane.so/), an open-source project management tool to track issues, run ~sprints~ cycles, and manage product roadmaps without the chaos of managing the tool itself. 🧘‍♀️
Meet [Plane](https://plane.so), an open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind 🧘‍♀️.
> Plane is evolving every day. Your suggestions, ideas, and reported bugs help us immensely. Do not hesitate to join in the conversation on [Discord](https://discord.com/invite/A92xrEGCge) or raise a GitHub issue. We read everything and respond to most.
> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.
## 🚀 Installation
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).
Getting started with Plane is simple. Choose the setup that works best for you:
## ⚡️ Contributors Quick Start
- **Plane Cloud**
Sign up for a free account on [Plane Cloud](https://app.plane.so)—it's the fastest way to get up and running without worrying about infrastructure.
### Prerequisite
- **Self-host Plane**
Prefer full control over your data and infrastructure? Install and run Plane on your own servers. Follow our detailed [deployment guides](https://developers.plane.so/self-hosting/overview) to get started.
Your development system must have the Docker engine installed and running.
| Installation methods | Docs link |
| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| Docker | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://developers.plane.so/self-hosting/methods/docker-compose) |
| Kubernetes | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white)](https://developers.plane.so/self-hosting/methods/kubernetes) |
### Steps
`Instance admins` can configure instance settings with [God mode](https://developers.plane.so/self-hosting/govern/instance-admin).
Setting up the local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute.
## 🌟 Features
1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
1. Switch to the code folder `cd plane`
1. Create your feature or fix branch you plan to work on using `git checkout -b <feature-branch-name>`
1. Open terminal and run `./setup.sh`
1. Open the code on VSCode or similar equivalent IDE
1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system
1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`
- **Issues**
Efficiently create and manage tasks with a robust rich text editor that supports file uploads. Enhance organization and tracking by adding sub-properties and referencing related issues.
```bash
./setup.sh
```
- **Cycles**
Maintain your team's momentum with Cycles. Track progress effortlessly using burn-down charts and other insightful tools.
You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).
- **Modules**
Simplify complex projects by dividing them into smaller, manageable modules.
That's it!
- **Views**
Customize your workflow by creating filters to display only the most relevant issues. Save and share these views with ease.
## 🍙 Self Hosting
- **Pages**
Capture and organize ideas using Plane Pages, complete with AI capabilities and a rich text editor. Format text, insert images, add hyperlinks, or convert your notes into actionable items.
For the self-hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page.
- **Analytics**
Access real-time insights across all your Plane data. Visualize trends, remove blockers, and keep your projects moving forward.
## 🚀 Features
- **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team, so everyone can align on the problem and solution.
## 🛠️ Local development
See [CONTRIBUTING](./CONTRIBUTING.md)
## ⚙️ Built with
[![Next JS](https://img.shields.io/badge/next.js-000000?style=for-the-badge&logo=nextdotjs&logoColor=white)](https://nextjs.org/)
[![Django](https://img.shields.io/badge/Django-092E20?style=for-the-badge&logo=django&logoColor=green)](https://www.djangoproject.com/)
[![Node JS](https://img.shields.io/badge/node.js-339933?style=for-the-badge&logo=Node.js&logoColor=white)](https://nodejs.org/en)
- **Issue Planning and Tracking**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to issues for better organization and tracking.
- **Issue Attachments**: Collaborate effectively by attaching files to issues, making it easy for your team to find and share important project-related documents.
- **Layouts**: Customize your project view with your preferred layout - choose from List, Kanban, or Calendar to visualize your project in a way that makes sense to you.
- **Cycles**: Plan sprints with Cycles to keep your team on track and productive. Gain insights into your project's progress with burn-down charts and other useful features.
- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to easily track and plan your project's progress.
- **Views**: Create custom filters to display only the issues that matter to you. Save and share your filters in just a few clicks.
- **Pages**: Plane pages function as an AI-powered notepad, allowing you to easily document issues, cycle plans, and module details, and then synchronize them with your issues.
- **Command K**: Enjoy a better user experience with the new Command + K menu. Easily manage and navigate through your projects from one convenient location.
- **GitHub Sync**: Streamline your planning process by syncing your GitHub issues with Plane. Keep all your issues in one place for better tracking and collaboration.
## 📸 Screenshots
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/w2okwbtu2/Issues_rNZjrGgFl.png?updatedAt=1709298765880"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_views_dark_mode.webp"
alt="Plane Views"
width="100%"
/>
@@ -106,7 +95,8 @@ See [CONTRIBUTING](./CONTRIBUTING.md)
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/w2okwbtu2/Cycles_jCDhqmTl9.png?updatedAt=1709298780697"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_issue_detail_dark_mode.webp"
alt="Plane Issue Details"
width="100%"
/>
</a>
@@ -114,7 +104,7 @@ See [CONTRIBUTING](./CONTRIBUTING.md)
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/w2okwbtu2/Modules_PSCVsbSfI.png?updatedAt=1709298796783"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_cycles_modules_dark_mode.webp"
alt="Plane Cycles and Modules"
width="100%"
/>
@@ -123,7 +113,7 @@ See [CONTRIBUTING](./CONTRIBUTING.md)
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/w2okwbtu2/Views_uxXsRatS4.png?updatedAt=1709298834522"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_analytics_dark_mode.webp"
alt="Plane Analytics"
width="100%"
/>
@@ -132,17 +122,17 @@ See [CONTRIBUTING](./CONTRIBUTING.md)
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/w2okwbtu2/Analytics_0o22gLRtp.png?updatedAt=1709298834389"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_pages_dark_mode.webp"
alt="Plane Pages"
width="100%"
/>
</a>
</p>
</p>
<p>
<p>
<a href="https://plane.so" target="_blank">
<img
src="https://ik.imagekit.io/w2okwbtu2/Drive_LlfeY4xn3.png?updatedAt=1709298837917"
src="https://plane-marketing.s3.ap-south-1.amazonaws.com/plane-readme/plane_commad_k_dark_mode.webp"
alt="Plane Command Menu"
width="100%"
/>
@@ -150,42 +140,20 @@ See [CONTRIBUTING](./CONTRIBUTING.md)
</p>
</p>
## 📝 Documentation
Explore Plane's [product documentation](https://docs.plane.so/) and [developer documentation](https://developers.plane.so/) to learn about features, setup, and usage.
## 📚 Documentation
For full documentation, visit [docs.plane.so](https://docs.plane.so/)
To see how to contribute, visit [here](https://github.com/makeplane/plane/blob/master/CONTRIBUTING.md).
## ❤️ Community
Join the Plane community on [GitHub Discussions](https://github.com/orgs/makeplane/discussions) and our [Discord server](https://discord.com/invite/A92xrEGCge). We follow a [Code of conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) in all our community channels.
The Plane community can be found on GitHub Discussions, where you can ask questions, voice ideas, and share your projects.
Feel free to ask questions, report bugs, participate in discussions, share ideas, request features, or showcase your projects. We'd love to hear from you!
To chat with other community members you can join the [Plane Discord](https://discord.com/invite/A92xrEGCge).
## 🛡️ Security
Our [Code of Conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community channels.
If you discover a security vulnerability in Plane, please report it responsibly instead of opening a public issue. We take all legitimate reports seriously and will investigate them promptly. See [Security policy](https://github.com/makeplane/plane/blob/master/SECURITY.md) for more info.
## ⛓️ Security
To disclose any security issues, please email us at security@plane.so.
## 🤝 Contributing
There are many ways you can contribute to Plane:
- Report [bugs](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%F0%9F%90%9Bbug&projects=&template=--bug-report.yaml&title=%5Bbug%5D%3A+) or submit [feature requests](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%E2%9C%A8feature&projects=&template=--feature-request.yaml&title=%5Bfeature%5D%3A+).
- Review the [documentation](https://docs.plane.so/) and submit [pull requests](https://github.com/makeplane/docs) to improve it—whether it's fixing typos or adding new content.
- Talk or write about Plane or any other ecosystem integration and [let us know](https://discord.com/invite/A92xrEGCge)!
- Show your support by upvoting [popular feature requests](https://github.com/makeplane/plane/issues).
Please read [CONTRIBUTING.md](https://github.com/makeplane/plane/blob/master/CONTRIBUTING.md) for details on the process for submitting pull requests to us.
### Repo activity
![Plane Repo Activity](https://repobeats.axiom.co/api/embed/2523c6ed2f77c082b7908c33e2ab208981d76c39.svg "Repobeats analytics image")
### We couldn't have done this without you.
<a href="https://github.com/makeplane/plane/graphs/contributors">
<img src="https://contrib.rocks/image?repo=makeplane/plane" />
</a>
## License
This project is licensed under the [GNU Affero General Public License v3.0](https://github.com/makeplane/plane/blob/master/LICENSE.txt).
If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports. Email engineering@plane.so to disclose any security vulnerabilities.

@@ -1,39 +0,0 @@
# Security policy
This document outlines the security protocols and vulnerability reporting guidelines for the Plane project. Ensuring the security of our systems is a top priority, and while we work diligently to maintain robust protection, vulnerabilities may still occur. We highly value the community's role in identifying and reporting security concerns to uphold the integrity of our systems and safeguard our users.
## Reporting a vulnerability
If you have identified a security vulnerability, submit your findings to [security@plane.so](mailto:security@plane.so).
Ensure your report includes all relevant information needed for us to reproduce and assess the issue. Include the IP address or URL of the affected system.
To ensure a responsible and effective disclosure process, please adhere to the following:
- Maintain confidentiality and refrain from publicly disclosing the vulnerability until we have had the opportunity to investigate and address the issue.
- Refrain from running automated vulnerability scans on our infrastructure or dashboard without prior consent. Contact us to set up a sandbox environment if necessary.
- Do not exploit any discovered vulnerabilities for malicious purposes, such as accessing or altering user data.
- Do not engage in physical security attacks, social engineering, distributed denial of service (DDoS) attacks, spam campaigns, or attacks on third-party applications as part of your vulnerability testing.
## Out of scope
While we appreciate all efforts to assist in improving our security, please note that the following types of vulnerabilities are considered out of scope:
- Vulnerabilities requiring man-in-the-middle (MITM) attacks or physical access to a user's device.
- Content spoofing or text injection issues without a clear attack vector or the ability to modify HTML/CSS.
- Issues related to email spoofing.
- Missing DNSSEC, CAA, or CSP headers.
- Absence of secure or HTTP-only flags on non-sensitive cookies.
## Our commitment
At Plane, we are committed to maintaining transparent and collaborative communication throughout the vulnerability resolution process. Here's what you can expect from us:
- **Response Time** <br/>
We will acknowledge receipt of your vulnerability report within three business days and provide an estimated timeline for resolution.
- **Legal Protection** <br/>
We will not initiate legal action against you for reporting vulnerabilities, provided you adhere to the reporting guidelines.
- **Confidentiality** <br/>
Your report will be treated with confidentiality. We will not disclose your personal information to third parties without your consent.
- **Recognition** <br/>
With your permission, we are happy to publicly acknowledge your contribution to improving our security once the issue is resolved.
- **Timely Resolution** <br/>
We are committed to working closely with you throughout the resolution process, providing timely updates as necessary. Our goal is to address all reported vulnerabilities swiftly, and we will actively engage with you to coordinate a responsible disclosure once the issue is fully resolved.
We appreciate your help in ensuring the security of our platform. Your contributions are crucial to protecting our users and maintaining a secure environment. Thank you for working with us to keep Plane safe.

@@ -1,182 +0,0 @@
ARG BASE_TAG=develop
ARG BUILD_TYPE=full
# *****************************************************************************
# STAGE 1: Build the project
# *****************************************************************************
FROM node:18-alpine AS builder
RUN apk add --no-cache libc6-compat
# Set working directory
WORKDIR /app
RUN yarn global add turbo
COPY . .
RUN turbo prune --scope=web --scope=space --scope=admin --docker
# *****************************************************************************
# STAGE 2: Install dependencies & build the project
# *****************************************************************************
# Add lockfile and package.json's of isolated subworkspace
FROM node:18-alpine AS installer
RUN apk add --no-cache libc6-compat
WORKDIR /app
# First install the dependencies (as they change less often)
COPY .gitignore .gitignore
COPY --from=builder /app/out/json/ .
COPY --from=builder /app/out/yarn.lock ./yarn.lock
RUN yarn install
# # Build the project
COPY --from=builder /app/out/full/ .
COPY turbo.json turbo.json
ARG NEXT_PUBLIC_API_BASE_URL=""
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
ARG NEXT_PUBLIC_ADMIN_BASE_URL=""
ENV NEXT_PUBLIC_ADMIN_BASE_URL=$NEXT_PUBLIC_ADMIN_BASE_URL
ARG NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
ENV NEXT_PUBLIC_ADMIN_BASE_PATH=$NEXT_PUBLIC_ADMIN_BASE_PATH
ARG NEXT_PUBLIC_SPACE_BASE_URL=""
ENV NEXT_PUBLIC_SPACE_BASE_URL=$NEXT_PUBLIC_SPACE_BASE_URL
ARG NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH
ARG NEXT_PUBLIC_WEB_BASE_URL=""
ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL
ENV NEXT_TELEMETRY_DISABLED=1
ENV TURBO_TELEMETRY_DISABLED=1
RUN yarn turbo run build --filter=web --filter=space --filter=admin
# *****************************************************************************
# STAGE 3: Copy the project and start it
# *****************************************************************************
FROM makeplane/plane-aio-base:${BUILD_TYPE}-${BASE_TAG} AS runner
WORKDIR /app
SHELL [ "/bin/bash", "-c" ]
# PYTHON APPLICATION SETUP
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
COPY apps/api/requirements.txt ./api/
COPY apps/api/requirements ./api/requirements
RUN pip install -r ./api/requirements.txt --compile --no-cache-dir
# Add in Django deps and generate Django's static files
COPY apps/api/manage.py ./api/manage.py
COPY apps/api/plane ./api/plane/
COPY apps/api/templates ./api/templates/
COPY package.json ./api/package.json
COPY apps/api/bin ./api/bin/
RUN chmod +x ./api/bin/*
RUN chmod -R 777 ./api/
# NEXTJS BUILDS
COPY --from=installer /app/web/next.config.js ./web/
COPY --from=installer /app/web/package.json ./web/
COPY --from=installer /app/web/.next/standalone ./web
COPY --from=installer /app/web/.next/static ./web/web/.next/static
COPY --from=installer /app/web/public ./web/web/public
COPY --from=installer /app/space/next.config.js ./space/
COPY --from=installer /app/space/package.json ./space/
COPY --from=installer /app/space/.next/standalone ./space
COPY --from=installer /app/space/.next/static ./space/space/.next/static
COPY --from=installer /app/space/public ./space/space/public
COPY --from=installer /app/admin/next.config.js ./admin/
COPY --from=installer /app/admin/package.json ./admin/
COPY --from=installer /app/admin/.next/standalone ./admin
COPY --from=installer /app/admin/.next/static ./admin/admin/.next/static
COPY --from=installer /app/admin/public ./admin/admin/public
ARG NEXT_PUBLIC_API_BASE_URL=""
ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
ARG NEXT_PUBLIC_ADMIN_BASE_URL=""
ENV NEXT_PUBLIC_ADMIN_BASE_URL=$NEXT_PUBLIC_ADMIN_BASE_URL
ARG NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
ENV NEXT_PUBLIC_ADMIN_BASE_PATH=$NEXT_PUBLIC_ADMIN_BASE_PATH
ARG NEXT_PUBLIC_SPACE_BASE_URL=""
ENV NEXT_PUBLIC_SPACE_BASE_URL=$NEXT_PUBLIC_SPACE_BASE_URL
ARG NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH
ARG NEXT_PUBLIC_WEB_BASE_URL=""
ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL
ENV NEXT_TELEMETRY_DISABLED=1
ENV TURBO_TELEMETRY_DISABLED=1
ARG BUILD_TYPE=full
ENV BUILD_TYPE=$BUILD_TYPE
COPY aio/supervisord-${BUILD_TYPE}-base /app/supervisord.conf
COPY aio/supervisord-app /app/supervisord-app
RUN cat /app/supervisord-app >> /app/supervisord.conf && \
rm /app/supervisord-app
COPY ./aio/nginx.conf /etc/nginx/nginx.conf.template
# if build type is full, run the below copy pg-setup.sh
COPY aio/postgresql.conf /etc/postgresql/postgresql.conf
COPY aio/pg-setup.sh /app/pg-setup.sh
RUN chmod +x /app/pg-setup.sh
# *****************************************************************************
# APPLICATION ENVIRONMENT SETTINGS
# *****************************************************************************
ENV APP_DOMAIN=localhost
ENV WEB_URL=http://${APP_DOMAIN}
ENV DEBUG=0
ENV CORS_ALLOWED_ORIGINS=http://${APP_DOMAIN},https://${APP_DOMAIN}
# Secret Key
ENV SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5
# Gunicorn Workers
ENV GUNICORN_WORKERS=1
ENV POSTGRES_USER="plane"
ENV POSTGRES_PASSWORD="plane"
ENV POSTGRES_DB="plane"
ENV POSTGRES_HOST="localhost"
ENV POSTGRES_PORT="5432"
ENV DATABASE_URL="postgresql://plane:plane@localhost:5432/plane"
ENV REDIS_HOST="localhost"
ENV REDIS_PORT="6379"
ENV REDIS_URL="redis://localhost:6379"
ENV USE_MINIO="1"
ENV AWS_REGION=""
ENV AWS_ACCESS_KEY_ID="access-key"
ENV AWS_SECRET_ACCESS_KEY="secret-key"
ENV AWS_S3_ENDPOINT_URL="http://localhost:9000"
ENV AWS_S3_BUCKET_NAME="uploads"
ENV MINIO_ROOT_USER="access-key"
ENV MINIO_ROOT_PASSWORD="secret-key"
ENV BUCKET_NAME="uploads"
ENV FILE_SIZE_LIMIT="5242880"
# *****************************************************************************
RUN /app/pg-setup.sh
CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

@@ -1,73 +0,0 @@
FROM --platform=$BUILDPLATFORM tonistiigi/binfmt AS binfmt
FROM python:3.12-slim
# Set environment variables to non-interactive for apt
ENV DEBIAN_FRONTEND=noninteractive
ENV BUILD_TYPE=full
SHELL [ "/bin/bash", "-c" ]
WORKDIR /app
RUN mkdir -p /app/{data,logs} && \
mkdir -p /app/data/{redis,pg,minio,nginx} && \
mkdir -p /app/logs/{access,error} && \
mkdir -p /etc/supervisor/conf.d
# Update the package list and install prerequisites
RUN apt-get update && \
apt-get install -y \
gnupg2 curl ca-certificates lsb-release software-properties-common \
build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev \
libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils \
tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu \
sudo lsof net-tools libpq-dev procps gettext
# Install Redis 7.2
RUN echo "deb http://deb.debian.org/debian $(lsb_release -cs)-backports main" > /etc/apt/sources.list.d/backports.list && \
curl -fsSL https://packages.redis.io/gpg | gpg --dearmor -o /usr/share/keyrings/redis-archive-keyring.gpg && \
echo "deb [signed-by=/usr/share/keyrings/redis-archive-keyring.gpg] https://packages.redis.io/deb $(lsb_release -cs) main" > /etc/apt/sources.list.d/redis.list && \
apt-get update && \
apt-get install -y redis-server
# Install PostgreSQL 15
ENV POSTGRES_VERSION=15
RUN curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /usr/share/keyrings/pgdg-archive-keyring.gpg && \
echo "deb [signed-by=/usr/share/keyrings/pgdg-archive-keyring.gpg] http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
apt-get install -y postgresql-$POSTGRES_VERSION postgresql-client-$POSTGRES_VERSION && \
mkdir -p /var/lib/postgresql/data && \
chown -R postgres:postgres /var/lib/postgresql
COPY postgresql.conf /etc/postgresql/postgresql.conf
RUN sudo -u postgres /usr/lib/postgresql/$POSTGRES_VERSION/bin/initdb -D /var/lib/postgresql/data
# Install MinIO
ARG TARGETARCH
RUN if [ "$TARGETARCH" = "amd64" ]; then \
curl -fSl https://dl.min.io/server/minio/release/linux-amd64/minio -o /usr/local/bin/minio; \
elif [ "$TARGETARCH" = "arm64" ]; then \
curl -fSl https://dl.min.io/server/minio/release/linux-arm64/minio -o /usr/local/bin/minio; \
else \
echo "Unsupported architecture: $TARGETARCH"; exit 1; \
fi && \
chmod +x /usr/local/bin/minio
# Install Node.js 18
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
apt-get install -y nodejs && \
python -m pip install --upgrade pip && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
# Create Supervisor configuration file
COPY supervisord-full-base /app/supervisord.conf
COPY nginx.conf /etc/nginx/nginx.conf.template
COPY env.sh /app/nginx-start.sh
RUN chmod +x /app/nginx-start.sh
# Expose ports for Redis, PostgreSQL, and MinIO
EXPOSE 6379 5432 9000 80 443
# Start Supervisor
CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

@@ -1,45 +0,0 @@
FROM --platform=$BUILDPLATFORM tonistiigi/binfmt AS binfmt
FROM python:3.12-slim
# Set environment variables to non-interactive for apt
ENV DEBIAN_FRONTEND=noninteractive
ENV BUILD_TYPE=slim
SHELL [ "/bin/bash", "-c" ]
WORKDIR /app
RUN mkdir -p /app/{data,logs} && \
mkdir -p /app/data/{nginx} && \
mkdir -p /app/logs/{access,error} && \
mkdir -p /etc/supervisor/conf.d
# Update the package list and install prerequisites
RUN apt-get update && \
apt-get install -y \
gnupg2 curl ca-certificates lsb-release software-properties-common \
build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev \
libsqlite3-dev wget llvm libncurses5-dev libncursesw5-dev xz-utils \
tk-dev libffi-dev liblzma-dev supervisor nginx nano vim ncdu \
sudo lsof net-tools libpq-dev procps gettext
# Install Node.js 18
RUN curl -fsSL https://deb.nodesource.com/setup_18.x | bash - && \
apt-get install -y nodejs
RUN python -m pip install --upgrade pip && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*
# Create Supervisor configuration file
COPY supervisord-slim-base /app/supervisord.conf
COPY nginx.conf /etc/nginx/nginx.conf.template
COPY env.sh /app/nginx-start.sh
RUN chmod +x /app/nginx-start.sh
# Expose ports for Redis, PostgreSQL, and MinIO
EXPOSE 80 443
# Start Supervisor
CMD ["/usr/bin/supervisord", "-c", "/app/supervisord.conf"]

@@ -1,7 +0,0 @@
#!/bin/bash
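# envsubst substitutes every ${VAR} in the template; exporting dollar="$" (plus
# scheme/http_upgrade as literal text) lets the template write nginx runtime
# variables as ${dollar}scheme so they survive as $scheme instead of being blanked.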
export dollar="$"
export http_upgrade="http_upgrade"
export scheme="scheme"
envsubst < /etc/nginx/nginx.conf.template > /etc/nginx/nginx.conf
exec nginx -g 'daemon off;'

@@ -1,72 +0,0 @@
events {
}
http {
sendfile on;
server {
listen 80;
root /www/data/;
access_log /var/log/nginx/access.log;
client_max_body_size ${FILE_SIZE_LIMIT};
add_header X-Content-Type-Options "nosniff" always;
add_header Referrer-Policy "no-referrer-when-downgrade" always;
add_header Permissions-Policy "interest-cohort=()" always;
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
add_header X-Forwarded-Proto "${dollar}scheme";
add_header X-Forwarded-Host "${dollar}host";
add_header X-Forwarded-For "${dollar}proxy_add_x_forwarded_for";
add_header X-Real-IP "${dollar}remote_addr";
location / {
proxy_http_version 1.1;
proxy_set_header Upgrade ${dollar}http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host ${dollar}http_host;
proxy_pass http://localhost:3001/;
}
location /spaces/ {
rewrite ^/spaces/?$ /spaces/login break;
proxy_http_version 1.1;
proxy_set_header Upgrade ${dollar}http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host ${dollar}http_host;
proxy_pass http://localhost:3002/spaces/;
}
location /god-mode/ {
proxy_http_version 1.1;
proxy_set_header Upgrade ${dollar}http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host ${dollar}http_host;
proxy_pass http://localhost:3003/god-mode/;
}
location /api/ {
proxy_http_version 1.1;
proxy_set_header Upgrade ${dollar}http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host ${dollar}http_host;
proxy_pass http://localhost:8000/api/;
}
location /auth/ {
proxy_http_version 1.1;
proxy_set_header Upgrade ${dollar}http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host ${dollar}http_host;
proxy_pass http://localhost:8000/auth/;
}
location /${BUCKET_NAME}/ {
proxy_http_version 1.1;
proxy_set_header Upgrade ${dollar}http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host ${dollar}http_host;
proxy_pass http://localhost:9000/uploads/;
}
}
}

@@ -1,14 +0,0 @@
#!/bin/bash
if [ "$BUILD_TYPE" == "full" ]; then
export PGHOST=localhost
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data start
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "CREATE USER $POSTGRES_USER WITH SUPERUSER PASSWORD '$POSTGRES_PASSWORD';" && \
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/createdb" -O "$POSTGRES_USER" "$POSTGRES_DB" && \
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/psql" --command "GRANT ALL PRIVILEGES ON DATABASE $POSTGRES_DB TO $POSTGRES_USER;" && \
sudo -u postgres "/usr/lib/postgresql/${POSTGRES_VERSION}/bin/pg_ctl" -D /var/lib/postgresql/data stop
fi

@@ -1,815 +0,0 @@
# -----------------------------
# PostgreSQL configuration file
# -----------------------------
#
# This file consists of lines of the form:
#
# name = value
#
# (The "=" is optional.) Whitespace may be used. Comments are introduced with
# "#" anywhere on a line. The complete list of parameter names and allowed
# values can be found in the PostgreSQL documentation.
#
# The commented-out settings shown in this file represent the default values.
# Re-commenting a setting is NOT sufficient to revert it to the default value;
# you need to reload the server.
#
# This file is read on server startup and when the server receives a SIGHUP
# signal. If you edit the file on a running system, you have to SIGHUP the
# server for the changes to take effect, run "pg_ctl reload", or execute
# "SELECT pg_reload_conf()". Some parameters, which are marked below,
# require a server shutdown and restart to take effect.
#
# Any parameter can also be given as a command-line option to the server, e.g.,
# "postgres -c log_connections=on". Some parameters can be changed at run time
# with the "SET" SQL command.
#
# Memory units: B = bytes Time units: us = microseconds
# kB = kilobytes ms = milliseconds
# MB = megabytes s = seconds
# GB = gigabytes min = minutes
# TB = terabytes h = hours
# d = days
#------------------------------------------------------------------------------
# FILE LOCATIONS
#------------------------------------------------------------------------------
# The default values of these variables are driven from the -D command-line
# option or PGDATA environment variable, represented here as ConfigDir.
data_directory = '/var/lib/postgresql/data' # use data in another directory
# (change requires restart)
hba_file = '/etc/postgresql/15/main/pg_hba.conf' # host-based authentication file
# (change requires restart)
ident_file = '/etc/postgresql/15/main/pg_ident.conf' # ident configuration file
# (change requires restart)
# If external_pid_file is not explicitly set, no extra PID file is written.
external_pid_file = '/var/run/postgresql/15-main.pid' # write an extra PID file
# (change requires restart)
#------------------------------------------------------------------------------
# CONNECTIONS AND AUTHENTICATION
#------------------------------------------------------------------------------
# - Connection Settings -
listen_addresses = 'localhost' # what IP address(es) to listen on;
# comma-separated list of addresses;
# defaults to 'localhost'; use '*' for all
# (change requires restart)
port = 5432 # (change requires restart)
max_connections = 200 # (change requires restart)
#superuser_reserved_connections = 3 # (change requires restart)
unix_socket_directories = '/var/run/postgresql' # comma-separated list of directories
# (change requires restart)
#unix_socket_group = '' # (change requires restart)
#unix_socket_permissions = 0777 # begin with 0 to use octal notation
# (change requires restart)
#bonjour = off # advertise server via Bonjour
# (change requires restart)
#bonjour_name = '' # defaults to the computer name
# (change requires restart)
# - TCP settings -
# see "man tcp" for details
#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds;
# 0 selects the system default
#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds;
# 0 selects the system default
#tcp_keepalives_count = 0 # TCP_KEEPCNT;
# 0 selects the system default
#tcp_user_timeout = 0 # TCP_USER_TIMEOUT, in milliseconds;
# 0 selects the system default
#client_connection_check_interval = 0 # time between checks for client
# disconnection while running queries;
# 0 for never
# - Authentication -
#authentication_timeout = 1min # 1s-600s
#password_encryption = scram-sha-256 # scram-sha-256 or md5
#db_user_namespace = off
# GSSAPI using Kerberos
#krb_server_keyfile = 'FILE:${sysconfdir}/krb5.keytab'
#krb_caseins_users = off
# - SSL -
ssl = on
#ssl_ca_file = ''
ssl_cert_file = '/etc/ssl/certs/ssl-cert-snakeoil.pem'
#ssl_crl_file = ''
#ssl_crl_dir = ''
ssl_key_file = '/etc/ssl/private/ssl-cert-snakeoil.key'
#ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers
#ssl_prefer_server_ciphers = on
#ssl_ecdh_curve = 'prime256v1'
#ssl_min_protocol_version = 'TLSv1.2'
#ssl_max_protocol_version = ''
#ssl_dh_params_file = ''
#ssl_passphrase_command = ''
#ssl_passphrase_command_supports_reload = off
#------------------------------------------------------------------------------
# RESOURCE USAGE (except WAL)
#------------------------------------------------------------------------------
# - Memory -
shared_buffers = 256MB # min 128kB
# (change requires restart)
#huge_pages = try # on, off, or try
# (change requires restart)
#huge_page_size = 0 # zero for system default
# (change requires restart)
#temp_buffers = 8MB # min 800kB
#max_prepared_transactions = 0 # zero disables the feature
# (change requires restart)
# Caution: it is not advisable to set max_prepared_transactions nonzero unless
# you actively intend to use prepared transactions.
#work_mem = 4MB # min 64kB
#hash_mem_multiplier = 2.0 # 1-1000.0 multiplier on hash table work_mem
#maintenance_work_mem = 64MB # min 1MB
#autovacuum_work_mem = -1 # min 1MB, or -1 to use maintenance_work_mem
#logical_decoding_work_mem = 64MB # min 64kB
#max_stack_depth = 2MB # min 100kB
#shared_memory_type = mmap # the default is the first option
# supported by the operating system:
# mmap
# sysv
# windows
# (change requires restart)
dynamic_shared_memory_type = posix # the default is usually the first option
# supported by the operating system:
# posix
# sysv
# windows
# mmap
# (change requires restart)
#min_dynamic_shared_memory = 0MB # (change requires restart)
# - Disk -
#temp_file_limit = -1 # limits per-process temp file space
# in kilobytes, or -1 for no limit
# - Kernel Resources -
#max_files_per_process = 1000 # min 64
# (change requires restart)
# - Cost-Based Vacuum Delay -
#vacuum_cost_delay = 0 # 0-100 milliseconds (0 disables)
#vacuum_cost_page_hit = 1 # 0-10000 credits
#vacuum_cost_page_miss = 2 # 0-10000 credits
#vacuum_cost_page_dirty = 20 # 0-10000 credits
#vacuum_cost_limit = 200 # 1-10000 credits
# - Background Writer -
#bgwriter_delay = 200ms # 10-10000ms between rounds
#bgwriter_lru_maxpages = 100 # max buffers written/round, 0 disables
#bgwriter_lru_multiplier = 2.0 # 0-10.0 multiplier on buffers scanned/round
#bgwriter_flush_after = 512kB # measured in pages, 0 disables
# - Asynchronous Behavior -
#backend_flush_after = 0 # measured in pages, 0 disables
#effective_io_concurrency = 1 # 1-1000; 0 disables prefetching
#maintenance_io_concurrency = 10 # 1-1000; 0 disables prefetching
#max_worker_processes = 8 # (change requires restart)
#max_parallel_workers_per_gather = 2 # limited by max_parallel_workers
#max_parallel_maintenance_workers = 2 # limited by max_parallel_workers
#max_parallel_workers = 8 # number of max_worker_processes that
# can be used in parallel operations
#parallel_leader_participation = on
#old_snapshot_threshold = -1 # 1min-60d; -1 disables; 0 is immediate
# (change requires restart)
#------------------------------------------------------------------------------
# WRITE-AHEAD LOG
#------------------------------------------------------------------------------
# - Settings -
#wal_level = replica # minimal, replica, or logical
# (change requires restart)
#fsync = on # flush data to disk for crash safety
# (turning this off can cause
# unrecoverable data corruption)
#synchronous_commit = on # synchronization level;
# off, local, remote_write, remote_apply, or on
#wal_sync_method = fsync # the default is the first option
# supported by the operating system:
# open_datasync
# fdatasync (default on Linux and FreeBSD)
# fsync
# fsync_writethrough
# open_sync
#full_page_writes = on # recover from partial page writes
#wal_log_hints = off # also do full page writes of non-critical updates
# (change requires restart)
#wal_compression = off # enables compression of full-page writes;
# off, pglz, lz4, zstd, or on
#wal_init_zero = on # zero-fill new WAL files
#wal_recycle = on # recycle WAL files
#wal_buffers = -1 # min 32kB, -1 sets based on shared_buffers
# (change requires restart)
#wal_writer_delay = 200ms # 1-10000 milliseconds
#wal_writer_flush_after = 1MB # measured in pages, 0 disables
#wal_skip_threshold = 2MB
#commit_delay = 0 # range 0-100000, in microseconds
#commit_siblings = 5 # range 1-1000
# - Checkpoints -
#checkpoint_timeout = 5min # range 30s-1d
#checkpoint_completion_target = 0.9 # checkpoint target duration, 0.0 - 1.0
#checkpoint_flush_after = 256kB # measured in pages, 0 disables
#checkpoint_warning = 30s # 0 disables
max_wal_size = 1GB
min_wal_size = 80MB
# - Prefetching during recovery -
#recovery_prefetch = try # prefetch pages referenced in the WAL?
#wal_decode_buffer_size = 512kB # lookahead window used for prefetching
# (change requires restart)
# - Archiving -
#archive_mode = off # enables archiving; off, on, or always
# (change requires restart)
#archive_library = '' # library to use to archive a logfile segment
# (empty string indicates archive_command should
# be used)
#archive_command = '' # command to use to archive a logfile segment
# placeholders: %p = path of file to archive
# %f = file name only
# e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f'
#archive_timeout = 0 # force a logfile segment switch after this
# number of seconds; 0 disables
# - Archive Recovery -
# These are only used in recovery mode.
#restore_command = '' # command to use to restore an archived logfile segment
# placeholders: %p = path of file to restore
# %f = file name only
# e.g. 'cp /mnt/server/archivedir/%f %p'
#archive_cleanup_command = '' # command to execute at every restartpoint
#recovery_end_command = '' # command to execute at completion of recovery
# - Recovery Target -
# Set these only when performing a targeted recovery.
#recovery_target = '' # 'immediate' to end recovery as soon as a
# consistent state is reached
# (change requires restart)
#recovery_target_name = '' # the named restore point to which recovery will proceed
# (change requires restart)
#recovery_target_time = '' # the time stamp up to which recovery will proceed
# (change requires restart)
#recovery_target_xid = '' # the transaction ID up to which recovery will proceed
# (change requires restart)
#recovery_target_lsn = '' # the WAL LSN up to which recovery will proceed
# (change requires restart)
#recovery_target_inclusive = on # Specifies whether to stop:
# just after the specified recovery target (on)
# just before the recovery target (off)
# (change requires restart)
#recovery_target_timeline = 'latest' # 'current', 'latest', or timeline ID
# (change requires restart)
#recovery_target_action = 'pause' # 'pause', 'promote', 'shutdown'
# (change requires restart)
#------------------------------------------------------------------------------
# REPLICATION
#------------------------------------------------------------------------------
# - Sending Servers -
# Set these on the primary and on any standby that will send replication data.
#max_wal_senders = 10 # max number of walsender processes
# (change requires restart)
#max_replication_slots = 10 # max number of replication slots
# (change requires restart)
#wal_keep_size = 0 # in megabytes; 0 disables
#max_slot_wal_keep_size = -1 # in megabytes; -1 disables
#wal_sender_timeout = 60s # in milliseconds; 0 disables
#track_commit_timestamp = off # collect timestamp of transaction commit
# (change requires restart)
# - Primary Server -
# These settings are ignored on a standby server.
#synchronous_standby_names = '' # standby servers that provide sync rep
# method to choose sync standbys, number of sync standbys,
# and comma-separated list of application_name
# from standby(s); '*' = all
#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed
# - Standby Servers -
# These settings are ignored on a primary server.
#primary_conninfo = '' # connection string to sending server
#primary_slot_name = '' # replication slot on sending server
#promote_trigger_file = '' # file name whose presence ends recovery
#hot_standby = on # "off" disallows queries during recovery
# (change requires restart)
#max_standby_archive_delay = 30s # max delay before canceling queries
# when reading WAL from archive;
# -1 allows indefinite delay
#max_standby_streaming_delay = 30s # max delay before canceling queries
# when reading streaming WAL;
# -1 allows indefinite delay
#wal_receiver_create_temp_slot = off # create temp slot if primary_slot_name
# is not set
#wal_receiver_status_interval = 10s # send replies at least this often
# 0 disables
#hot_standby_feedback = off # send info from standby to prevent
# query conflicts
#wal_receiver_timeout = 60s # time that receiver waits for
# communication from primary
# in milliseconds; 0 disables
#wal_retrieve_retry_interval = 5s # time to wait before retrying to
# retrieve WAL after a failed attempt
#recovery_min_apply_delay = 0 # minimum delay for applying changes during recovery
# - Subscribers -
# These settings are ignored on a publisher.
#max_logical_replication_workers = 4 # taken from max_worker_processes
# (change requires restart)
#max_sync_workers_per_subscription = 2 # taken from max_logical_replication_workers
#------------------------------------------------------------------------------
# QUERY TUNING
#------------------------------------------------------------------------------
# - Planner Method Configuration -
#enable_async_append = on
#enable_bitmapscan = on
#enable_gathermerge = on
#enable_hashagg = on
#enable_hashjoin = on
#enable_incremental_sort = on
#enable_indexscan = on
#enable_indexonlyscan = on
#enable_material = on
#enable_memoize = on
#enable_mergejoin = on
#enable_nestloop = on
#enable_parallel_append = on
#enable_parallel_hash = on
#enable_partition_pruning = on
#enable_partitionwise_join = off
#enable_partitionwise_aggregate = off
#enable_seqscan = on
#enable_sort = on
#enable_tidscan = on
# - Planner Cost Constants -
#seq_page_cost = 1.0 # measured on an arbitrary scale
#random_page_cost = 4.0 # same scale as above
#cpu_tuple_cost = 0.01 # same scale as above
#cpu_index_tuple_cost = 0.005 # same scale as above
#cpu_operator_cost = 0.0025 # same scale as above
#parallel_setup_cost = 1000.0 # same scale as above
#parallel_tuple_cost = 0.1 # same scale as above
#min_parallel_table_scan_size = 8MB
#min_parallel_index_scan_size = 512kB
#effective_cache_size = 4GB
#jit_above_cost = 100000 # perform JIT compilation if available
# and query more expensive than this;
# -1 disables
#jit_inline_above_cost = 500000 # inline small functions if query is
# more expensive than this; -1 disables
#jit_optimize_above_cost = 500000 # use expensive JIT optimizations if
# query is more expensive than this;
# -1 disables
# - Genetic Query Optimizer -
#geqo = on
#geqo_threshold = 12
#geqo_effort = 5 # range 1-10
#geqo_pool_size = 0 # selects default based on effort
#geqo_generations = 0 # selects default based on effort
#geqo_selection_bias = 2.0 # range 1.5-2.0
#geqo_seed = 0.0 # range 0.0-1.0
# - Other Planner Options -
#default_statistics_target = 100 # range 1-10000
#constraint_exclusion = partition # on, off, or partition
#cursor_tuple_fraction = 0.1 # range 0.0-1.0
#from_collapse_limit = 8
#jit = on # allow JIT compilation
#join_collapse_limit = 8 # 1 disables collapsing of explicit
# JOIN clauses
#plan_cache_mode = auto # auto, force_generic_plan or
# force_custom_plan
#recursive_worktable_factor = 10.0 # range 0.001-1000000
#------------------------------------------------------------------------------
# REPORTING AND LOGGING
#------------------------------------------------------------------------------
# - Where to Log -
#log_destination = 'stderr' # Valid values are combinations of
# stderr, csvlog, jsonlog, syslog, and
# eventlog, depending on platform.
# csvlog and jsonlog require
# logging_collector to be on.
# This is used when logging to stderr:
#logging_collector = off # Enable capturing of stderr, jsonlog,
# and csvlog into log files. Required
# to be on for csvlogs and jsonlogs.
# (change requires restart)
# These are only used if logging_collector is on:
#log_directory = 'log' # directory where log files are written,
# can be absolute or relative to PGDATA
#log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log' # log file name pattern,
# can include strftime() escapes
#log_file_mode = 0600 # creation mode for log files,
# begin with 0 to use octal notation
#log_rotation_age = 1d # Automatic rotation of logfiles will
# happen after that time. 0 disables.
#log_rotation_size = 10MB # Automatic rotation of logfiles will
# happen after that much log output.
# 0 disables.
#log_truncate_on_rotation = off # If on, an existing log file with the
# same name as the new log file will be
# truncated rather than appended to.
# But such truncation only occurs on
# time-driven rotation, not on restarts
# or size-driven rotation. Default is
# off, meaning append to existing files
# in all cases.
# These are relevant when logging to syslog:
#syslog_facility = 'LOCAL0'
#syslog_ident = 'postgres'
#syslog_sequence_numbers = on
#syslog_split_messages = on
# This is only relevant when logging to eventlog (Windows):
# (change requires restart)
#event_source = 'PostgreSQL'
# - When to Log -
#log_min_messages = warning # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# info
# notice
# warning
# error
# log
# fatal
# panic
#log_min_error_statement = error # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# info
# notice
# warning
# error
# log
# fatal
# panic (effectively off)
#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements
# and their durations, > 0 logs only
# statements running at least this number
# of milliseconds
#log_min_duration_sample = -1 # -1 is disabled, 0 logs a sample of statements
# and their durations, > 0 logs only a sample of
# statements running at least this number
# of milliseconds;
# sample fraction is determined by log_statement_sample_rate
#log_statement_sample_rate = 1.0 # fraction of logged statements exceeding
# log_min_duration_sample to be logged;
# 1.0 logs all such statements, 0.0 never logs
#log_transaction_sample_rate = 0.0 # fraction of transactions whose statements
# are logged regardless of their duration; 1.0 logs all
# statements from all transactions, 0.0 never logs
#log_startup_progress_interval = 10s # Time between progress updates for
# long-running startup operations.
# 0 disables the feature, > 0 indicates
# the interval in milliseconds.
# - What to Log -
#debug_print_parse = off
#debug_print_rewritten = off
#debug_print_plan = off
#debug_pretty_print = on
#log_autovacuum_min_duration = 10min # log autovacuum activity;
# -1 disables, 0 logs all actions and
# their durations, > 0 logs only
# actions running at least this number
# of milliseconds.
#log_checkpoints = on
#log_connections = off
#log_disconnections = off
#log_duration = off
#log_error_verbosity = default # terse, default, or verbose messages
#log_hostname = off
log_line_prefix = '%m [%p] %q%u@%d ' # special values:
# %a = application name
# %u = user name
# %d = database name
# %r = remote host and port
# %h = remote host
# %b = backend type
# %p = process ID
# %P = process ID of parallel group leader
# %t = timestamp without milliseconds
# %m = timestamp with milliseconds
# %n = timestamp with milliseconds (as a Unix epoch)
# %Q = query ID (0 if none or not computed)
# %i = command tag
# %e = SQL state
# %c = session ID
# %l = session line number
# %s = session start timestamp
# %v = virtual transaction ID
# %x = transaction ID (0 if none)
# %q = stop here in non-session
# processes
# %% = '%'
# e.g. '<%u%%%d> '
#log_lock_waits = off # log lock waits >= deadlock_timeout
#log_recovery_conflict_waits = off # log standby recovery conflict waits
# >= deadlock_timeout
#log_parameter_max_length = -1 # when logging statements, limit logged
# bind-parameter values to N bytes;
# -1 means print in full, 0 disables
#log_parameter_max_length_on_error = 0 # when logging an error, limit logged
# bind-parameter values to N bytes;
# -1 means print in full, 0 disables
#log_statement = 'none' # none, ddl, mod, all
#log_replication_commands = off
#log_temp_files = -1 # log temporary files equal or larger
# than the specified size in kilobytes;
# -1 disables, 0 logs all temp files
log_timezone = 'Etc/UTC'
#------------------------------------------------------------------------------
# PROCESS TITLE
#------------------------------------------------------------------------------
cluster_name = '15/main' # added to process titles if nonempty
# (change requires restart)
#update_process_title = on
#------------------------------------------------------------------------------
# STATISTICS
#------------------------------------------------------------------------------
# - Cumulative Query and Index Statistics -
#track_activities = on
#track_activity_query_size = 1024 # (change requires restart)
#track_counts = on
#track_io_timing = off
#track_wal_io_timing = off
#track_functions = none # none, pl, all
#stats_fetch_consistency = cache
# - Monitoring -
#compute_query_id = auto
#log_statement_stats = off
#log_parser_stats = off
#log_planner_stats = off
#log_executor_stats = off
#------------------------------------------------------------------------------
# AUTOVACUUM
#------------------------------------------------------------------------------
#autovacuum = on # Enable autovacuum subprocess? 'on'
# requires track_counts to also be on.
#autovacuum_max_workers = 3 # max number of autovacuum subprocesses
# (change requires restart)
#autovacuum_naptime = 1min # time between autovacuum runs
#autovacuum_vacuum_threshold = 50 # min number of row updates before
# vacuum
#autovacuum_vacuum_insert_threshold = 1000 # min number of row inserts
# before vacuum; -1 disables insert
# vacuums
#autovacuum_analyze_threshold = 50 # min number of row updates before
# analyze
#autovacuum_vacuum_scale_factor = 0.2 # fraction of table size before vacuum
#autovacuum_vacuum_insert_scale_factor = 0.2 # fraction of inserts over table
# size before insert vacuum
#autovacuum_analyze_scale_factor = 0.1 # fraction of table size before analyze
#autovacuum_freeze_max_age = 200000000 # maximum XID age before forced vacuum
# (change requires restart)
#autovacuum_multixact_freeze_max_age = 400000000 # maximum multixact age
# before forced vacuum
# (change requires restart)
#autovacuum_vacuum_cost_delay = 2ms # default vacuum cost delay for
# autovacuum, in milliseconds;
# -1 means use vacuum_cost_delay
#autovacuum_vacuum_cost_limit = -1 # default vacuum cost limit for
# autovacuum, -1 means use
# vacuum_cost_limit
#------------------------------------------------------------------------------
# CLIENT CONNECTION DEFAULTS
#------------------------------------------------------------------------------
# - Statement Behavior -
#client_min_messages = notice # values in order of decreasing detail:
# debug5
# debug4
# debug3
# debug2
# debug1
# log
# notice
# warning
# error
#search_path = '"$user", public' # schema names
#row_security = on
#default_table_access_method = 'heap'
#default_tablespace = '' # a tablespace name, '' uses the default
#default_toast_compression = 'pglz' # 'pglz' or 'lz4'
#temp_tablespaces = '' # a list of tablespace names, '' uses
# only default tablespace
#check_function_bodies = on
#default_transaction_isolation = 'read committed'
#default_transaction_read_only = off
#default_transaction_deferrable = off
#session_replication_role = 'origin'
#statement_timeout = 0 # in milliseconds, 0 is disabled
#lock_timeout = 0 # in milliseconds, 0 is disabled
#idle_in_transaction_session_timeout = 0 # in milliseconds, 0 is disabled
#idle_session_timeout = 0 # in milliseconds, 0 is disabled
#vacuum_freeze_table_age = 150000000
#vacuum_freeze_min_age = 50000000
#vacuum_failsafe_age = 1600000000
#vacuum_multixact_freeze_table_age = 150000000
#vacuum_multixact_freeze_min_age = 5000000
#vacuum_multixact_failsafe_age = 1600000000
#bytea_output = 'hex' # hex, escape
#xmlbinary = 'base64'
#xmloption = 'content'
#gin_pending_list_limit = 4MB
# - Locale and Formatting -
datestyle = 'iso, mdy'
#intervalstyle = 'postgres'
timezone = 'Etc/UTC'
#timezone_abbreviations = 'Default' # Select the set of available time zone
# abbreviations. Currently, there are
# Default
# Australia (historical usage)
# India
# You can create your own file in
# share/timezonesets/.
#extra_float_digits = 1 # min -15, max 3; any value >0 actually
# selects precise output mode
#client_encoding = sql_ascii # actually, defaults to database
# encoding
# These settings are initialized by initdb, but they can be changed.
lc_messages = 'C.UTF-8' # locale for system error message
# strings
lc_monetary = 'C.UTF-8' # locale for monetary formatting
lc_numeric = 'C.UTF-8' # locale for number formatting
lc_time = 'C.UTF-8' # locale for time formatting
# default configuration for text search
default_text_search_config = 'pg_catalog.english'
# - Shared Library Preloading -
#local_preload_libraries = ''
#session_preload_libraries = ''
#shared_preload_libraries = '' # (change requires restart)
#jit_provider = 'llvmjit' # JIT library to use
# - Other Defaults -
#dynamic_library_path = '$libdir'
#extension_destdir = '' # prepend path when loading extensions
# and shared objects (added by Debian)
#gin_fuzzy_search_limit = 0
#------------------------------------------------------------------------------
# LOCK MANAGEMENT
#------------------------------------------------------------------------------
#deadlock_timeout = 1s
#max_locks_per_transaction = 64 # min 10
# (change requires restart)
#max_pred_locks_per_transaction = 64 # min 10
# (change requires restart)
#max_pred_locks_per_relation = -2 # negative values mean
# (max_pred_locks_per_transaction
# / -max_pred_locks_per_relation) - 1
#max_pred_locks_per_page = 2 # min 0
#------------------------------------------------------------------------------
# VERSION AND PLATFORM COMPATIBILITY
#------------------------------------------------------------------------------
# - Previous PostgreSQL Versions -
#array_nulls = on
#backslash_quote = safe_encoding # on, off, or safe_encoding
#escape_string_warning = on
#lo_compat_privileges = off
#quote_all_identifiers = off
#standard_conforming_strings = on
#synchronize_seqscans = on
# - Other Platforms and Clients -
#transform_null_equals = off
#------------------------------------------------------------------------------
# ERROR HANDLING
#------------------------------------------------------------------------------
#exit_on_error = off # terminate session on any error?
#restart_after_crash = on # reinitialize after backend crash?
#data_sync_retry = off # retry or panic on failure to fsync
# data?
# (change requires restart)
#recovery_init_sync_method = fsync # fsync, syncfs (Linux 5.8+)
#------------------------------------------------------------------------------
# CONFIG FILE INCLUDES
#------------------------------------------------------------------------------
# These options allow settings to be loaded from files other than the
# default postgresql.conf. Note that these are directives, not variable
# assignments, so they can usefully be given more than once.
# include_dir = 'conf.d' # include files ending in '.conf' from
# a directory, e.g., 'conf.d'
#include_if_exists = '...' # include file only if it exists
#include = '...' # include file
#------------------------------------------------------------------------------
# CUSTOMIZED OPTIONS
#------------------------------------------------------------------------------
# Add settings for extensions here
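Most of this file is the stock PostgreSQL 15 defaults; the few active settings (log_line_prefix, log_timezone, datestyle, timezone, the lc_* locales, default_text_search_config and cluster_name) only matter once the server runs with this file. A small sketch for checking what the running instance actually reports, assuming psycopg2 is available and the plane/plane credentials used elsewhere in this changeset:

import psycopg2

# Connection details are assumptions taken from the compose defaults in this changeset.
conn = psycopg2.connect(host="plane-db", dbname="plane", user="plane", password="plane")
with conn.cursor() as cur:
    for name in ("log_line_prefix", "log_timezone", "timezone", "cluster_name"):
        cur.execute("SHOW " + name)  # SHOW reports the effective value for the session
        print(name, "=", cur.fetchone()[0])
conn.close()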

View File

@@ -1,71 +0,0 @@
[program:web]
command=node /app/web/web/server.js
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3001,HOSTNAME=0.0.0.0
[program:space]
command=node /app/space/space/server.js
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3002,HOSTNAME=0.0.0.0
[program:admin]
command=node /app/admin/admin/server.js
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
environment=PORT=3003,HOSTNAME=0.0.0.0
[program:migrator]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-migrator.sh"
autostart=true
autorestart=false
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
[program:api]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-api.sh"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
[program:worker]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-worker.sh"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0
[program:beat]
directory=/app/api
command=sh -c "./bin/docker-entrypoint-beat.sh"
autostart=true
autorestart=true
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stdout
stderr_logfile_maxbytes=0

View File

@@ -1,38 +0,0 @@
[supervisord]
user=root
nodaemon=true
stderr_logfile=/app/logs/error/supervisor.err.log
stdout_logfile=/app/logs/access/supervisor.log
[program:redis]
directory=/app/data/redis
command=redis-server
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/redis.err.log
stdout_logfile=/app/logs/access/redis.log
[program:postgresql]
user=postgres
command=/usr/lib/postgresql/15/bin/postgres --config-file=/etc/postgresql/postgresql.conf
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/postgresql.err.log
stdout_logfile=/app/logs/access/postgresql.log
[program:minio]
directory=/app/data/minio
command=minio server /app/data/minio
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/minio.err.log
stdout_logfile=/app/logs/access/minio.log
[program:nginx]
directory=/app/data/nginx
command=/app/nginx-start.sh
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/nginx.err.log
stdout_logfile=/app/logs/access/nginx.log

View File

@@ -1,14 +0,0 @@
[supervisord]
user=root
nodaemon=true
stderr_logfile=/app/logs/error/supervisor.err.log
stdout_logfile=/app/logs/access/supervisor.log
[program:nginx]
directory=/app/data/nginx
command=/app/nginx-start.sh
autostart=true
autorestart=true
stderr_logfile=/app/logs/error/nginx.err.log
stdout_logfile=/app/logs/access/nginx.log

72
apiserver/.env.example Normal file
View File

@@ -0,0 +1,72 @@
# Backend
# Debug value for the API server; set it to 0 for production use
DEBUG=0
DJANGO_SETTINGS_MODULE="plane.settings.production"
# Error logs
SENTRY_DSN=""
# Database Settings
PGUSER="plane"
PGPASSWORD="plane"
PGHOST="plane-db"
PGDATABASE="plane"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
# Email Settings
EMAIL_HOST=""
EMAIL_HOST_USER=""
EMAIL_HOST_PASSWORD=""
EMAIL_PORT=587
EMAIL_FROM="Team Plane <team@mailer.plane.so>"
EMAIL_USE_TLS="1"
EMAIL_USE_SSL="0"
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
# Changing this requires a matching change in nginx.conf for uploads when using the minio setup
AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
# GPT settings
OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
OPENAI_API_KEY="sk-" # add your openai key here
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
# Github
GITHUB_CLIENT_SECRET="" # For fetching release notes
# Settings related to Docker
DOCKERIZED=1
# Set to 1 if using the pre-configured minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
# Default Creds
DEFAULT_EMAIL="captain@plane.so"
DEFAULT_PASSWORD="password123"
# SignUps
ENABLE_SIGNUP="1"
# Enable Email/Password Signup
ENABLE_EMAIL_PASSWORD="1"
# Enable Magic link Login
ENABLE_MAGIC_LINK_LOGIN="0"
# Email redirections and minio domain settings
WEB_URL="http://localhost"
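A minimal sketch of how these variables can be read on the application side, using only the standard library; the names mirror this example file and the fallbacks are the documented defaults:

import os

# Prefer the composed DATABASE_URL; otherwise rebuild it from the PG* parts.
database_url = os.environ.get("DATABASE_URL") or (
    "postgresql://{user}:{pwd}@{host}/{db}".format(
        user=os.environ.get("PGUSER", "plane"),
        pwd=os.environ.get("PGPASSWORD", "plane"),
        host=os.environ.get("PGHOST", "plane-db"),
        db=os.environ.get("PGDATABASE", "plane"),
    )
)

# Flags arrive as "0"/"1" strings, so normalise them explicitly.
use_minio = os.environ.get("USE_MINIO", "0") == "1"
file_size_limit = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))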

61
apiserver/Dockerfile.api Normal file
View File

@@ -0,0 +1,61 @@
FROM python:3.11.1-alpine3.17 AS backend
# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
WORKDIR /code
RUN apk --no-cache add \
"libpq~=15" \
"libxslt~=1.1" \
"nodejs-current~=19" \
"xmlsec~=1.2"
COPY requirements.txt ./
COPY requirements ./requirements
RUN apk add --no-cache libffi-dev
RUN apk add --no-cache --virtual .build-deps \
"bash~=5.2" \
"g++~=12.2" \
"gcc~=12.2" \
"cargo~=1.64" \
"git~=2" \
"make~=4.3" \
"postgresql13-dev~=13" \
"libc-dev" \
"linux-headers" \
&& \
pip install -r requirements.txt --compile --no-cache-dir \
&& \
apk del .build-deps
RUN addgroup -S plane && \
adduser -S captain -G plane
RUN chown captain.plane /code
USER captain
# Add in Django deps and generate Django's static files
COPY manage.py manage.py
COPY plane plane/
COPY templates templates/
COPY gunicorn.config.py ./
USER root
RUN apk --no-cache add "bash~=5.2"
COPY ./bin ./bin/
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code
USER captain
# Expose container port and run entry point script
EXPOSE 8000
# CMD [ "./bin/takeoff" ]

52
apiserver/Dockerfile.dev Normal file
View File

@@ -0,0 +1,52 @@
FROM python:3.11.1-alpine3.17 AS backend
# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
RUN apk --no-cache add \
"bash~=5.2" \
"libpq~=15" \
"libxslt~=1.1" \
"nodejs-current~=19" \
"xmlsec~=1.2" \
"libffi-dev" \
"bash~=5.2" \
"g++~=12.2" \
"gcc~=12.2" \
"cargo~=1.64" \
"git~=2" \
"make~=4.3" \
"postgresql13-dev~=13" \
"libc-dev" \
"linux-headers"
WORKDIR /code
COPY requirements.txt ./requirements.txt
ADD requirements ./requirements
RUN pip install -r requirements.txt --compile --no-cache-dir
RUN addgroup -S plane && \
adduser -S captain -G plane
RUN chown captain.plane /code
USER captain
# Add in Django deps and generate Django's static files
USER root
# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code
USER captain
# Expose container port and run entry point script
EXPOSE 8000
# CMD [ "./bin/takeoff" ]

3
apiserver/Procfile Normal file
View File

@@ -0,0 +1,3 @@
web: gunicorn -w 4 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:$PORT --max-requests 10000 --max-requests-jitter 1000 --access-logfile -
worker: celery -A plane worker -l info
beat: celery -A plane beat -l INFO

224
apiserver/back_migration.py Normal file
View File

@@ -0,0 +1,224 @@
# All the python scripts that are used for back migrations
import uuid
import random
from django.contrib.auth.hashers import make_password
from plane.db.models import ProjectIdentifier
from plane.db.models import (
Issue,
IssueComment,
User,
Project,
ProjectMember,
Label,
Integration,
)
# Update description and description html values for old descriptions
def update_description():
try:
issues = Issue.objects.all()
updated_issues = []
for issue in issues:
issue.description_html = f"<p>{issue.description}</p>"
issue.description_stripped = issue.description
updated_issues.append(issue)
Issue.objects.bulk_update(
updated_issues, ["description_html", "description_stripped"], batch_size=100
)
print("Success")
except Exception as e:
print(e)
print("Failed")
def update_comments():
try:
issue_comments = IssueComment.objects.all()
updated_issue_comments = []
for issue_comment in issue_comments:
issue_comment.comment_html = f"<p>{issue_comment.comment_stripped}</p>"
updated_issue_comments.append(issue_comment)
IssueComment.objects.bulk_update(
updated_issue_comments, ["comment_html"], batch_size=100
)
print("Success")
except Exception as e:
print(e)
print("Failed")
def update_project_identifiers():
try:
project_identifiers = ProjectIdentifier.objects.filter(
workspace_id=None
).select_related("project", "project__workspace")
updated_identifiers = []
for identifier in project_identifiers:
identifier.workspace_id = identifier.project.workspace_id
updated_identifiers.append(identifier)
ProjectIdentifier.objects.bulk_update(
updated_identifiers, ["workspace_id"], batch_size=50
)
print("Success")
except Exception as e:
print(e)
print("Failed")
def update_user_empty_password():
try:
users = User.objects.filter(password="")
updated_users = []
for user in users:
user.password = make_password(uuid.uuid4().hex)
user.is_password_autoset = True
updated_users.append(user)
User.objects.bulk_update(updated_users, ["password"], batch_size=50)
print("Success")
except Exception as e:
print(e)
print("Failed")
def updated_issue_sort_order():
try:
issues = Issue.objects.all()
updated_issues = []
for issue in issues:
issue.sort_order = issue.sequence_id * random.randint(100, 500)
updated_issues.append(issue)
Issue.objects.bulk_update(updated_issues, ["sort_order"], batch_size=100)
print("Success")
except Exception as e:
print(e)
print("Failed")
def update_project_cover_images():
try:
project_cover_images = [
"https://images.unsplash.com/photo-1677432658720-3d84f9d657b4?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
"https://images.unsplash.com/photo-1661107564401-57497d8fe86f?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1332&q=80",
"https://images.unsplash.com/photo-1677352241429-dc90cfc7a623?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1332&q=80",
"https://images.unsplash.com/photo-1677196728306-eeafea692454?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1331&q=80",
"https://images.unsplash.com/photo-1660902179734-c94c944f7830?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1255&q=80",
"https://images.unsplash.com/photo-1672243775941-10d763d9adef?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
"https://images.unsplash.com/photo-1677040628614-53936ff66632?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
"https://images.unsplash.com/photo-1676920410907-8d5f8dd4b5ba?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1332&q=80",
"https://images.unsplash.com/photo-1676846328604-ce831c481346?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1155&q=80",
"https://images.unsplash.com/photo-1676744843212-09b7e64c3a05?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
"https://images.unsplash.com/photo-1676798531090-1608bedeac7b?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
"https://images.unsplash.com/photo-1597088758740-56fd7ec8a3f0?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1169&q=80",
"https://images.unsplash.com/photo-1676638392418-80aad7c87b96?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=774&q=80",
"https://images.unsplash.com/photo-1649639194967-2fec0b4ea7bc?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
"https://images.unsplash.com/photo-1675883086902-b453b3f8146e?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=774&q=80",
"https://images.unsplash.com/photo-1675887057159-40fca28fdc5d?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1173&q=80",
"https://images.unsplash.com/photo-1675373980203-f84c5a672aa5?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
"https://images.unsplash.com/photo-1675191475318-d2bf6bad1200?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1332&q=80",
"https://images.unsplash.com/photo-1675456230532-2194d0c4bcc0?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1170&q=80",
"https://images.unsplash.com/photo-1675371788315-60fa0ef48267?ixlib=rb-4.0.3&ixid=MnwxMjA3fDB8MHxwaG90by1wYWdlfHx8fGVufDB8fHx8&auto=format&fit=crop&w=1332&q=80",
]
projects = Project.objects.all()
updated_projects = []
for project in projects:
project.cover_image = project_cover_images[random.randint(0, 19)]
updated_projects.append(project)
Project.objects.bulk_update(updated_projects, ["cover_image"], batch_size=100)
print("Success")
except Exception as e:
print(e)
print("Failed")
def update_user_view_property():
try:
project_members = ProjectMember.objects.all()
updated_project_members = []
for project_member in project_members:
project_member.default_props = {
"filters": {"type": None},
"orderBy": "-created_at",
"collapsed": True,
"issueView": "list",
"filterIssue": None,
"groupByProperty": None,
"showEmptyGroups": True,
}
updated_project_members.append(project_member)
ProjectMember.objects.bulk_update(
updated_project_members, ["default_props"], batch_size=100
)
print("Success")
except Exception as e:
print(e)
print("Failed")
def update_label_color():
try:
labels = Label.objects.filter(color="")
updated_labels = []
for label in labels:
label.color = "#" + "%06x" % random.randint(0, 0xFFFFFF)
updated_labels.append(label)
Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)
print("Success")
except Exception as e:
print(e)
print("Failed")
def create_slack_integration():
try:
_ = Integration.objects.create(provider="slack", network=2, title="Slack")
print("Success")
except Exception as e:
print(e)
print("Failed")
def update_integration_verified():
try:
integrations = Integration.objects.all()
updated_integrations = []
for integration in integrations:
integration.verified = True
updated_integrations.append(integration)
Integration.objects.bulk_update(
updated_integrations, ["verified"], batch_size=10
)
print("Success")
except Exception as e:
print(e)
print("Failed")
def update_start_date():
try:
issues = Issue.objects.filter(state__group__in=["started", "completed"])
updated_issues = []
for issue in issues:
issue.start_date = issue.created_at.date()
updated_issues.append(issue)
Issue.objects.bulk_update(updated_issues, ["start_date"], batch_size=500)
print("Success")
except Exception as e:
print(e)
print("Failed")

5
apiserver/bin/beat Normal file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
set -e
python manage.py wait_for_db
celery -A plane beat -l info

9
apiserver/bin/takeoff Executable file
View File

@@ -0,0 +1,9 @@
#!/bin/bash
set -e
python manage.py wait_for_db
python manage.py migrate
# Create a Default User
python bin/user_script.py
exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -

View File

@@ -0,0 +1,28 @@
import os, sys, random, string
import uuid
sys.path.append("/code")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
import django
django.setup()
from plane.db.models import User
def populate():
default_email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
default_password = os.environ.get("DEFAULT_PASSWORD", "password123")
if not User.objects.filter(email=default_email).exists():
user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
user.set_password(default_password)
user.save()
print(f"User created with an email: {default_email}")
else:
print(f"User already exists with the default email: {default_email}")
if __name__ == "__main__":
populate()

5
apiserver/bin/worker Executable file
View File

@@ -0,0 +1,5 @@
#!/bin/bash
set -e
python manage.py wait_for_db
celery -A plane worker -l info

View File

@@ -0,0 +1,6 @@
from psycogreen.gevent import patch_psycopg
def post_fork(server, worker):
patch_psycopg()
worker.log.info("Made Psycopg2 Green")

View File

@@ -2,8 +2,10 @@
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
if __name__ == '__main__':
os.environ.setdefault(
'DJANGO_SETTINGS_MODULE',
'plane.settings.production')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:

View File

@@ -0,0 +1,3 @@
from .celery import app as celery_app
__all__ = ('celery_app',)

View File

@@ -0,0 +1,5 @@
from django.apps import AppConfig
class AnalyticsConfig(AppConfig):
name = 'plane.analytics'

View File

@@ -0,0 +1,2 @@
from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission, WorkspaceViewerPermission
from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission

View File

@@ -0,0 +1,104 @@
# Third Party imports
from rest_framework.permissions import BasePermission, SAFE_METHODS
# Module import
from plane.db.models import WorkspaceMember, ProjectMember
# Permission Mappings
Admin = 20
Member = 15
Viewer = 10
Guest = 5
class ProjectBasePermission(BasePermission):
def has_permission(self, request, view):
if request.user.is_anonymous:
return False
## Safe Methods -> Handle the filtering logic in queryset
if request.method in SAFE_METHODS:
return WorkspaceMember.objects.filter(
workspace__slug=view.workspace_slug, member=request.user
).exists()
## Only workspace owners or admins can create the projects
if request.method == "POST":
return WorkspaceMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
role__in=[Admin, Member],
).exists()
## Only Project Admins can update project attributes
return ProjectMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
role=Admin,
project_id=view.project_id,
).exists()
class ProjectMemberPermission(BasePermission):
def has_permission(self, request, view):
if request.user.is_anonymous:
return False
## Safe Methods -> Handle the filtering logic in queryset
if request.method in SAFE_METHODS:
return ProjectMember.objects.filter(
workspace__slug=view.workspace_slug, member=request.user
).exists()
## Only workspace owners or admins can create the projects
if request.method == "POST":
return WorkspaceMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
role__in=[Admin, Member],
).exists()
## Only Project Admins can update project attributes
return ProjectMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
role__in=[Admin, Member],
project_id=view.project_id,
).exists()
class ProjectEntityPermission(BasePermission):
def has_permission(self, request, view):
if request.user.is_anonymous:
return False
## Safe Methods -> Handle the filtering logic in queryset
if request.method in SAFE_METHODS:
return ProjectMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
project_id=view.project_id,
).exists()
## Only project members or admins can create and edit the project attributes
return ProjectMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
role__in=[Admin, Member],
project_id=view.project_id,
).exists()
class ProjectLitePermission(BasePermission):
def has_permission(self, request, view):
if request.user.is_anonymous:
return False
return ProjectMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
project_id=view.project_id,
).exists()
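A sketch (not part of this changeset) of how one of these classes attaches to a DRF view. The permission reads view.workspace_slug and view.project_id, so the hypothetical view below exposes them from its URL kwargs:

from rest_framework.response import Response
from rest_framework.views import APIView

from plane.api.permissions import ProjectEntityPermission


class ExampleIssueView(APIView):
    permission_classes = [ProjectEntityPermission]

    @property
    def workspace_slug(self):
        return self.kwargs.get("slug")

    @property
    def project_id(self):
        return self.kwargs.get("project_id")

    def get(self, request, slug, project_id):
        # Reaching this point means ProjectEntityPermission allowed the request.
        return Response({"detail": "permitted"})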

View File

@@ -0,0 +1,82 @@
# Third Party imports
from rest_framework.permissions import BasePermission, SAFE_METHODS
# Module imports
from plane.db.models import WorkspaceMember
# Permission Mappings
Owner = 20
Admin = 15
Member = 10
Guest = 5
# TODO: Move the below logic to python match - python v3.10
class WorkSpaceBasePermission(BasePermission):
def has_permission(self, request, view):
# allow anyone to create a workspace
if request.user.is_anonymous:
return False
if request.method == "POST":
return True
## Safe Methods
if request.method in SAFE_METHODS:
return True
# allow only admins and owners to update the workspace settings
if request.method in ["PUT", "PATCH"]:
return WorkspaceMember.objects.filter(
member=request.user,
workspace__slug=view.workspace_slug,
role__in=[Owner, Admin],
).exists()
# allow only owner to delete the workspace
if request.method == "DELETE":
return WorkspaceMember.objects.filter(
member=request.user, workspace__slug=view.workspace_slug, role=Owner
).exists()
class WorkSpaceAdminPermission(BasePermission):
def has_permission(self, request, view):
if request.user.is_anonymous:
return False
return WorkspaceMember.objects.filter(
member=request.user,
workspace__slug=view.workspace_slug,
role__in=[Owner, Admin],
).exists()
class WorkspaceEntityPermission(BasePermission):
def has_permission(self, request, view):
if request.user.is_anonymous:
return False
## Safe Methods -> Handle the filtering logic in queryset
if request.method in SAFE_METHODS:
return WorkspaceMember.objects.filter(
workspace__slug=view.workspace_slug,
member=request.user,
).exists()
return WorkspaceMember.objects.filter(
member=request.user,
workspace__slug=view.workspace_slug,
role__in=[Owner, Admin],
).exists()
class WorkspaceViewerPermission(BasePermission):
def has_permission(self, request, view):
if request.user.is_anonymous:
return False
return WorkspaceMember.objects.filter(
member=request.user, workspace__slug=view.workspace_slug, role__gte=10
).exists()
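The TODO above mentions moving this branching to a match statement; the images in this changeset run Python 3.11, so the syntax is available. A sketch (not part of the diff) of WorkSpaceBasePermission rewritten that way, reusing Owner/Admin and WorkspaceMember from above and keeping the effective behaviour:

class WorkSpaceBasePermissionMatch(BasePermission):
    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False
        match request.method:
            case "POST":
                return True  # anyone authenticated may create a workspace
            case "GET" | "HEAD" | "OPTIONS":
                return True  # safe methods are filtered in the queryset
            case "PUT" | "PATCH":
                return WorkspaceMember.objects.filter(
                    member=request.user,
                    workspace__slug=view.workspace_slug,
                    role__in=[Owner, Admin],
                ).exists()
            case "DELETE":
                return WorkspaceMember.objects.filter(
                    member=request.user,
                    workspace__slug=view.workspace_slug,
                    role=Owner,
                ).exists()
            case _:
                return False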

View File

@@ -0,0 +1,102 @@
from .base import BaseSerializer
from .user import (
UserSerializer,
UserLiteSerializer,
ChangePasswordSerializer,
ResetPasswordSerializer,
UserAdminLiteSerializer,
UserMeSerializer,
UserMeSettingsSerializer,
)
from .workspace import (
WorkSpaceSerializer,
WorkSpaceMemberSerializer,
TeamSerializer,
WorkSpaceMemberInviteSerializer,
WorkspaceLiteSerializer,
WorkspaceThemeSerializer,
WorkspaceMemberAdminSerializer,
WorkspaceMemberMeSerializer,
)
from .project import (
ProjectSerializer,
ProjectListSerializer,
ProjectDetailSerializer,
ProjectMemberSerializer,
ProjectMemberInviteSerializer,
ProjectIdentifierSerializer,
ProjectFavoriteSerializer,
ProjectLiteSerializer,
ProjectMemberLiteSerializer,
ProjectDeployBoardSerializer,
ProjectMemberAdminSerializer,
ProjectPublicMemberSerializer,
)
from .state import StateSerializer, StateLiteSerializer
from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
from .cycle import (
CycleSerializer,
CycleIssueSerializer,
CycleFavoriteSerializer,
CycleWriteSerializer,
)
from .asset import FileAssetSerializer
from .issue import (
IssueCreateSerializer,
IssueActivitySerializer,
IssueCommentSerializer,
IssuePropertySerializer,
IssueAssigneeSerializer,
LabelSerializer,
IssueSerializer,
IssueFlatSerializer,
IssueStateSerializer,
IssueLinkSerializer,
IssueLiteSerializer,
IssueAttachmentSerializer,
IssueSubscriberSerializer,
IssueReactionSerializer,
CommentReactionSerializer,
IssueVoteSerializer,
IssueRelationSerializer,
RelatedIssueSerializer,
IssuePublicSerializer,
)
from .module import (
ModuleWriteSerializer,
ModuleSerializer,
ModuleIssueSerializer,
ModuleLinkSerializer,
ModuleFavoriteSerializer,
)
from .api_token import APITokenSerializer
from .integration import (
IntegrationSerializer,
WorkspaceIntegrationSerializer,
GithubIssueSyncSerializer,
GithubRepositorySerializer,
GithubRepositorySyncSerializer,
GithubCommentSyncSerializer,
SlackProjectSyncSerializer,
)
from .importer import ImporterSerializer
from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer
from .estimate import (
EstimateSerializer,
EstimatePointSerializer,
EstimateReadSerializer,
)
from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer
from .analytic import AnalyticViewSerializer
from .notification import NotificationSerializer
from .exporter import ExporterHistorySerializer

View File

@@ -0,0 +1,30 @@
from .base import BaseSerializer
from plane.db.models import AnalyticView
from plane.utils.issue_filters import issue_filters
class AnalyticViewSerializer(BaseSerializer):
class Meta:
model = AnalyticView
fields = "__all__"
read_only_fields = [
"workspace",
"query",
]
def create(self, validated_data):
query_params = validated_data.get("query_dict", {})
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
return AnalyticView.objects.create(**validated_data)
def update(self, instance, validated_data):
query_params = validated_data.get("query_data", {})
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)

View File

@@ -0,0 +1,14 @@
from .base import BaseSerializer
from plane.db.models import APIToken
class APITokenSerializer(BaseSerializer):
class Meta:
model = APIToken
fields = [
"label",
"user",
"user_type",
"workspace",
"created_at",
]

View File

@@ -0,0 +1,14 @@
from .base import BaseSerializer
from plane.db.models import FileAsset
class FileAssetSerializer(BaseSerializer):
class Meta:
model = FileAsset
fields = "__all__"
read_only_fields = [
"created_by",
"updated_by",
"created_at",
"updated_at",
]

View File

@@ -0,0 +1,58 @@
from rest_framework import serializers
class BaseSerializer(serializers.ModelSerializer):
id = serializers.PrimaryKeyRelatedField(read_only=True)
class DynamicBaseSerializer(BaseSerializer):
def __init__(self, *args, **kwargs):
# If 'fields' is provided in the arguments, remove it and store it separately.
# This is done so as not to pass this custom argument up to the superclass.
fields = kwargs.pop("fields", None)
# Call the initialization of the superclass.
super().__init__(*args, **kwargs)
# If 'fields' was provided, filter the fields of the serializer accordingly.
if fields is not None:
self.fields = self._filter_fields(fields)
def _filter_fields(self, fields):
"""
Adjust the serializer's fields based on the provided 'fields' list.
:param fields: List or dictionary specifying which fields to include in the serializer.
:return: The updated fields for the serializer.
"""
# Check each field_name in the provided fields.
for field_name in fields:
# If the field is a dictionary (indicating nested fields),
# loop through its keys and values.
if isinstance(field_name, dict):
for key, value in field_name.items():
# If the value of this nested field is a list,
# perform a recursive filter on it.
if isinstance(value, list):
self._filter_fields(self.fields[key], value)
# Create a list to store allowed fields.
allowed = []
for item in fields:
# If the item is a string, it directly represents a field's name.
if isinstance(item, str):
allowed.append(item)
# If the item is a dictionary, it represents a nested field.
# Add the key of this dictionary to the allowed list.
elif isinstance(item, dict):
allowed.append(list(item.keys())[0])
# Convert the current serializer's fields and the allowed fields to sets.
existing = set(self.fields)
allowed = set(allowed)
# Remove fields from the serializer that aren't in the 'allowed' list.
for field_name in (existing - allowed):
self.fields.pop(field_name)
return self.fields
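A minimal usage sketch for the fields kwarg handled above; the serializer name is hypothetical and only flat field names are passed:

from plane.db.models import Issue


class IssueSummarySerializer(DynamicBaseSerializer):  # illustration only, not in this diff
    class Meta:
        model = Issue
        fields = "__all__"


# Only the listed fields survive; everything else is popped in _filter_fields.
issue = Issue.objects.first()
data = IssueSummarySerializer(issue, fields=["id", "name", "priority"]).data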

View File

@@ -0,0 +1,110 @@
# Django imports
from django.db.models.functions import TruncDate
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from .issue import IssueStateSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import Cycle, CycleIssue, CycleFavorite
class CycleWriteSerializer(BaseSerializer):
def validate(self, data):
if (
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None)
):
raise serializers.ValidationError("Start date cannot exceed end date")
return data
class Meta:
model = Cycle
fields = "__all__"
class CycleSerializer(BaseSerializer):
owned_by = UserLiteSerializer(read_only=True)
is_favorite = serializers.BooleanField(read_only=True)
total_issues = serializers.IntegerField(read_only=True)
cancelled_issues = serializers.IntegerField(read_only=True)
completed_issues = serializers.IntegerField(read_only=True)
started_issues = serializers.IntegerField(read_only=True)
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
assignees = serializers.SerializerMethodField(read_only=True)
total_estimates = serializers.IntegerField(read_only=True)
completed_estimates = serializers.IntegerField(read_only=True)
started_estimates = serializers.IntegerField(read_only=True)
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
def validate(self, data):
if (
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
and data.get("start_date", None) > data.get("end_date", None)
):
raise serializers.ValidationError("Start date cannot exceed end date")
return data
def get_assignees(self, obj):
members = [
{
"avatar": assignee.avatar,
"display_name": assignee.display_name,
"id": assignee.id,
}
for issue_cycle in obj.issue_cycle.prefetch_related(
"issue__assignees"
).all()
for assignee in issue_cycle.issue.assignees.all()
]
# Use a set comprehension to return only the unique objects
unique_objects = {frozenset(item.items()) for item in members}
# Convert the set back to a list of dictionaries
unique_list = [dict(item) for item in unique_objects]
return unique_list
class Meta:
model = Cycle
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"owned_by",
]
class CycleIssueSerializer(BaseSerializer):
issue_detail = IssueStateSerializer(read_only=True, source="issue")
sub_issues_count = serializers.IntegerField(read_only=True)
class Meta:
model = CycleIssue
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"cycle",
]
class CycleFavoriteSerializer(BaseSerializer):
cycle_detail = CycleSerializer(source="cycle", read_only=True)
class Meta:
model = CycleFavorite
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"user",
]
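The read-only IntegerFields on CycleSerializer (total_issues, completed_issues and the rest) are not model columns; they are expected to arrive as queryset annotations. A sketch of the kind of annotation a consuming view would attach, assuming the issue_cycle related name used in get_assignees above:

from django.db.models import Count, Q

from plane.db.models import Cycle

cycles = Cycle.objects.annotate(
    total_issues=Count("issue_cycle", distinct=True),
    completed_issues=Count(
        "issue_cycle",
        filter=Q(issue_cycle__issue__state__group="completed"),
        distinct=True,
    ),
)
data = CycleSerializer(cycles, many=True).data  # is_favorite etc. would need their own annotations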

View File

@@ -0,0 +1,44 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import Estimate, EstimatePoint
from plane.api.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer
class EstimateSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
class Meta:
model = Estimate
fields = "__all__"
read_only_fields = [
"workspace",
"project",
]
class EstimatePointSerializer(BaseSerializer):
class Meta:
model = EstimatePoint
fields = "__all__"
read_only_fields = [
"estimate",
"workspace",
"project",
]
class EstimateReadSerializer(BaseSerializer):
points = EstimatePointSerializer(read_only=True, many=True)
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
class Meta:
model = Estimate
fields = "__all__"
read_only_fields = [
"points",
"name",
"description",
]

View File

@@ -0,0 +1,58 @@
# Third party frameworks
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .issue import IssueFlatSerializer, LabelLiteSerializer
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer
from .user import UserLiteSerializer
from plane.db.models import Inbox, InboxIssue, Issue
class InboxSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(source="project", read_only=True)
pending_issue_count = serializers.IntegerField(read_only=True)
class Meta:
model = Inbox
fields = "__all__"
read_only_fields = [
"project",
"workspace",
]
class InboxIssueSerializer(BaseSerializer):
issue_detail = IssueFlatSerializer(source="issue", read_only=True)
project_detail = ProjectLiteSerializer(source="project", read_only=True)
class Meta:
model = InboxIssue
fields = "__all__"
read_only_fields = [
"project",
"workspace",
]
class InboxIssueLiteSerializer(BaseSerializer):
class Meta:
model = InboxIssue
fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
read_only_fields = fields
class IssueStateInboxSerializer(BaseSerializer):
state_detail = StateLiteSerializer(read_only=True, source="state")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
sub_issues_count = serializers.IntegerField(read_only=True)
bridge_id = serializers.UUIDField(read_only=True)
issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)
class Meta:
model = Issue
fields = "__all__"

View File

@@ -0,0 +1,8 @@
from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
from .github import (
GithubRepositorySerializer,
GithubRepositorySyncSerializer,
GithubIssueSyncSerializer,
GithubCommentSyncSerializer,
)
from .slack import SlackProjectSyncSerializer

View File

@@ -0,0 +1,20 @@
# Module imports
from plane.api.serializers import BaseSerializer
from plane.db.models import Integration, WorkspaceIntegration
class IntegrationSerializer(BaseSerializer):
class Meta:
model = Integration
fields = "__all__"
read_only_fields = [
"verified",
]
class WorkspaceIntegrationSerializer(BaseSerializer):
integration_detail = IntegrationSerializer(read_only=True, source="integration")
class Meta:
model = WorkspaceIntegration
fields = "__all__"

View File

@@ -0,0 +1,45 @@
# Module imports
from plane.api.serializers import BaseSerializer
from plane.db.models import (
GithubIssueSync,
GithubRepository,
GithubRepositorySync,
GithubCommentSync,
)
class GithubRepositorySerializer(BaseSerializer):
class Meta:
model = GithubRepository
fields = "__all__"
class GithubRepositorySyncSerializer(BaseSerializer):
repo_detail = GithubRepositorySerializer(source="repository")
class Meta:
model = GithubRepositorySync
fields = "__all__"
class GithubIssueSyncSerializer(BaseSerializer):
class Meta:
model = GithubIssueSync
fields = "__all__"
read_only_fields = [
"project",
"workspace",
"repository_sync",
]
class GithubCommentSyncSerializer(BaseSerializer):
class Meta:
model = GithubCommentSync
fields = "__all__"
read_only_fields = [
"project",
"workspace",
"repository_sync",
"issue_sync",
]

View File

@@ -0,0 +1,14 @@
# Module imports
from plane.api.serializers import BaseSerializer
from plane.db.models import SlackProjectSync
class SlackProjectSyncSerializer(BaseSerializer):
class Meta:
model = SlackProjectSync
fields = "__all__"
read_only_fields = [
"project",
"workspace",
"workspace_integration",
]

View File

@@ -0,0 +1,630 @@
# Django imports
from django.utils import timezone
# Third Party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from .state import StateSerializer, StateLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
User,
Issue,
IssueActivity,
IssueComment,
IssueProperty,
IssueAssignee,
IssueSubscriber,
IssueLabel,
Label,
CycleIssue,
Cycle,
Module,
ModuleIssue,
IssueLink,
IssueAttachment,
IssueReaction,
CommentReaction,
IssueVote,
IssueRelation,
)
class IssueFlatSerializer(BaseSerializer):
## Contain only flat fields
class Meta:
model = Issue
fields = [
"id",
"name",
"description",
"description_html",
"priority",
"start_date",
"target_date",
"sequence_id",
"sort_order",
"is_draft",
]
class IssueProjectLiteSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(source="project", read_only=True)
class Meta:
model = Issue
fields = [
"id",
"project_detail",
"name",
"sequence_id",
]
read_only_fields = fields
##TODO: Find a better way to write this serializer
## Find a better approach to save manytomany?
class IssueCreateSerializer(BaseSerializer):
state_detail = StateSerializer(read_only=True, source="state")
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
assignees_list = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
)
labels_list = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
)
class Meta:
model = Issue
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
def validate(self, data):
if (
data.get("start_date", None) is not None
and data.get("target_date", None) is not None
and data.get("start_date", None) > data.get("target_date", None)
):
raise serializers.ValidationError("Start date cannot exceed target date")
return data
def create(self, validated_data):
assignees = validated_data.pop("assignees_list", None)
labels = validated_data.pop("labels_list", None)
project_id = self.context["project_id"]
workspace_id = self.context["workspace_id"]
default_assignee_id = self.context["default_assignee_id"]
issue = Issue.objects.create(**validated_data, project_id=project_id)
# Issue Audit Users
created_by_id = issue.created_by_id
updated_by_id = issue.updated_by_id
if assignees is not None and len(assignees):
IssueAssignee.objects.bulk_create(
[
IssueAssignee(
assignee=user,
issue=issue,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
for user in assignees
],
batch_size=10,
)
else:
# Then assign it to default assignee
if default_assignee_id is not None:
IssueAssignee.objects.create(
assignee_id=default_assignee_id,
issue=issue,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
if labels is not None and len(labels):
IssueLabel.objects.bulk_create(
[
IssueLabel(
label=label,
issue=issue,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
for label in labels
],
batch_size=10,
)
return issue
def update(self, instance, validated_data):
assignees = validated_data.pop("assignees_list", None)
labels = validated_data.pop("labels_list", None)
# Related models
project_id = instance.project_id
workspace_id = instance.workspace_id
created_by_id = instance.created_by_id
updated_by_id = instance.updated_by_id
if assignees is not None:
IssueAssignee.objects.filter(issue=instance).delete()
IssueAssignee.objects.bulk_create(
[
IssueAssignee(
assignee=user,
issue=instance,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
for user in assignees
],
batch_size=10,
)
if labels is not None:
IssueLabel.objects.filter(issue=instance).delete()
IssueLabel.objects.bulk_create(
[
IssueLabel(
label=label,
issue=instance,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
for label in labels
],
batch_size=10,
)
# The updated_at timestamp is refreshed even when only related models are updated
instance.updated_at = timezone.now()
return super().update(instance, validated_data)
class IssueActivitySerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
class Meta:
model = IssueActivity
fields = "__all__"
class IssueCommentSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
class Meta:
model = IssueComment
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"issue",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class IssuePropertySerializer(BaseSerializer):
class Meta:
model = IssueProperty
fields = "__all__"
read_only_fields = [
"user",
"workspace",
"project",
]
class LabelSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
project_detail = ProjectLiteSerializer(source="project", read_only=True)
class Meta:
model = Label
fields = "__all__"
read_only_fields = [
"workspace",
"project",
]
class LabelLiteSerializer(BaseSerializer):
class Meta:
model = Label
fields = [
"id",
"name",
"color",
]
class IssueLabelSerializer(BaseSerializer):
# label_details = LabelSerializer(read_only=True, source="label")
class Meta:
model = IssueLabel
fields = "__all__"
read_only_fields = [
"workspace",
"project",
]
class IssueRelationSerializer(BaseSerializer):
issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")
class Meta:
model = IssueRelation
fields = [
"issue_detail",
"relation_type",
"related_issue",
"issue",
"id"
]
read_only_fields = [
"workspace",
"project",
]
class RelatedIssueSerializer(BaseSerializer):
issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")
class Meta:
model = IssueRelation
fields = [
"issue_detail",
"relation_type",
"related_issue",
"issue",
"id"
]
read_only_fields = [
"workspace",
"project",
]
class IssueAssigneeSerializer(BaseSerializer):
assignee_details = UserLiteSerializer(read_only=True, source="assignee")
class Meta:
model = IssueAssignee
fields = "__all__"
class CycleBaseSerializer(BaseSerializer):
class Meta:
model = Cycle
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class IssueCycleDetailSerializer(BaseSerializer):
cycle_detail = CycleBaseSerializer(read_only=True, source="cycle")
class Meta:
model = CycleIssue
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class ModuleBaseSerializer(BaseSerializer):
class Meta:
model = Module
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class IssueModuleDetailSerializer(BaseSerializer):
module_detail = ModuleBaseSerializer(read_only=True, source="module")
class Meta:
model = ModuleIssue
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class IssueLinkSerializer(BaseSerializer):
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
class Meta:
model = IssueLink
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
"issue",
]
# Validate that the URL does not already exist for this issue
def create(self, validated_data):
if IssueLink.objects.filter(
url=validated_data.get("url"), issue_id=validated_data.get("issue_id")
).exists():
raise serializers.ValidationError(
{"error": "URL already exists for this Issue"}
)
return IssueLink.objects.create(**validated_data)
class IssueAttachmentSerializer(BaseSerializer):
class Meta:
model = IssueAttachment
fields = "__all__"
read_only_fields = [
"created_by",
"updated_by",
"created_at",
"updated_at",
"workspace",
"project",
"issue",
]
class IssueReactionSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
class Meta:
model = IssueReaction
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"issue",
"actor",
]
class CommentReactionLiteSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
class Meta:
model = CommentReaction
fields = [
"id",
"reaction",
"comment",
"actor_detail",
]
class CommentReactionSerializer(BaseSerializer):
class Meta:
model = CommentReaction
fields = "__all__"
read_only_fields = ["workspace", "project", "comment", "actor"]
class IssueVoteSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
class Meta:
model = IssueVote
fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
read_only_fields = fields
class IssueCommentSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
is_member = serializers.BooleanField(read_only=True)
class Meta:
model = IssueComment
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"issue",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class IssueStateFlatSerializer(BaseSerializer):
state_detail = StateLiteSerializer(read_only=True, source="state")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
class Meta:
model = Issue
fields = [
"id",
"sequence_id",
"name",
"state_detail",
"project_detail",
]
# Issue Serializer with state details
class IssueStateSerializer(BaseSerializer):
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
state_detail = StateLiteSerializer(read_only=True, source="state")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
sub_issues_count = serializers.IntegerField(read_only=True)
bridge_id = serializers.UUIDField(read_only=True)
attachment_count = serializers.IntegerField(read_only=True)
link_count = serializers.IntegerField(read_only=True)
class Meta:
model = Issue
fields = "__all__"
class IssueSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project")
state_detail = StateSerializer(read_only=True, source="state")
parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
label_details = LabelSerializer(read_only=True, source="labels", many=True)
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
issue_cycle = IssueCycleDetailSerializer(read_only=True)
issue_module = IssueModuleDetailSerializer(read_only=True)
issue_link = IssueLinkSerializer(read_only=True, many=True)
issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
sub_issues_count = serializers.IntegerField(read_only=True)
issue_reactions = IssueReactionSerializer(read_only=True, many=True)
class Meta:
model = Issue
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class IssueLiteSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
state_detail = StateLiteSerializer(read_only=True, source="state")
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
sub_issues_count = serializers.IntegerField(read_only=True)
cycle_id = serializers.UUIDField(read_only=True)
module_id = serializers.UUIDField(read_only=True)
attachment_count = serializers.IntegerField(read_only=True)
link_count = serializers.IntegerField(read_only=True)
issue_reactions = IssueReactionSerializer(read_only=True, many=True)
class Meta:
model = Issue
fields = "__all__"
read_only_fields = [
"start_date",
"target_date",
"completed_at",
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class IssuePublicSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project")
state_detail = StateLiteSerializer(read_only=True, source="state")
reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
votes = IssueVoteSerializer(read_only=True, many=True)
class Meta:
model = Issue
fields = [
"id",
"name",
"description_html",
"sequence_id",
"state",
"state_detail",
"project",
"project_detail",
"workspace",
"priority",
"target_date",
"reactions",
"votes",
]
read_only_fields = fields
class IssueSubscriberSerializer(BaseSerializer):
class Meta:
model = IssueSubscriber
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"issue",
]
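A quick, hedged illustration of the pattern used throughout this file: relation fields such as actor are written and stored as primary keys, while the matching *_detail field (UserLiteSerializer with source="actor") is a read-only nested payload on output. A minimal view-layer sketch, assuming a DRF view where request, issue_id, project_id and reaction_code are available (the function and argument names are illustrative, not part of the diff):
def add_issue_reaction(request, issue_id, project_id, reaction_code):
    # Only "reaction" comes from the client; the related objects are injected
    # via save(), since they are listed in read_only_fields above.
    serializer = IssueReactionSerializer(data={"reaction": reaction_code})
    serializer.is_valid(raise_exception=True)
    reaction = serializer.save(
        issue_id=issue_id,
        project_id=project_id,
        actor=request.user,
    )
    # On output, "actor" is the raw id while "actor_detail" carries the nested
    # UserLiteSerializer representation.
    return IssueReactionSerializer(reaction).data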

View File

@@ -0,0 +1,194 @@
# Third Party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .issue import IssueStateSerializer
from plane.db.models import (
User,
Module,
ModuleMember,
ModuleIssue,
ModuleLink,
ModuleFavorite,
)
class ModuleWriteSerializer(BaseSerializer):
members_list = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
)
project_detail = ProjectLiteSerializer(source="project", read_only=True)
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
class Meta:
model = Module
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
def validate(self, data):
if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
raise serializers.ValidationError("Start date cannot exceed target date")
return data
def create(self, validated_data):
members = validated_data.pop("members_list", None)
project = self.context["project"]
module = Module.objects.create(**validated_data, project=project)
if members is not None:
ModuleMember.objects.bulk_create(
[
ModuleMember(
module=module,
member=member,
project=project,
workspace=project.workspace,
created_by=module.created_by,
updated_by=module.updated_by,
)
for member in members
],
batch_size=10,
ignore_conflicts=True,
)
return module
def update(self, instance, validated_data):
members = validated_data.pop("members_list", None)
if members is not None:
ModuleMember.objects.filter(module=instance).delete()
ModuleMember.objects.bulk_create(
[
ModuleMember(
module=instance,
member=member,
project=instance.project,
workspace=instance.project.workspace,
created_by=instance.created_by,
updated_by=instance.updated_by,
)
for member in members
],
batch_size=10,
ignore_conflicts=True,
)
return super().update(instance, validated_data)
class ModuleFlatSerializer(BaseSerializer):
class Meta:
model = Module
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class ModuleIssueSerializer(BaseSerializer):
module_detail = ModuleFlatSerializer(read_only=True, source="module")
issue_detail = IssueStateSerializer(read_only=True, source="issue")
sub_issues_count = serializers.IntegerField(read_only=True)
class Meta:
model = ModuleIssue
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
"module",
]
class ModuleLinkSerializer(BaseSerializer):
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
class Meta:
model = ModuleLink
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
"module",
]
# Validation if url already exists
def create(self, validated_data):
if ModuleLink.objects.filter(
url=validated_data.get("url"), module_id=validated_data.get("module_id")
).exists():
raise serializers.ValidationError(
{"error": "URL already exists for this Issue"}
)
return ModuleLink.objects.create(**validated_data)
class ModuleSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project")
lead_detail = UserLiteSerializer(read_only=True, source="lead")
members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
link_module = ModuleLinkSerializer(read_only=True, many=True)
is_favorite = serializers.BooleanField(read_only=True)
total_issues = serializers.IntegerField(read_only=True)
cancelled_issues = serializers.IntegerField(read_only=True)
completed_issues = serializers.IntegerField(read_only=True)
started_issues = serializers.IntegerField(read_only=True)
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
class Meta:
model = Module
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class ModuleFavoriteSerializer(BaseSerializer):
module_detail = ModuleFlatSerializer(source="module", read_only=True)
class Meta:
model = ModuleFavorite
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"user",
]
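ModuleWriteSerializer above takes the owning project through the serializer context rather than the payload, and fans the write-only members_list out into ModuleMember rows. A hedged sketch of the calling convention (the Project lookup and the member_ids variable are assumptions for illustration; Project would need to be imported from plane.db.models):
project = Project.objects.get(pk=project_id)
serializer = ModuleWriteSerializer(
    data={"name": "Release 1.0", "members_list": member_ids},
    context={"project": project},
)
serializer.is_valid(raise_exception=True)
module = serializer.save()  # creates the Module and bulk-creates the ModuleMember rows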

View File

@@ -0,0 +1,12 @@
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from plane.db.models import Notification
class NotificationSerializer(BaseSerializer):
triggered_by_details = UserLiteSerializer(read_only=True, source="triggered_by")
class Meta:
model = Notification
fields = "__all__"

View File

@@ -0,0 +1,111 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .issue import IssueFlatSerializer, LabelLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import Page, PageBlock, PageFavorite, PageLabel, Label
class PageBlockSerializer(BaseSerializer):
issue_detail = IssueFlatSerializer(source="issue", read_only=True)
project_detail = ProjectLiteSerializer(source="project", read_only=True)
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
class Meta:
model = PageBlock
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"page",
]
class PageBlockLiteSerializer(BaseSerializer):
class Meta:
model = PageBlock
fields = "__all__"
class PageSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
labels_list = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
)
blocks = PageBlockLiteSerializer(read_only=True, many=True)
project_detail = ProjectLiteSerializer(source="project", read_only=True)
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
class Meta:
model = Page
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"owned_by",
]
def create(self, validated_data):
labels = validated_data.pop("labels_list", None)
project_id = self.context["project_id"]
owned_by_id = self.context["owned_by_id"]
page = Page.objects.create(
**validated_data, project_id=project_id, owned_by_id=owned_by_id
)
if labels is not None:
PageLabel.objects.bulk_create(
[
PageLabel(
label=label,
page=page,
project_id=project_id,
workspace_id=page.workspace_id,
created_by_id=page.created_by_id,
updated_by_id=page.updated_by_id,
)
for label in labels
],
batch_size=10,
)
return page
def update(self, instance, validated_data):
labels = validated_data.pop("labels_list", None)
if labels is not None:
PageLabel.objects.filter(page=instance).delete()
PageLabel.objects.bulk_create(
[
PageLabel(
label=label,
page=instance,
project_id=instance.project_id,
workspace_id=instance.workspace_id,
created_by_id=instance.created_by_id,
updated_by_id=instance.updated_by_id,
)
for label in labels
],
batch_size=10,
)
return super().update(instance, validated_data)
class PageFavoriteSerializer(BaseSerializer):
page_detail = PageSerializer(source="page", read_only=True)
class Meta:
model = PageFavorite
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"user",
]
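PageSerializer follows the same write-only-list pattern: labels_list is validated against Label primary keys and expanded into PageLabel rows, with project_id and owned_by_id supplied through the context. A minimal sketch, assuming label_ids, project_id and the request are already in scope:
serializer = PageSerializer(
    data={"name": "Meeting notes", "labels_list": label_ids},
    context={"project_id": project_id, "owned_by_id": request.user.id},
)
serializer.is_valid(raise_exception=True)
page = serializer.save()  # creates the Page plus one PageLabel per label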

View File

@@ -0,0 +1,214 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import (
Project,
ProjectMember,
ProjectMemberInvite,
ProjectIdentifier,
ProjectFavorite,
ProjectDeployBoard,
ProjectPublicMember,
)
class ProjectSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
class Meta:
model = Project
fields = "__all__"
read_only_fields = [
"workspace",
]
def create(self, validated_data):
identifier = validated_data.get("identifier", "").strip().upper()
if identifier == "":
raise serializers.ValidationError(detail="Project Identifier is required")
if ProjectIdentifier.objects.filter(
name=identifier, workspace_id=self.context["workspace_id"]
).exists():
raise serializers.ValidationError(detail="Project Identifier is taken")
project = Project.objects.create(
**validated_data, workspace_id=self.context["workspace_id"]
)
_ = ProjectIdentifier.objects.create(
name=project.identifier,
project=project,
workspace_id=self.context["workspace_id"],
)
return project
def update(self, instance, validated_data):
identifier = validated_data.get("identifier", "").strip().upper()
# If identifier is not passed update the project and return
if identifier == "":
project = super().update(instance, validated_data)
return project
# If no Project Identifier is found create it
project_identifier = ProjectIdentifier.objects.filter(
name=identifier, workspace_id=instance.workspace_id
).first()
if project_identifier is None:
project = super().update(instance, validated_data)
project_identifier = ProjectIdentifier.objects.filter(
project=project
).first()
if project_identifier is not None:
project_identifier.name = identifier
project_identifier.save()
return project
# If found check if the project_id to be updated and identifier project id is same
if project_identifier.project_id == instance.id:
# If same pass update
project = super().update(instance, validated_data)
return project
# If not same fail update
raise serializers.ValidationError(detail="Project Identifier is already taken")
class ProjectLiteSerializer(BaseSerializer):
class Meta:
model = Project
fields = [
"id",
"identifier",
"name",
"cover_image",
"icon_prop",
"emoji",
"description",
]
read_only_fields = fields
class ProjectListSerializer(DynamicBaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
total_members = serializers.IntegerField(read_only=True)
total_cycles = serializers.IntegerField(read_only=True)
total_modules = serializers.IntegerField(read_only=True)
is_member = serializers.BooleanField(read_only=True)
sort_order = serializers.FloatField(read_only=True)
member_role = serializers.IntegerField(read_only=True)
is_deployed = serializers.BooleanField(read_only=True)
members = serializers.SerializerMethodField()
def get_members(self, obj):
project_members = ProjectMember.objects.filter(project_id=obj.id).values(
"id",
"member_id",
"member__display_name",
"member__avatar",
)
return project_members
class Meta:
model = Project
fields = "__all__"
class ProjectDetailSerializer(BaseSerializer):
# workspace = WorkSpaceSerializer(read_only=True)
default_assignee = UserLiteSerializer(read_only=True)
project_lead = UserLiteSerializer(read_only=True)
is_favorite = serializers.BooleanField(read_only=True)
total_members = serializers.IntegerField(read_only=True)
total_cycles = serializers.IntegerField(read_only=True)
total_modules = serializers.IntegerField(read_only=True)
is_member = serializers.BooleanField(read_only=True)
sort_order = serializers.FloatField(read_only=True)
member_role = serializers.IntegerField(read_only=True)
is_deployed = serializers.BooleanField(read_only=True)
class Meta:
model = Project
fields = "__all__"
class ProjectMemberSerializer(BaseSerializer):
workspace = WorkspaceLiteSerializer(read_only=True)
project = ProjectLiteSerializer(read_only=True)
member = UserLiteSerializer(read_only=True)
class Meta:
model = ProjectMember
fields = "__all__"
class ProjectMemberAdminSerializer(BaseSerializer):
workspace = WorkspaceLiteSerializer(read_only=True)
project = ProjectLiteSerializer(read_only=True)
member = UserAdminLiteSerializer(read_only=True)
class Meta:
model = ProjectMember
fields = "__all__"
class ProjectMemberInviteSerializer(BaseSerializer):
project = ProjectLiteSerializer(read_only=True)
workspace = WorkspaceLiteSerializer(read_only=True)
class Meta:
model = ProjectMemberInvite
fields = "__all__"
class ProjectIdentifierSerializer(BaseSerializer):
class Meta:
model = ProjectIdentifier
fields = "__all__"
class ProjectFavoriteSerializer(BaseSerializer):
class Meta:
model = ProjectFavorite
fields = "__all__"
read_only_fields = [
"workspace",
"user",
]
class ProjectMemberLiteSerializer(BaseSerializer):
member = UserLiteSerializer(read_only=True)
is_subscribed = serializers.BooleanField(read_only=True)
class Meta:
model = ProjectMember
fields = ["member", "id", "is_subscribed"]
read_only_fields = fields
class ProjectDeployBoardSerializer(BaseSerializer):
project_details = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
class Meta:
model = ProjectDeployBoard
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"anchor",
]
class ProjectPublicMemberSerializer(BaseSerializer):
class Meta:
model = ProjectPublicMember
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"member",
]
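ProjectSerializer.create above expects workspace_id in the serializer context, upper-cases the identifier, and rejects identifiers already claimed inside that workspace. A hedged usage sketch (workspace_id is assumed to come from the URL or the request):
serializer = ProjectSerializer(
    data={"name": "Docs", "identifier": "docs"},
    context={"workspace_id": workspace_id},
)
serializer.is_valid(raise_exception=True)
project = serializer.save()  # also creates the matching ProjectIdentifier row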

View File

@@ -0,0 +1,31 @@
# Module imports
from .base import BaseSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import State
class StateSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
class Meta:
model = State
fields = "__all__"
read_only_fields = [
"workspace",
"project",
]
class StateLiteSerializer(BaseSerializer):
class Meta:
model = State
fields = [
"id",
"name",
"color",
"group",
]
read_only_fields = fields

View File

@@ -0,0 +1,163 @@
# Third party imports
from rest_framework import serializers
# Module import
from .base import BaseSerializer
from plane.db.models import User, Workspace, WorkspaceMemberInvite
class UserSerializer(BaseSerializer):
class Meta:
model = User
fields = "__all__"
read_only_fields = [
"id",
"created_at",
"updated_at",
"is_superuser",
"is_staff",
"last_active",
"last_login_time",
"last_logout_time",
"last_login_ip",
"last_logout_ip",
"last_login_uagent",
"token_updated_at",
"is_onboarded",
"is_bot",
]
extra_kwargs = {"password": {"write_only": True}}
# If the user has already filled first name or last name then he is onboarded
def get_is_onboarded(self, obj):
return bool(obj.first_name) or bool(obj.last_name)
class UserMeSerializer(BaseSerializer):
class Meta:
model = User
fields = [
"id",
"avatar",
"cover_image",
"date_joined",
"display_name",
"email",
"first_name",
"last_name",
"is_active",
"is_bot",
"is_email_verified",
"is_managed",
"is_onboarded",
"is_tour_completed",
"mobile_number",
"role",
"onboarding_step",
"user_timezone",
"username",
"theme",
"last_workspace_id",
]
read_only_fields = fields
class UserMeSettingsSerializer(BaseSerializer):
workspace = serializers.SerializerMethodField()
class Meta:
model = User
fields = [
"id",
"email",
"workspace",
]
read_only_fields = fields
def get_workspace(self, obj):
workspace_invites = WorkspaceMemberInvite.objects.filter(
email=obj.email
).count()
if obj.last_workspace_id is not None:
workspace = Workspace.objects.get(
pk=obj.last_workspace_id, workspace_member__member=obj.id
)
return {
"last_workspace_id": obj.last_workspace_id,
"last_workspace_slug": workspace.slug,
"fallback_workspace_id": obj.last_workspace_id,
"fallback_workspace_slug": workspace.slug,
"invites": workspace_invites,
}
else:
fallback_workspace = (
Workspace.objects.filter(workspace_member__member_id=obj.id)
.order_by("created_at")
.first()
)
return {
"last_workspace_id": None,
"last_workspace_slug": None,
"fallback_workspace_id": fallback_workspace.id
if fallback_workspace is not None
else None,
"fallback_workspace_slug": fallback_workspace.slug
if fallback_workspace is not None
else None,
"invites": workspace_invites,
}
class UserLiteSerializer(BaseSerializer):
class Meta:
model = User
fields = [
"id",
"first_name",
"last_name",
"avatar",
"is_bot",
"display_name",
]
read_only_fields = [
"id",
"is_bot",
]
class UserAdminLiteSerializer(BaseSerializer):
class Meta:
model = User
fields = [
"id",
"first_name",
"last_name",
"avatar",
"is_bot",
"display_name",
"email",
]
read_only_fields = [
"id",
"is_bot",
]
class ChangePasswordSerializer(serializers.Serializer):
model = User
"""
Serializer for password change endpoint.
"""
old_password = serializers.CharField(required=True)
new_password = serializers.CharField(required=True)
class ResetPasswordSerializer(serializers.Serializer):
model = User
"""
Serializer for password reset endpoint.
"""
new_password = serializers.CharField(required=True)
confirm_password = serializers.CharField(required=True)

View File

@@ -0,0 +1,83 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
from plane.db.models import GlobalView, IssueView, IssueViewFavorite
from plane.utils.issue_filters import issue_filters
class GlobalViewSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
class Meta:
model = GlobalView
fields = "__all__"
read_only_fields = [
"workspace",
"query",
]
def create(self, validated_data):
query_params = validated_data.get("query_data", {})
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
return GlobalView.objects.create(**validated_data)
def update(self, instance, validated_data):
query_params = validated_data.get("query_data", {})
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)
class IssueViewSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
project_detail = ProjectLiteSerializer(source="project", read_only=True)
workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
class Meta:
model = IssueView
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"query",
]
def create(self, validated_data):
query_params = validated_data.get("query_data", {})
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
return IssueView.objects.create(**validated_data)
def update(self, instance, validated_data):
query_params = validated_data.get("query_data", {})
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)
class IssueViewFavoriteSerializer(BaseSerializer):
view_detail = IssueViewSerializer(source="issue_view", read_only=True)
class Meta:
model = IssueViewFavorite
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"user",
]

View File

@@ -0,0 +1,138 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import (
User,
Workspace,
WorkspaceMember,
Team,
TeamMember,
WorkspaceMemberInvite,
WorkspaceTheme,
)
class WorkSpaceSerializer(BaseSerializer):
owner = UserLiteSerializer(read_only=True)
total_members = serializers.IntegerField(read_only=True)
total_issues = serializers.IntegerField(read_only=True)
class Meta:
model = Workspace
fields = "__all__"
read_only_fields = [
"id",
"created_by",
"updated_by",
"created_at",
"updated_at",
"owner",
]
class WorkspaceLiteSerializer(BaseSerializer):
class Meta:
model = Workspace
fields = [
"name",
"slug",
"id",
]
read_only_fields = fields
class WorkSpaceMemberSerializer(BaseSerializer):
member = UserLiteSerializer(read_only=True)
workspace = WorkspaceLiteSerializer(read_only=True)
class Meta:
model = WorkspaceMember
fields = "__all__"
class WorkspaceMemberMeSerializer(BaseSerializer):
class Meta:
model = WorkspaceMember
fields = "__all__"
class WorkspaceMemberAdminSerializer(BaseSerializer):
member = UserAdminLiteSerializer(read_only=True)
workspace = WorkspaceLiteSerializer(read_only=True)
class Meta:
model = WorkspaceMember
fields = "__all__"
class WorkSpaceMemberInviteSerializer(BaseSerializer):
workspace = WorkSpaceSerializer(read_only=True)
total_members = serializers.IntegerField(read_only=True)
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
class Meta:
model = WorkspaceMemberInvite
fields = "__all__"
class TeamSerializer(BaseSerializer):
members_detail = UserLiteSerializer(read_only=True, source="members", many=True)
members = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
)
class Meta:
model = Team
fields = "__all__"
read_only_fields = [
"workspace",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
def create(self, validated_data, **kwargs):
if "members" in validated_data:
members = validated_data.pop("members")
workspace = self.context["workspace"]
team = Team.objects.create(**validated_data, workspace=workspace)
team_members = [
TeamMember(member=member, team=team, workspace=workspace)
for member in members
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return team
else:
team = Team.objects.create(**validated_data)
return team
def update(self, instance, validated_data):
if "members" in validated_data:
members = validated_data.pop("members")
TeamMember.objects.filter(team=instance).delete()
team_members = [
TeamMember(member=member, team=instance, workspace=instance.workspace)
for member in members
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return super().update(instance, validated_data)
else:
return super().update(instance, validated_data)
class WorkspaceThemeSerializer(BaseSerializer):
class Meta:
model = WorkspaceTheme
fields = "__all__"
read_only_fields = [
"workspace",
"actor",
]

View File

@@ -0,0 +1,50 @@
from .analytic import urlpatterns as analytic_urls
from .asset import urlpatterns as asset_urls
from .authentication import urlpatterns as authentication_urls
from .configuration import urlpatterns as configuration_urls
from .cycle import urlpatterns as cycle_urls
from .estimate import urlpatterns as estimate_urls
from .gpt import urlpatterns as gpt_urls
from .importer import urlpatterns as importer_urls
from .inbox import urlpatterns as inbox_urls
from .integration import urlpatterns as integration_urls
from .issue import urlpatterns as issue_urls
from .module import urlpatterns as module_urls
from .notification import urlpatterns as notification_urls
from .page import urlpatterns as page_urls
from .project import urlpatterns as project_urls
from .public_board import urlpatterns as public_board_urls
from .release_note import urlpatterns as release_note_urls
from .search import urlpatterns as search_urls
from .state import urlpatterns as state_urls
from .unsplash import urlpatterns as unsplash_urls
from .user import urlpatterns as user_urls
from .views import urlpatterns as view_urls
from .workspace import urlpatterns as workspace_urls
urlpatterns = [
*analytic_urls,
*asset_urls,
*authentication_urls,
*configuration_urls,
*cycle_urls,
*estimate_urls,
*gpt_urls,
*importer_urls,
*inbox_urls,
*integration_urls,
*issue_urls,
*module_urls,
*notification_urls,
*page_urls,
*project_urls,
*public_board_urls,
*release_note_urls,
*search_urls,
*state_urls,
*unsplash_urls,
*user_urls,
*view_urls,
*workspace_urls,
]

View File

@@ -0,0 +1,46 @@
from django.urls import path
from plane.api.views import (
AnalyticsEndpoint,
AnalyticViewViewset,
SavedAnalyticEndpoint,
ExportAnalyticsEndpoint,
DefaultAnalyticsEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/analytics/",
AnalyticsEndpoint.as_view(),
name="plane-analytics",
),
path(
"workspaces/<str:slug>/analytic-view/",
AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
name="analytic-view",
),
path(
"workspaces/<str:slug>/analytic-view/<uuid:pk>/",
AnalyticViewViewset.as_view(
{"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
),
name="analytic-view",
),
path(
"workspaces/<str:slug>/saved-analytic-view/<uuid:analytic_id>/",
SavedAnalyticEndpoint.as_view(),
name="saved-analytic-view",
),
path(
"workspaces/<str:slug>/export-analytics/",
ExportAnalyticsEndpoint.as_view(),
name="export-analytics",
),
path(
"workspaces/<str:slug>/default-analytics/",
DefaultAnalyticsEndpoint.as_view(),
name="default-analytics",
),
]
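Every route here is named, so callers can build paths with Django's reverse() instead of string formatting. A small sketch, assuming these urlpatterns are included in the root urlconf without a namespace and "acme" is a workspace slug:
from django.urls import reverse

url = reverse("plane-analytics", kwargs={"slug": "acme"})
# -> ".../workspaces/acme/analytics/" under whatever prefix the API is mounted at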

View File

@@ -0,0 +1,31 @@
from django.urls import path
from plane.api.views import (
FileAssetEndpoint,
UserAssetsEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/file-assets/",
FileAssetEndpoint.as_view(),
name="file-assets",
),
path(
"workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/",
FileAssetEndpoint.as_view(),
name="file-assets",
),
path(
"users/file-assets/",
UserAssetsEndpoint.as_view(),
name="user-file-assets",
),
path(
"users/file-assets/<str:asset_key>/",
UserAssetsEndpoint.as_view(),
name="user-file-assets",
),
]

View File

@@ -0,0 +1,68 @@
from django.urls import path
from rest_framework_simplejwt.views import TokenRefreshView
from plane.api.views import (
# Authentication
SignUpEndpoint,
SignInEndpoint,
SignOutEndpoint,
MagicSignInEndpoint,
MagicSignInGenerateEndpoint,
OauthEndpoint,
## End Authentication
# Auth Extended
ForgotPasswordEndpoint,
VerifyEmailEndpoint,
ResetPasswordEndpoint,
RequestEmailVerificationEndpoint,
ChangePasswordEndpoint,
## End Auth Extended
# API Tokens
ApiTokenEndpoint,
## End API Tokens
)
urlpatterns = [
# Social Auth
path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
# Auth
path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
# Magic Sign In/Up
path(
"magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
),
path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
# Email verification
path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
path(
"request-email-verify/",
RequestEmailVerificationEndpoint.as_view(),
name="request-reset-email",
),
# Password Manipulation
path(
"users/me/change-password/",
ChangePasswordEndpoint.as_view(),
name="change-password",
),
path(
"reset-password/<uidb64>/<token>/",
ResetPasswordEndpoint.as_view(),
name="password-reset",
),
path(
"forgot-password/",
ForgotPasswordEndpoint.as_view(),
name="forgot-password",
),
# API Tokens
path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
path("api-tokens/<uuid:pk>/", ApiTokenEndpoint.as_view(), name="api-tokens"),
## End API Tokens
]

View File

@@ -0,0 +1,12 @@
from django.urls import path
from plane.api.views import ConfigurationEndpoint
urlpatterns = [
path(
"configs/",
ConfigurationEndpoint.as_view(),
name="configuration",
),
]

View File

@@ -0,0 +1,87 @@
from django.urls import path
from plane.api.views import (
CycleViewSet,
CycleIssueViewSet,
CycleDateCheckEndpoint,
CycleFavoriteViewSet,
TransferCycleIssueEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
CycleViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
CycleViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
CycleIssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:pk>/",
CycleIssueViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/date-check/",
CycleDateCheckEndpoint.as_view(),
name="project-cycle-date",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
CycleFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/<uuid:cycle_id>/",
CycleFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-cycle",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
TransferCycleIssueEndpoint.as_view(),
name="transfer-issues",
),
]
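The {"get": ..., "post": ...} dictionaries passed to as_view() are standard DRF ViewSet action maps: each HTTP verb on that path dispatches to the named action, with the URL kwargs forwarded. An illustrative sketch for the list/create route above:
view = CycleViewSet.as_view({"get": "list", "post": "create"})
# GET  -> CycleViewSet.list(request, slug=..., project_id=...)
# POST -> CycleViewSet.create(request, slug=..., project_id=...)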

View File

@@ -0,0 +1,37 @@
from django.urls import path
from plane.api.views import (
ProjectEstimatePointEndpoint,
BulkEstimatePointEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-estimates/",
ProjectEstimatePointEndpoint.as_view(),
name="project-estimate-points",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/estimates/",
BulkEstimatePointEndpoint.as_view(
{
"get": "list",
"post": "create",
}
),
name="bulk-create-estimate-points",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/",
BulkEstimatePointEndpoint.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="bulk-create-estimate-points",
),
]

View File

@@ -0,0 +1,13 @@
from django.urls import path
from plane.api.views import GPTIntegrationEndpoint
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
GPTIntegrationEndpoint.as_view(),
name="importer",
),
]

View File

@@ -0,0 +1,37 @@
from django.urls import path
from plane.api.views import (
ServiceIssueImportSummaryEndpoint,
ImportServiceEndpoint,
UpdateServiceImportStatusEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/importers/<str:service>/",
ServiceIssueImportSummaryEndpoint.as_view(),
name="importer-summary",
),
path(
"workspaces/<str:slug>/projects/importers/<str:service>/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/importers/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/importers/<str:service>/<uuid:pk>/",
ImportServiceEndpoint.as_view(),
name="importer",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/service/<str:service>/importers/<uuid:importer_id>/",
UpdateServiceImportStatusEndpoint.as_view(),
name="importer-status",
),
]

View File

@@ -0,0 +1,53 @@
from django.urls import path
from plane.api.views import (
InboxViewSet,
InboxIssueViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
InboxViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="inbox",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
InboxViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="inbox",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
InboxIssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="inbox-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
InboxIssueViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="inbox-issue",
),
]

View File

@@ -0,0 +1,150 @@
from django.urls import path
from plane.api.views import (
IntegrationViewSet,
WorkspaceIntegrationViewSet,
GithubRepositoriesEndpoint,
GithubRepositorySyncViewSet,
GithubIssueSyncViewSet,
GithubCommentSyncViewSet,
BulkCreateGithubIssueSyncEndpoint,
SlackProjectSyncViewSet,
)
urlpatterns = [
path(
"integrations/",
IntegrationViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="integrations",
),
path(
"integrations/<uuid:pk>/",
IntegrationViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/",
WorkspaceIntegrationViewSet.as_view(
{
"get": "list",
}
),
name="workspace-integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/<str:provider>/",
WorkspaceIntegrationViewSet.as_view(
{
"post": "create",
}
),
name="workspace-integrations",
),
path(
"workspaces/<str:slug>/workspace-integrations/<uuid:pk>/provider/",
WorkspaceIntegrationViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="workspace-integrations",
),
# Github Integrations
path(
"workspaces/<str:slug>/workspace-integrations/<uuid:workspace_integration_id>/github-repositories/",
GithubRepositoriesEndpoint.as_view(),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/",
GithubRepositorySyncViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/github-repository-sync/<uuid:pk>/",
GithubRepositorySyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/",
GithubIssueSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/bulk-create-github-issue-sync/",
BulkCreateGithubIssueSyncEndpoint.as_view(),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:pk>/",
GithubIssueSyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/",
GithubCommentSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/github-repository-sync/<uuid:repo_sync_id>/github-issue-sync/<uuid:issue_sync_id>/github-comment-sync/<uuid:pk>/",
GithubCommentSyncViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
),
## End Github Integrations
# Slack Integration
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/",
SlackProjectSyncViewSet.as_view(
{
"post": "create",
"get": "list",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/workspace-integrations/<uuid:workspace_integration_id>/project-slack-sync/<uuid:pk>/",
SlackProjectSyncViewSet.as_view(
{
"delete": "destroy",
"get": "retrieve",
}
),
),
## End Slack Integration
]

View File

@@ -0,0 +1,332 @@
from django.urls import path
from plane.api.views import (
IssueViewSet,
LabelViewSet,
BulkCreateIssueLabelsEndpoint,
BulkDeleteIssuesEndpoint,
BulkImportIssuesEndpoint,
UserWorkSpaceIssues,
SubIssuesEndpoint,
IssueLinkViewSet,
IssueAttachmentEndpoint,
ExportIssuesEndpoint,
IssueActivityEndpoint,
IssueCommentViewSet,
IssueSubscriberViewSet,
IssueReactionViewSet,
CommentReactionViewSet,
IssuePropertyViewSet,
IssueArchiveViewSet,
IssueRelationViewSet,
IssueDraftViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
IssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
IssueViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
LabelViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-labels",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/<uuid:pk>/",
LabelViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-labels",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-create-labels/",
BulkCreateIssueLabelsEndpoint.as_view(),
name="project-bulk-labels",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-delete-issues/",
BulkDeleteIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-issues/<str:service>/",
BulkImportIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
path(
"workspaces/<str:slug>/my-issues/",
UserWorkSpaceIssues.as_view(),
name="workspace-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
SubIssuesEndpoint.as_view(),
name="sub-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/",
IssueLinkViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/<uuid:pk>/",
IssueLinkViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
IssueAttachmentEndpoint.as_view(),
name="project-issue-attachments",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
IssueAttachmentEndpoint.as_view(),
name="project-issue-attachments",
),
path(
"workspaces/<str:slug>/export-issues/",
ExportIssuesEndpoint.as_view(),
name="export-issues",
),
## End Issues
## Issue Activity
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/history/",
IssueActivityEndpoint.as_view(),
name="project-issue-history",
),
## Issue Activity
## IssueComments
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
IssueCommentViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-comment",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
IssueCommentViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-comment",
),
## End IssueComments
# Issue Subscribers
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
IssueSubscriberViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-subscribers",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
IssueSubscriberViewSet.as_view({"delete": "destroy"}),
name="project-issue-subscribers",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
IssueSubscriberViewSet.as_view(
{
"get": "subscription_status",
"post": "subscribe",
"delete": "unsubscribe",
}
),
name="project-issue-subscribers",
),
## End Issue Subscribers
# Issue Reactions
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
IssueReactionViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-reactions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
IssueReactionViewSet.as_view(
{
"delete": "destroy",
}
),
name="project-issue-reactions",
),
## End Issue Reactions
# Comment Reactions
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
CommentReactionViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-comment-reactions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
CommentReactionViewSet.as_view(
{
"delete": "destroy",
}
),
name="project-issue-comment-reactions",
),
## End Comment Reactions
## IssueProperty
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/",
IssuePropertyViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-roadmap",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-properties/<uuid:pk>/",
IssuePropertyViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-roadmap",
),
## IssueProperty End
## Issue Archives
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
IssueArchiveViewSet.as_view(
{
"get": "list",
}
),
name="project-issue-archive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
IssueArchiveViewSet.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="project-issue-archive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
IssueArchiveViewSet.as_view(
{
"post": "unarchive",
}
),
name="project-issue-archive",
),
## End Issue Archives
## Issue Relation
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
IssueRelationViewSet.as_view(
{
"post": "create",
}
),
name="issue-relation",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/<uuid:pk>/",
IssueRelationViewSet.as_view(
{
"delete": "destroy",
}
),
name="issue-relation",
),
## End Issue Relation
## Issue Drafts
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/",
IssueDraftViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-draft",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-drafts/<uuid:pk>/",
IssueDraftViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-draft",
),
]

View File

@@ -0,0 +1,104 @@
from django.urls import path
from plane.api.views import (
ModuleViewSet,
ModuleIssueViewSet,
ModuleLinkViewSet,
ModuleFavoriteViewSet,
BulkImportModulesEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
ModuleViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-modules",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
ModuleViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-modules",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
ModuleIssueViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-module-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:pk>/",
ModuleIssueViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-module-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/",
ModuleLinkViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-issue-module-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/<uuid:pk>/",
ModuleLinkViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-issue-module-links",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/",
ModuleFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-module",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/<uuid:module_id>/",
ModuleFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-module",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/bulk-import-modules/<str:service>/",
BulkImportModulesEndpoint.as_view(),
name="bulk-modules-create",
),
]

View File

@@ -0,0 +1,66 @@
from django.urls import path
from plane.api.views import (
NotificationViewSet,
UnreadNotificationEndpoint,
MarkAllReadNotificationViewSet,
)
urlpatterns = [
path(
"workspaces/<str:slug>/users/notifications/",
NotificationViewSet.as_view(
{
"get": "list",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/<uuid:pk>/",
NotificationViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/<uuid:pk>/read/",
NotificationViewSet.as_view(
{
"post": "mark_read",
"delete": "mark_unread",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/<uuid:pk>/archive/",
NotificationViewSet.as_view(
{
"post": "archive",
"delete": "unarchive",
}
),
name="notifications",
),
path(
"workspaces/<str:slug>/users/notifications/unread/",
UnreadNotificationEndpoint.as_view(),
name="unread-notifications",
),
path(
"workspaces/<str:slug>/users/notifications/mark-all-read/",
MarkAllReadNotificationViewSet.as_view(
{
"post": "create",
}
),
name="mark-all-read-notifications",
),
]

View File

@@ -0,0 +1,79 @@
from django.urls import path
from plane.api.views import (
PageViewSet,
PageBlockViewSet,
PageFavoriteViewSet,
CreateIssueFromPageBlockEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/",
PageViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/",
PageViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/",
PageBlockViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-page-blocks",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:pk>/",
PageBlockViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-page-blocks",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/",
PageFavoriteViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="user-favorite-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/<uuid:page_id>/",
PageFavoriteViewSet.as_view(
{
"delete": "destroy",
}
),
name="user-favorite-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/page-blocks/<uuid:page_block_id>/issues/",
CreateIssueFromPageBlockEndpoint.as_view(),
name="page-block-issues",
),
]

View File

@@ -0,0 +1,144 @@
from django.urls import path
from plane.api.views import (
ProjectViewSet,
InviteProjectEndpoint,
ProjectMemberViewSet,
ProjectMemberEndpoint,
ProjectMemberInvitationsViewset,
ProjectMemberUserEndpoint,
AddMemberToProjectEndpoint,
ProjectJoinEndpoint,
AddTeamToProjectEndpoint,
ProjectUserViewsEndpoint,
ProjectIdentifierEndpoint,
ProjectFavoritesViewSet,
LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/",
ProjectViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project",
),
path(
"workspaces/<str:slug>/projects/<uuid:pk>/",
ProjectViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project",
),
path(
"workspaces/<str:slug>/project-identifiers/",
ProjectIdentifierEndpoint.as_view(),
name="project-identifiers",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/invite/",
InviteProjectEndpoint.as_view(),
name="invite-project",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/",
ProjectMemberViewSet.as_view({"get": "list"}),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/<uuid:pk>/",
ProjectMemberViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/",
ProjectMemberEndpoint.as_view(),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/add/",
AddMemberToProjectEndpoint.as_view(),
name="project",
),
path(
"workspaces/<str:slug>/projects/join/",
ProjectJoinEndpoint.as_view(),
name="project-join",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/team-invite/",
AddTeamToProjectEndpoint.as_view(),
name="projects",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/invitations/",
ProjectMemberInvitationsViewset.as_view({"get": "list"}),
name="project-member-invite",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/invitations/<uuid:pk>/",
ProjectMemberInvitationsViewset.as_view(
{
"get": "retrieve",
"delete": "destroy",
}
),
name="project-member-invite",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-views/",
ProjectUserViewsEndpoint.as_view(),
name="project-view",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/me/",
ProjectMemberUserEndpoint.as_view(),
name="project-member-view",
),
path(
"workspaces/<str:slug>/user-favorite-projects/",
ProjectFavoritesViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-favorite",
),
path(
"workspaces/<str:slug>/user-favorite-projects/<uuid:project_id>/",
ProjectFavoritesViewSet.as_view(
{
"delete": "destroy",
}
),
name="project-favorite",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/leave/",
LeaveProjectEndpoint.as_view(),
name="leave-project",
),
path(
"project-covers/",
ProjectPublicCoverImagesEndpoint.as_view(),
name="project-covers",
),
]

View File

@@ -0,0 +1,151 @@
from django.urls import path
from plane.api.views import (
ProjectDeployBoardViewSet,
ProjectDeployBoardPublicSettingsEndpoint,
ProjectIssuesPublicEndpoint,
IssueRetrievePublicEndpoint,
IssueCommentPublicViewSet,
IssueReactionPublicViewSet,
CommentReactionPublicViewSet,
InboxIssuePublicViewSet,
IssueVotePublicViewSet,
WorkspaceProjectDeployBoardEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/",
ProjectDeployBoardViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-deploy-board",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-deploy-boards/<uuid:pk>/",
ProjectDeployBoardViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-deploy-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/settings/",
ProjectDeployBoardPublicSettingsEndpoint.as_view(),
name="project-deploy-board-settings",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/",
ProjectIssuesPublicEndpoint.as_view(),
name="project-deploy-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/",
IssueRetrievePublicEndpoint.as_view(),
name="workspace-project-boards",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
IssueCommentPublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="issue-comments-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
IssueCommentPublicViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="issue-comments-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
IssueReactionPublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="issue-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
IssueReactionPublicViewSet.as_view(
{
"delete": "destroy",
}
),
name="issue-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
CommentReactionPublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="comment-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
CommentReactionPublicViewSet.as_view(
{
"delete": "destroy",
}
),
name="comment-reactions-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
InboxIssuePublicViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="inbox-issue",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:pk>/",
InboxIssuePublicViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="inbox-issue",
),
path(
"public/workspaces/<str:slug>/project-boards/<uuid:project_id>/issues/<uuid:issue_id>/votes/",
IssueVotePublicViewSet.as_view(
{
"get": "list",
"post": "create",
"delete": "destroy",
}
),
name="issue-vote-project-board",
),
path(
"public/workspaces/<str:slug>/project-boards/",
WorkspaceProjectDeployBoardEndpoint.as_view(),
name="workspace-project-boards",
),
]

View File

@@ -0,0 +1,13 @@
from django.urls import path
from plane.api.views import ReleaseNotesEndpoint
urlpatterns = [
path(
"release-notes/",
ReleaseNotesEndpoint.as_view(),
name="release-notes",
),
]

View File

@@ -0,0 +1,21 @@
from django.urls import path
from plane.api.views import (
GlobalSearchEndpoint,
IssueSearchEndpoint,
)
urlpatterns = [
path(
"workspaces/<str:slug>/search/",
GlobalSearchEndpoint.as_view(),
name="global-search",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/search-issues/",
IssueSearchEndpoint.as_view(),
name="project-issue-search",
),
]

View File

@@ -0,0 +1,30 @@
from django.urls import path
from plane.api.views import StateViewSet
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/states/",
StateViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-states",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/",
StateViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-state",
),
]

View File

@@ -0,0 +1,13 @@
from django.urls import path
from plane.api.views import UnsplashEndpoint
urlpatterns = [
path(
"unsplash/",
UnsplashEndpoint.as_view(),
name="unsplash",
),
]

Some files were not shown because too many files have changed in this diff.