diff --git a/.dockerignore b/.dockerignore index 32158cd9b..fb5a407e9 100644 --- a/.dockerignore +++ b/.dockerignore @@ -16,3 +16,48 @@ out/ **/out/ dist/ **/dist/ +# Logs +npm-debug.log* +pnpm-debug.log* +.pnpm-debug.log* +yarn-debug.log* +yarn-error.log* + +# OS junk +.DS_Store +Thumbs.db + +# Editor settings +.vscode +.idea + +# Coverage and test output +coverage/ +**/coverage/ +*.lcov +.junit/ +test-results/ + +# Caches and build artifacts +.cache/ +**/.cache/ +storybook-static/ +*storybook.log +*.tsbuildinfo + +# Local env and secrets +.env.local +.env.development.local +.env.test.local +.env.production.local +.secrets +tmp/ +temp/ + +# Database/cache dumps +*.rdb +*.rdb.gz + +# Misc +*.pem +*.key diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml index acd9348d2..087a012d4 100644 --- a/.github/workflows/build-branch.yml +++ b/.github/workflows/build-branch.yml @@ -35,6 +35,10 @@ on: - preview - canary +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + env: TARGET_BRANCH: ${{ github.ref_name }} ARM64_BUILD: ${{ github.event.inputs.arm64 }} @@ -268,15 +272,14 @@ jobs: if: ${{ needs.branch_build_setup.outputs.aio_build == 'true' }} name: Build-Push AIO Docker Image runs-on: ubuntu-22.04 - needs: [ - branch_build_setup, - branch_build_push_admin, - branch_build_push_web, - branch_build_push_space, - branch_build_push_live, - branch_build_push_api, - branch_build_push_proxy - ] + needs: + - branch_build_setup + - branch_build_push_admin + - branch_build_push_web + - branch_build_push_space + - branch_build_push_live + - branch_build_push_api + - branch_build_push_proxy steps: - name: Checkout Files uses: actions/checkout@v4 @@ -285,7 +288,7 @@ jobs: id: prepare_aio_assets run: | cd deployments/aio/community - + if [ "${{ needs.branch_build_setup.outputs.build_type }}" == "Release" ]; then aio_version=${{ needs.branch_build_setup.outputs.release_version }} else @@ -324,7 +327,14 @@ jobs: 
upload_build_assets: name: Upload Build Assets runs-on: ubuntu-22.04 - needs: [branch_build_setup, branch_build_push_admin, branch_build_push_web, branch_build_push_space, branch_build_push_live, branch_build_push_api, branch_build_push_proxy] + needs: + - branch_build_setup + - branch_build_push_admin + - branch_build_push_web + - branch_build_push_space + - branch_build_push_live + - branch_build_push_api + - branch_build_push_proxy steps: - name: Checkout Files uses: actions/checkout@v4 @@ -397,4 +407,3 @@ jobs: ${{ github.workspace }}/deployments/cli/community/docker-compose.yml ${{ github.workspace }}/deployments/cli/community/variables.env ${{ github.workspace }}/deployments/swarm/community/swarm.sh - diff --git a/.github/workflows/build-test-pull-request.yml b/.github/workflows/build-test-pull-request.yml deleted file mode 100644 index acbd9d26e..000000000 --- a/.github/workflows/build-test-pull-request.yml +++ /dev/null @@ -1,95 +0,0 @@ -name: Build and Lint on Pull Request - -on: - workflow_dispatch: - pull_request: - types: ["opened", "synchronize", "ready_for_review"] - -jobs: - lint-server: - if: github.event.pull_request.draft == false - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.x" # Specify the Python version you need - - name: Install Pylint - run: python -m pip install ruff - - name: Install Server Dependencies - run: cd apps/server && pip install -r requirements.txt - - name: Lint apps/server - run: ruff check --fix apps/server - - lint-admin: - if: github.event.pull_request.draft == false - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 20.x - - run: yarn install - - run: yarn lint --filter=admin - - lint-space: - if: github.event.pull_request.draft == false - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Setup Node.js - uses: 
actions/setup-node@v4 - with: - node-version: 20.x - - run: yarn install - - run: yarn lint --filter=space - - lint-web: - if: github.event.pull_request.draft == false - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 20.x - - run: yarn install - - run: yarn lint --filter=web - - build-admin: - needs: lint-admin - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 20.x - - run: yarn install - - run: yarn build --filter=admin - - build-space: - needs: lint-space - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 20.x - - run: yarn install - - run: yarn build --filter=space - - build-web: - needs: lint-web - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 20.x - - run: yarn install - - run: yarn build --filter=web diff --git a/.github/workflows/check-version.yml b/.github/workflows/check-version.yml index ca8b6f8b3..855ee359f 100644 --- a/.github/workflows/check-version.yml +++ b/.github/workflows/check-version.yml @@ -17,8 +17,6 @@ jobs: - name: Setup Node.js uses: actions/setup-node@v4 - with: - node-version: '18' - name: Get PR Branch version run: echo "PR_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV diff --git a/.github/workflows/pull-request-build-lint-api.yml b/.github/workflows/pull-request-build-lint-api.yml new file mode 100644 index 000000000..50d105ef5 --- /dev/null +++ b/.github/workflows/pull-request-build-lint-api.yml @@ -0,0 +1,40 @@ +name: Build and lint API + +on: + workflow_dispatch: + pull_request: + branches: + - "preview" + types: + - "opened" + - "synchronize" + - "ready_for_review" + - "review_requested" + - "reopened" + paths: + - "apps/api/**" + +concurrency: 
+ group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + lint-api: + name: Lint API + runs-on: ubuntu-latest + timeout-minutes: 25 + if: | + github.event.pull_request.draft == false && + github.event.pull_request.requested_reviewers != null + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.x" + - name: Install Ruff + run: python -m pip install ruff + - name: Install API Dependencies + run: cd apps/api && pip install -r requirements.txt + - name: Lint apps/api + run: ruff check --fix apps/api diff --git a/.github/workflows/pull-request-build-lint-web-apps.yml b/.github/workflows/pull-request-build-lint-web-apps.yml new file mode 100644 index 000000000..435ec2093 --- /dev/null +++ b/.github/workflows/pull-request-build-lint-web-apps.yml @@ -0,0 +1,53 @@ +name: Build and lint web apps + +on: + workflow_dispatch: + pull_request: + branches: + - "preview" + types: + - "opened" + - "synchronize" + - "ready_for_review" + - "review_requested" + - "reopened" + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + build-and-lint: + name: Build and lint web apps + runs-on: ubuntu-latest + timeout-minutes: 25 + if: | + github.event.pull_request.draft == false && + github.event.pull_request.requested_reviewers != null + env: + TURBO_SCM_BASE: ${{ github.event.pull_request.base.sha }} + TURBO_SCM_HEAD: ${{ github.sha }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 50 + filter: blob:none + + - name: Set up Node.js + uses: actions/setup-node@v4 + + - name: Enable Corepack and pnpm + run: corepack enable pnpm + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Lint Affected + run: pnpm turbo run check:lint --affected + + - name: Check Affected format + run: pnpm turbo run check:format --affected + + - name: Build Affected + run: pnpm turbo run build --affected 
diff --git a/.gitignore b/.gitignore index a6a407ba9..4baa3495a 100644 --- a/.gitignore +++ b/.gitignore @@ -24,11 +24,13 @@ out/ .DS_Store *.pem .history +tsconfig.tsbuildinfo # Debug npm-debug.log* yarn-debug.log* yarn-error.log* +pnpm-debug.log* .pnpm-debug.log* # Local env files @@ -60,6 +62,7 @@ node_modules/ assets/dist/ npm-debug.log yarn-error.log +pnpm-debug.log # Editor directories and files .idea @@ -75,10 +78,9 @@ package-lock.json # lock files package-lock.json -pnpm-lock.yaml -pnpm-workspace.yaml -.npmrc + + .secrets tmp/ @@ -95,3 +97,5 @@ dev-editor # Redis *.rdb *.rdb.gz + +storybook-static diff --git a/.npmrc b/.npmrc new file mode 100644 index 000000000..d652acc3b --- /dev/null +++ b/.npmrc @@ -0,0 +1,34 @@ +# Enforce pnpm workspace behavior and allow Turbo's lifecycle hooks if scripts are disabled +# This repo uses pnpm with workspaces. + +# Prefer linking local workspace packages when available +prefer-workspace-packages=true +link-workspace-packages=true +shared-workspace-lockfile=true + +# Make peer installs smoother across the monorepo +auto-install-peers=true +strict-peer-dependencies=false + +# If scripts are disabled (e.g., CI with --ignore-scripts), allowlisted packages can still run their hooks +# Turbo occasionally performs postinstall tasks for optimal performance +# moved to pnpm-workspace.yaml: onlyBuiltDependencies (e.g., allow turbo) + +public-hoist-pattern[]=*eslint* +public-hoist-pattern[]=prettier +public-hoist-pattern[]=typescript + +# Reproducible installs across CI and dev +prefer-frozen-lockfile=true + +# Prefer resolving to highest versions in monorepo to reduce duplication +resolution-mode=highest + +# Speed up native module builds by caching side effects +side-effects-cache=true + +# Speed up local dev by reusing local store when possible +prefer-offline=true + +# Ensure workspace protocol is used when adding internal deps +save-workspace-protocol=true diff --git a/.yarnrc.yml b/.yarnrc.yml deleted file mode 100644 index 
3186f3f07..000000000 --- a/.yarnrc.yml +++ /dev/null @@ -1 +0,0 @@ -nodeLinker: node-modules diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c401c3c2c..39eb4e800 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -73,7 +73,7 @@ docker compose -f docker-compose-local.yml up 4. Start web apps: ```bash -yarn dev +pnpm dev ``` 5. Open your browser to http://localhost:3001/god-mode/ and register yourself as instance admin diff --git a/README.md b/README.md index da57f38d8..f6b364bef 100644 --- a/README.md +++ b/README.md @@ -2,11 +2,10 @@

- Plane Logo + Plane Logo

-

Plane

-

Open-source project management that unlocks customer value

+

Modern project management for all teams

@@ -25,14 +24,7 @@

Plane Screens - - - Plane Screens @@ -48,13 +40,13 @@ Meet [Plane](https://plane.so/), an open-source project management tool to track Getting started with Plane is simple. Choose the setup that works best for you: - **Plane Cloud** -Sign up for a free account on [Plane Cloud](https://app.plane.so)—it's the fastest way to get up and running without worrying about infrastructure. + Sign up for a free account on [Plane Cloud](https://app.plane.so)—it's the fastest way to get up and running without worrying about infrastructure. - **Self-host Plane** -Prefer full control over your data and infrastructure? Install and run Plane on your own servers. Follow our detailed [deployment guides](https://developers.plane.so/self-hosting/overview) to get started. + Prefer full control over your data and infrastructure? Install and run Plane on your own servers. Follow our detailed [deployment guides](https://developers.plane.so/self-hosting/overview) to get started. -| Installation methods | Docs link | -| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| Installation methods | Docs link | +| -------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | Docker | [![Docker](https://img.shields.io/badge/docker-%230db7ed.svg?style=for-the-badge&logo=docker&logoColor=white)](https://developers.plane.so/self-hosting/methods/docker-compose) | | Kubernetes | [![Kubernetes](https://img.shields.io/badge/kubernetes-%23326ce5.svg?style=for-the-badge&logo=kubernetes&logoColor=white)](https://developers.plane.so/self-hosting/methods/kubernetes) | @@ -63,58 +55,58 @@ Prefer full control over your data and infrastructure? 
Install and run Plane on ## 🌟 Features - **Issues** -Efficiently create and manage tasks with a robust rich text editor that supports file uploads. Enhance organization and tracking by adding sub-properties and referencing related issues. + Efficiently create and manage tasks with a robust rich text editor that supports file uploads. Enhance organization and tracking by adding sub-properties and referencing related issues. - **Cycles** -Maintain your team’s momentum with Cycles. Track progress effortlessly using burn-down charts and other insightful tools. + Maintain your team’s momentum with Cycles. Track progress effortlessly using burn-down charts and other insightful tools. - **Modules** -Simplify complex projects by dividing them into smaller, manageable modules. + Simplify complex projects by dividing them into smaller, manageable modules. - **Views** -Customize your workflow by creating filters to display only the most relevant issues. Save and share these views with ease. + Customize your workflow by creating filters to display only the most relevant issues. Save and share these views with ease. - **Pages** -Capture and organize ideas using Plane Pages, complete with AI capabilities and a rich text editor. Format text, insert images, add hyperlinks, or convert your notes into actionable items. + Capture and organize ideas using Plane Pages, complete with AI capabilities and a rich text editor. Format text, insert images, add hyperlinks, or convert your notes into actionable items. - **Analytics** -Access real-time insights across all your Plane data. Visualize trends, remove blockers, and keep your projects moving forward. + Access real-time insights across all your Plane data. Visualize trends, remove blockers, and keep your projects moving forward. - **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution. 
- ## 🛠️ Local development See [CONTRIBUTING](./CONTRIBUTING.md) ## ⚙️ Built with + [![Next JS](https://img.shields.io/badge/next.js-000000?style=for-the-badge&logo=nextdotjs&logoColor=white)](https://nextjs.org/) [![Django](https://img.shields.io/badge/Django-092E20?style=for-the-badge&logo=django&logoColor=green)](https://www.djangoproject.com/) [![Node JS](https://img.shields.io/badge/node.js-339933?style=for-the-badge&logo=Node.js&logoColor=white)](https://nodejs.org/en) ## 📸 Screenshots -

+

Plane Views

-

- - - -

+ +

+

+ + Plane Cycles and Modules @@ -123,7 +115,7 @@ See [CONTRIBUTING](./CONTRIBUTING.md)

Plane Analytics @@ -132,25 +124,16 @@ See [CONTRIBUTING](./CONTRIBUTING.md)

Plane Pages

-

- - Plane Command Menu - -

-

## 📝 Documentation + Explore Plane's [product documentation](https://docs.plane.so/) and [developer documentation](https://developers.plane.so/) to learn about features, setup, and usage. ## ❤️ Community @@ -186,6 +169,6 @@ Please read [CONTRIBUTING.md](https://github.com/makeplane/plane/blob/master/CON - ## License + This project is licensed under the [GNU Affero General Public License v3.0](https://github.com/makeplane/plane/blob/master/LICENSE.txt). diff --git a/apps/admin/.eslintignore b/apps/admin/.eslintignore new file mode 100644 index 000000000..27e50ad7c --- /dev/null +++ b/apps/admin/.eslintignore @@ -0,0 +1,12 @@ +.next/* +out/* +public/* +dist/* +node_modules/* +.turbo/* +.env* +.env +.env.local +.env.development +.env.production +.env.test \ No newline at end of file diff --git a/apps/admin/.eslintrc.js b/apps/admin/.eslintrc.js index 666f5ab50..1662fabf7 100644 --- a/apps/admin/.eslintrc.js +++ b/apps/admin/.eslintrc.js @@ -1,5 +1,4 @@ module.exports = { root: true, extends: ["@plane/eslint-config/next.js"], - parser: "@typescript-eslint/parser", }; diff --git a/apps/admin/.prettierignore b/apps/admin/.prettierignore index 43e8a7b8f..3cd6b08a0 100644 --- a/apps/admin/.prettierignore +++ b/apps/admin/.prettierignore @@ -2,5 +2,5 @@ .vercel .tubro out/ -dis/ -build/ \ No newline at end of file +dist/ +build/ diff --git a/apps/admin/Dockerfile.admin b/apps/admin/Dockerfile.admin index 01884206e..6bfa0765f 100644 --- a/apps/admin/Dockerfile.admin +++ b/apps/admin/Dockerfile.admin @@ -1,5 +1,11 @@ +# syntax=docker/dockerfile:1.7 FROM node:22-alpine AS base +# Setup pnpm package manager with corepack and configure global bin directory for caching +ENV PNPM_HOME="/pnpm" +ENV PATH="$PNPM_HOME:$PATH" +RUN corepack enable + # ***************************************************************************** # STAGE 1: Build the project # ***************************************************************************** @@ -7,7 +13,8 @@ FROM base AS builder RUN apk add 
--no-cache libc6-compat WORKDIR /app -RUN yarn global add turbo +ARG TURBO_VERSION=2.5.6 +RUN corepack enable pnpm && pnpm add -g turbo@${TURBO_VERSION} COPY . . RUN turbo prune --scope=admin --docker @@ -22,11 +29,13 @@ WORKDIR /app COPY .gitignore .gitignore COPY --from=builder /app/out/json/ . -COPY --from=builder /app/out/yarn.lock ./yarn.lock -RUN yarn install --network-timeout 500000 +COPY --from=builder /app/out/pnpm-lock.yaml ./pnpm-lock.yaml +RUN corepack enable pnpm +RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm fetch --store-dir=/pnpm/store COPY --from=builder /app/out/full/ . COPY turbo.json turbo.json +RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm install --offline --frozen-lockfile --store-dir=/pnpm/store ARG NEXT_PUBLIC_API_BASE_URL="" ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL @@ -49,7 +58,7 @@ ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL ENV NEXT_TELEMETRY_DISABLED=1 ENV TURBO_TELEMETRY_DISABLED=1 -RUN yarn turbo run build --filter=admin +RUN pnpm turbo run build --filter=admin # ***************************************************************************** # STAGE 3: Copy the project and start it @@ -91,4 +100,4 @@ ENV TURBO_TELEMETRY_DISABLED=1 EXPOSE 3000 -CMD ["node", "apps/admin/server.js"] \ No newline at end of file +CMD ["node", "apps/admin/server.js"] diff --git a/apps/admin/Dockerfile.dev b/apps/admin/Dockerfile.dev index edf82d227..0b82669c4 100644 --- a/apps/admin/Dockerfile.dev +++ b/apps/admin/Dockerfile.dev @@ -5,8 +5,8 @@ WORKDIR /app COPY . . 
-RUN yarn global add turbo -RUN yarn install +RUN corepack enable pnpm && pnpm add -g turbo +RUN pnpm install ENV NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode" @@ -14,4 +14,4 @@ EXPOSE 3000 VOLUME [ "/app/node_modules", "/app/admin/node_modules" ] -CMD ["yarn", "dev", "--filter=admin"] +CMD ["pnpm", "dev", "--filter=admin"] diff --git a/apps/admin/app/(all)/(dashboard)/authentication/gitlab/page.tsx b/apps/admin/app/(all)/(dashboard)/authentication/gitlab/page.tsx index cdcfcd61b..f0b464acb 100644 --- a/apps/admin/app/(all)/(dashboard)/authentication/gitlab/page.tsx +++ b/apps/admin/app/(all)/(dashboard)/authentication/gitlab/page.tsx @@ -66,9 +66,11 @@ const InstanceGitlabAuthenticationPage = observer(() => { { - Boolean(parseInt(enableGitlabConfig)) === true - ? updateConfig("IS_GITLAB_ENABLED", "0") - : updateConfig("IS_GITLAB_ENABLED", "1"); + if (Boolean(parseInt(enableGitlabConfig)) === true) { + updateConfig("IS_GITLAB_ENABLED", "0"); + } else { + updateConfig("IS_GITLAB_ENABLED", "1"); + } }} size="sm" disabled={isSubmitting || !formattedConfig} diff --git a/apps/admin/app/(all)/(dashboard)/authentication/google/page.tsx b/apps/admin/app/(all)/(dashboard)/authentication/google/page.tsx index 6ac4ea09b..7cf42cb57 100644 --- a/apps/admin/app/(all)/(dashboard)/authentication/google/page.tsx +++ b/apps/admin/app/(all)/(dashboard)/authentication/google/page.tsx @@ -67,9 +67,11 @@ const InstanceGoogleAuthenticationPage = observer(() => { { - Boolean(parseInt(enableGoogleConfig)) === true - ? 
updateConfig("IS_GOOGLE_ENABLED", "0") - : updateConfig("IS_GOOGLE_ENABLED", "1"); + if (Boolean(parseInt(enableGoogleConfig)) === true) { + updateConfig("IS_GOOGLE_ENABLED", "0"); + } else { + updateConfig("IS_GOOGLE_ENABLED", "1"); + } }} size="sm" disabled={isSubmitting || !formattedConfig} diff --git a/apps/admin/app/(all)/(dashboard)/email/page.tsx b/apps/admin/app/(all)/(dashboard)/email/page.tsx index 445ff2ec7..792bafe35 100644 --- a/apps/admin/app/(all)/(dashboard)/email/page.tsx +++ b/apps/admin/app/(all)/(dashboard)/email/page.tsx @@ -9,7 +9,7 @@ import { useInstance } from "@/hooks/store"; // components import { InstanceEmailForm } from "./email-config-form"; -const InstanceEmailPage = observer(() => { +const InstanceEmailPage: React.FC = observer(() => { // store const { fetchInstanceConfigurations, formattedConfig, disableEmail } = useInstance(); @@ -29,7 +29,7 @@ const InstanceEmailPage = observer(() => { message: "Email feature has been disabled", type: TOAST_TYPE.SUCCESS, }); - } catch (error) { + } catch (_error) { setToast({ title: "Error disabling email", message: "Failed to disable email feature. Please try again.", diff --git a/apps/admin/app/(all)/(dashboard)/sidebar-dropdown.tsx b/apps/admin/app/(all)/(dashboard)/sidebar-dropdown.tsx index 5554947be..656d0531d 100644 --- a/apps/admin/app/(all)/(dashboard)/sidebar-dropdown.tsx +++ b/apps/admin/app/(all)/(dashboard)/sidebar-dropdown.tsx @@ -42,7 +42,7 @@ export const AdminSidebarDropdown = observer(() => { )} >
- {currentUser?.email} + {currentUser?.email}
( +
+ + + +
+); diff --git a/apps/admin/app/(all)/(home)/layout.tsx b/apps/admin/app/(all)/(home)/layout.tsx index 19cab04cb..25638c677 100644 --- a/apps/admin/app/(all)/(home)/layout.tsx +++ b/apps/admin/app/(all)/(home)/layout.tsx @@ -1,35 +1,9 @@ "use client"; -import Image from "next/image"; -import Link from "next/link"; -import { useTheme } from "next-themes"; -// logo assets -import PlaneBackgroundPatternDark from "public/auth/background-pattern-dark.svg"; -import PlaneBackgroundPattern from "public/auth/background-pattern.svg"; -import BlackHorizontalLogo from "public/plane-logos/black-horizontal-with-blue-logo.png"; -import WhiteHorizontalLogo from "public/plane-logos/white-horizontal-with-blue-logo.png"; - export default function RootLayout({ children }: { children: React.ReactNode }) { - const { resolvedTheme } = useTheme(); - - const patternBackground = resolvedTheme === "light" ? PlaneBackgroundPattern : PlaneBackgroundPatternDark; - const logo = resolvedTheme === "light" ? BlackHorizontalLogo : WhiteHorizontalLogo; - return ( -
-
-
-
- - Plane logo - -
-
-
- Plane background pattern -
-
{children}
-
+
+ {children}
); } diff --git a/apps/admin/app/(all)/(home)/page.tsx b/apps/admin/app/(all)/(home)/page.tsx index 80ea40ee6..e6ebdf455 100644 --- a/apps/admin/app/(all)/(home)/page.tsx +++ b/apps/admin/app/(all)/(home)/page.tsx @@ -2,8 +2,8 @@ import { observer } from "mobx-react"; // components +import { LogoSpinner } from "@/components/common/logo-spinner"; import { InstanceFailureView } from "@/components/instance/failure"; -import { InstanceLoading } from "@/components/instance/loading"; import { InstanceSetupForm } from "@/components/instance/setup-form"; // hooks import { useInstance } from "@/hooks/store"; @@ -17,46 +17,24 @@ const HomePage = () => { // if instance is not fetched, show loading if (!instance && !error) { return ( -
- +
+
); } // if instance fetch fails, show failure view if (error) { - return ( -
- -
- ); + return ; } // if instance is fetched and setup is not done, show setup form if (instance && !instance?.is_setup_done) { - return ( -
- -
- ); + return ; } // if instance is fetched and setup is done, show sign in form - return ( -
-
-
-

- Manage your Plane instance -

-

- Configure instance-wide settings to secure your instance -

-
- -
-
- ); + return ; }; export default observer(HomePage); diff --git a/apps/admin/app/(all)/(home)/sign-in-form.tsx b/apps/admin/app/(all)/(home)/sign-in-form.tsx index 12b250a93..a5a6ca3e3 100644 --- a/apps/admin/app/(all)/(home)/sign-in-form.tsx +++ b/apps/admin/app/(all)/(home)/sign-in-form.tsx @@ -10,7 +10,9 @@ import { Button, Input, Spinner } from "@plane/ui"; // components import { Banner } from "@/components/common/banner"; // local components +import { FormHeader } from "../../../core/components/instance/form-header"; import { AuthBanner } from "./auth-banner"; +import { AuthHeader } from "./auth-header"; import { authErrorHandler } from "./auth-helpers"; // service initialization @@ -101,78 +103,91 @@ export const InstanceSignInForm: FC = () => { }, [errorCode]); return ( -
setIsSubmitting(true)} - onError={() => setIsSubmitting(false)} - > - {errorData.type && errorData?.message ? ( - - ) : ( - <>{errorInfo && setErrorInfo(value)} />} - )} - - -
- - handleFormChange("email", e.target.value)} - autoComplete="on" - autoFocus - /> -
- -
- -
- handleFormChange("password", e.target.value)} - autoComplete="on" + <> + +
+
+ - {showPassword ? ( - - ) : ( - - )} + setIsSubmitting(true)} + onError={() => setIsSubmitting(false)} + > + {errorData.type && errorData?.message ? ( + + ) : ( + <> + {errorInfo && setErrorInfo(value)} />} + + )} + + +
+ + handleFormChange("email", e.target.value)} + autoComplete="on" + autoFocus + /> +
+ +
+ +
+ handleFormChange("password", e.target.value)} + autoComplete="on" + /> + {showPassword ? ( + + ) : ( + + )} +
+
+
+ +
+
-
- -
- + ); }; diff --git a/apps/admin/core/components/authentication/authentication-method-card.tsx b/apps/admin/core/components/authentication/authentication-method-card.tsx index 897deb7c4..566551f48 100644 --- a/apps/admin/core/components/authentication/authentication-method-card.tsx +++ b/apps/admin/core/components/authentication/authentication-method-card.tsx @@ -7,8 +7,8 @@ import { cn } from "@plane/utils"; type Props = { name: string; description: string; - icon: JSX.Element; - config: JSX.Element; + icon: React.ReactNode; + config: React.ReactNode; disabled?: boolean; withBorder?: boolean; unavailable?: boolean; diff --git a/apps/admin/core/components/authentication/email-config-switch.tsx b/apps/admin/core/components/authentication/email-config-switch.tsx index 783810e2f..16eb98704 100644 --- a/apps/admin/core/components/authentication/email-config-switch.tsx +++ b/apps/admin/core/components/authentication/email-config-switch.tsx @@ -25,9 +25,8 @@ export const EmailCodesConfiguration: React.FC = observer((props) => { { - Boolean(parseInt(enableMagicLogin)) === true - ? updateConfig("ENABLE_MAGIC_LINK_LOGIN", "0") - : updateConfig("ENABLE_MAGIC_LINK_LOGIN", "1"); + const newEnableMagicLogin = Boolean(parseInt(enableMagicLogin)) === true ? "0" : "1"; + updateConfig("ENABLE_MAGIC_LINK_LOGIN", newEnableMagicLogin); }} size="sm" disabled={disabled} diff --git a/apps/admin/core/components/authentication/github-config.tsx b/apps/admin/core/components/authentication/github-config.tsx index 57035580f..249f1ebc4 100644 --- a/apps/admin/core/components/authentication/github-config.tsx +++ b/apps/admin/core/components/authentication/github-config.tsx @@ -35,9 +35,8 @@ export const GithubConfiguration: React.FC = observer((props) => { { - Boolean(parseInt(enableGithubConfig)) === true - ? updateConfig("IS_GITHUB_ENABLED", "0") - : updateConfig("IS_GITHUB_ENABLED", "1"); + const newEnableGithubConfig = Boolean(parseInt(enableGithubConfig)) === true ? 
"0" : "1"; + updateConfig("IS_GITHUB_ENABLED", newEnableGithubConfig); }} size="sm" disabled={disabled} diff --git a/apps/admin/core/components/authentication/gitlab-config.tsx b/apps/admin/core/components/authentication/gitlab-config.tsx index 4181338d2..f5586f3f3 100644 --- a/apps/admin/core/components/authentication/gitlab-config.tsx +++ b/apps/admin/core/components/authentication/gitlab-config.tsx @@ -35,9 +35,8 @@ export const GitlabConfiguration: React.FC = observer((props) => { { - Boolean(parseInt(enableGitlabConfig)) === true - ? updateConfig("IS_GITLAB_ENABLED", "0") - : updateConfig("IS_GITLAB_ENABLED", "1"); + const newEnableGitlabConfig = Boolean(parseInt(enableGitlabConfig)) === true ? "0" : "1"; + updateConfig("IS_GITLAB_ENABLED", newEnableGitlabConfig); }} size="sm" disabled={disabled} diff --git a/apps/admin/core/components/authentication/google-config.tsx b/apps/admin/core/components/authentication/google-config.tsx index 0f3cc98e3..ec7501b34 100644 --- a/apps/admin/core/components/authentication/google-config.tsx +++ b/apps/admin/core/components/authentication/google-config.tsx @@ -35,9 +35,8 @@ export const GoogleConfiguration: React.FC = observer((props) => { { - Boolean(parseInt(enableGoogleConfig)) === true - ? updateConfig("IS_GOOGLE_ENABLED", "0") - : updateConfig("IS_GOOGLE_ENABLED", "1"); + const newEnableGoogleConfig = Boolean(parseInt(enableGoogleConfig)) === true ? 
"0" : "1"; + updateConfig("IS_GOOGLE_ENABLED", newEnableGoogleConfig); }} size="sm" disabled={disabled} diff --git a/apps/admin/core/components/authentication/password-config-switch.tsx b/apps/admin/core/components/authentication/password-config-switch.tsx index 00aa62825..5cbd9b03c 100644 --- a/apps/admin/core/components/authentication/password-config-switch.tsx +++ b/apps/admin/core/components/authentication/password-config-switch.tsx @@ -25,9 +25,8 @@ export const PasswordLoginConfiguration: React.FC = observer((props) => { { - Boolean(parseInt(enableEmailPassword)) === true - ? updateConfig("ENABLE_EMAIL_PASSWORD", "0") - : updateConfig("ENABLE_EMAIL_PASSWORD", "1"); + const newEnableEmailPassword = Boolean(parseInt(enableEmailPassword)) === true ? "0" : "1"; + updateConfig("ENABLE_EMAIL_PASSWORD", newEnableEmailPassword); }} size="sm" disabled={disabled} diff --git a/apps/admin/core/components/common/breadcrumb-link.tsx b/apps/admin/core/components/common/breadcrumb-link.tsx index d5a00ccaa..567b88d92 100644 --- a/apps/admin/core/components/common/breadcrumb-link.tsx +++ b/apps/admin/core/components/common/breadcrumb-link.tsx @@ -1,7 +1,7 @@ "use client"; import Link from "next/link"; -import { Tooltip } from "@plane/ui"; +import { Tooltip } from "@plane/propel/tooltip"; type Props = { label?: string; diff --git a/apps/admin/core/components/common/controller-input.tsx b/apps/admin/core/components/common/controller-input.tsx index ca8f30162..cbcbafb2d 100644 --- a/apps/admin/core/components/common/controller-input.tsx +++ b/apps/admin/core/components/common/controller-input.tsx @@ -13,7 +13,7 @@ type Props = { type: "text" | "password"; name: string; label: string; - description?: string | JSX.Element; + description?: string | React.ReactNode; placeholder: string; error: boolean; required: boolean; @@ -23,7 +23,7 @@ export type TControllerInputFormField = { key: string; type: "text" | "password"; label: string; - description?: string | JSX.Element; + 
description?: string | React.ReactNode; placeholder: string; error: boolean; required: boolean; diff --git a/apps/admin/core/components/common/copy-field.tsx b/apps/admin/core/components/common/copy-field.tsx index 6322356b4..cd8cfee53 100644 --- a/apps/admin/core/components/common/copy-field.tsx +++ b/apps/admin/core/components/common/copy-field.tsx @@ -9,14 +9,14 @@ import { Button, TOAST_TYPE, setToast } from "@plane/ui"; type Props = { label: string; url: string; - description: string | JSX.Element; + description: string | React.ReactNode; }; export type TCopyField = { key: string; label: string; url: string; - description: string | JSX.Element; + description: string | React.ReactNode; }; export const CopyField: React.FC = (props) => { diff --git a/apps/admin/core/components/common/logo-spinner.tsx b/apps/admin/core/components/common/logo-spinner.tsx index 621b685b8..fda44fca5 100644 --- a/apps/admin/core/components/common/logo-spinner.tsx +++ b/apps/admin/core/components/common/logo-spinner.tsx @@ -7,11 +7,11 @@ import LogoSpinnerLight from "@/public/images/logo-spinner-light.gif"; export const LogoSpinner = () => { const { resolvedTheme } = useTheme(); - const logoSrc = resolvedTheme === "dark" ? LogoSpinnerDark : LogoSpinnerLight; + const logoSrc = resolvedTheme === "dark" ? LogoSpinnerLight : LogoSpinnerDark; return (
- logo + logo
); }; diff --git a/apps/admin/core/components/common/password-strength-meter.tsx b/apps/admin/core/components/common/password-strength-meter.tsx deleted file mode 100644 index f4349b24a..000000000 --- a/apps/admin/core/components/common/password-strength-meter.tsx +++ /dev/null @@ -1,89 +0,0 @@ -"use client"; - -import { FC, useMemo } from "react"; -// plane internal packages -import { E_PASSWORD_STRENGTH } from "@plane/constants"; -import { cn, getPasswordStrength } from "@plane/utils"; - -type TPasswordStrengthMeter = { - password: string; - isFocused?: boolean; -}; - -export const PasswordStrengthMeter: FC = (props) => { - const { password, isFocused = false } = props; - // derived values - const strength = useMemo(() => getPasswordStrength(password), [password]); - const strengthBars = useMemo(() => { - switch (strength) { - case E_PASSWORD_STRENGTH.EMPTY: { - return { - bars: [`bg-custom-text-100`, `bg-custom-text-100`, `bg-custom-text-100`], - text: "Please enter your password.", - textColor: "text-custom-text-100", - }; - } - case E_PASSWORD_STRENGTH.LENGTH_NOT_VALID: { - return { - bars: [`bg-red-500`, `bg-custom-text-100`, `bg-custom-text-100`], - text: "Password length should me more than 8 characters.", - textColor: "text-red-500", - }; - } - case E_PASSWORD_STRENGTH.STRENGTH_NOT_VALID: { - return { - bars: [`bg-red-500`, `bg-custom-text-100`, `bg-custom-text-100`], - text: "Password is weak.", - textColor: "text-red-500", - }; - } - case E_PASSWORD_STRENGTH.STRENGTH_VALID: { - return { - bars: [`bg-green-500`, `bg-green-500`, `bg-green-500`], - text: "Password is strong.", - textColor: "text-green-500", - }; - } - default: { - return { - bars: [`bg-custom-text-100`, `bg-custom-text-100`, `bg-custom-text-100`], - text: "Please enter your password.", - textColor: "text-custom-text-100", - }; - } - } - }, [strength]); - - const isPasswordMeterVisible = isFocused ? true : strength === E_PASSWORD_STRENGTH.STRENGTH_VALID ? 
false : true; - - if (!isPasswordMeterVisible) return <>; - return ( -
-
-
- {strengthBars?.bars.map((color, index) => ( -
- ))} -
-
- {strengthBars?.text} -
-
- - {/*
- {PASSWORD_CRITERIA.map((criteria) => ( -
- - {criteria.label} -
- ))} -
*/} -
- ); -}; diff --git a/apps/admin/core/components/instance/failure.tsx b/apps/admin/core/components/instance/failure.tsx index 735a74c8d..fac8287a5 100644 --- a/apps/admin/core/components/instance/failure.tsx +++ b/apps/admin/core/components/instance/failure.tsx @@ -1,13 +1,15 @@ "use client"; import { FC } from "react"; +import { observer } from "mobx-react"; import Image from "next/image"; import { useTheme } from "next-themes"; import { Button } from "@plane/ui"; // assets +import { AuthHeader } from "@/app/(all)/(home)/auth-header"; import InstanceFailureDarkImage from "@/public/instance/instance-failure-dark.svg"; import InstanceFailureImage from "@/public/instance/instance-failure.svg"; -export const InstanceFailureView: FC = () => { +export const InstanceFailureView: FC = observer(() => { const { resolvedTheme } = useTheme(); const instanceImage = resolvedTheme === "dark" ? InstanceFailureDarkImage : InstanceFailureImage; @@ -17,22 +19,24 @@ export const InstanceFailureView: FC = () => { }; return ( -
-
-
- Plane Logo -

Unable to fetch instance details.

-

- We were unable to fetch the details of the instance.
- Fret not, it might just be a connectivity issue. -

-
-
- + <> + +
+
+
+ Plane Logo +

Unable to fetch instance details.

+

+ We were unable to fetch the details of the instance. Fret not, it might just be a connectivity issue. +

+
+
+ +
-
+ ); -}; +}); diff --git a/apps/admin/core/components/instance/form-header.tsx b/apps/admin/core/components/instance/form-header.tsx new file mode 100644 index 000000000..d915ad293 --- /dev/null +++ b/apps/admin/core/components/instance/form-header.tsx @@ -0,0 +1,8 @@ +"use client"; + +export const FormHeader = ({ heading, subHeading }: { heading: string; subHeading: string }) => ( +
+ {heading} + {subHeading} +
+); diff --git a/apps/admin/core/components/instance/instance-not-ready.tsx b/apps/admin/core/components/instance/instance-not-ready.tsx index 874013f52..2940e81e7 100644 --- a/apps/admin/core/components/instance/instance-not-ready.tsx +++ b/apps/admin/core/components/instance/instance-not-ready.tsx @@ -13,7 +13,7 @@ export const InstanceNotReady: FC = () => (

Welcome aboard Plane!

Plane Logo -

+

Get started by setting up your instance and workspace

diff --git a/apps/admin/core/components/instance/loading.tsx b/apps/admin/core/components/instance/loading.tsx index a21319d9e..27dc4ae6e 100644 --- a/apps/admin/core/components/instance/loading.tsx +++ b/apps/admin/core/components/instance/loading.tsx @@ -6,16 +6,12 @@ import LogoSpinnerLight from "@/public/images/logo-spinner-light.gif"; export const InstanceLoading = () => { const { resolvedTheme } = useTheme(); - const logoSrc = resolvedTheme === "dark" ? LogoSpinnerDark : LogoSpinnerLight; + + const logoSrc = resolvedTheme === "dark" ? LogoSpinnerLight : LogoSpinnerDark; return ( -
-
-
- logo -

Fetching instance details...

-
-
+
+ logo
); }; diff --git a/apps/admin/core/components/instance/setup-form.tsx b/apps/admin/core/components/instance/setup-form.tsx index 4e771e91b..0dbec972d 100644 --- a/apps/admin/core/components/instance/setup-form.tsx +++ b/apps/admin/core/components/instance/setup-form.tsx @@ -7,11 +7,12 @@ import { Eye, EyeOff } from "lucide-react"; // plane internal packages import { API_BASE_URL, E_PASSWORD_STRENGTH } from "@plane/constants"; import { AuthService } from "@plane/services"; -import { Button, Checkbox, Input, Spinner } from "@plane/ui"; +import { Button, Checkbox, Input, PasswordStrengthIndicator, Spinner } from "@plane/ui"; import { getPasswordStrength } from "@plane/utils"; // components +import { AuthHeader } from "@/app/(all)/(home)/auth-header"; import { Banner } from "@/components/common/banner"; -import { PasswordStrengthMeter } from "@/components/common/password-strength-meter"; +import { FormHeader } from "@/components/instance/form-header"; // service initialization const authService = new AuthService(); @@ -132,227 +133,221 @@ export const InstanceSetupForm: FC = (props) => { const renderPasswordMatchError = !isRetryPasswordInputFocused || confirmPassword.length >= password.length; return ( -
-
-
-

- Setup your Plane Instance -

-

- Post setup you will be able to manage this Plane instance. -

+ <> + +
+
+ + {errorData.type && + errorData?.message && + ![EErrorCodes.INVALID_EMAIL, EErrorCodes.INVALID_PASSWORD].includes(errorData.type) && ( + + )} +
setIsSubmitting(true)} + onError={() => setIsSubmitting(false)} + > + + + +
+
+ + handleFormChange("first_name", e.target.value)} + autoComplete="on" + autoFocus + /> +
+
+ + handleFormChange("last_name", e.target.value)} + autoComplete="on" + /> +
+
+ +
+ + handleFormChange("email", e.target.value)} + hasError={errorData.type && errorData.type === EErrorCodes.INVALID_EMAIL ? true : false} + autoComplete="on" + /> + {errorData.type && errorData.type === EErrorCodes.INVALID_EMAIL && errorData.message && ( +

{errorData.message}

+ )} +
+ +
+ + handleFormChange("company_name", e.target.value)} + /> +
+ +
+ +
+ handleFormChange("password", e.target.value)} + hasError={errorData.type && errorData.type === EErrorCodes.INVALID_PASSWORD ? true : false} + onFocus={() => setIsPasswordInputFocused(true)} + onBlur={() => setIsPasswordInputFocused(false)} + autoComplete="on" + /> + {showPassword.password ? ( + + ) : ( + + )} +
+ {errorData.type && errorData.type === EErrorCodes.INVALID_PASSWORD && errorData.message && ( +

{errorData.message}

+ )} + +
+ +
+ +
+ handleFormChange("confirm_password", e.target.value)} + placeholder="Confirm password" + className="w-full border border-custom-border-100 !bg-custom-background-100 pr-12 placeholder:text-custom-text-400" + onFocus={() => setIsRetryPasswordInputFocused(true)} + onBlur={() => setIsRetryPasswordInputFocused(false)} + /> + {showPassword.retypePassword ? ( + + ) : ( + + )} +
+ {!!formData.confirm_password && + formData.password !== formData.confirm_password && + renderPasswordMatchError && Passwords don{"'"}t match} +
+ +
+
+ handleFormChange("is_telemetry_enabled", !formData.is_telemetry_enabled)} + checked={formData.is_telemetry_enabled} + /> +
+ +
+ +
+ +
+
- - {errorData.type && - errorData?.message && - ![EErrorCodes.INVALID_EMAIL, EErrorCodes.INVALID_PASSWORD].includes(errorData.type) && ( - - )} - -
setIsSubmitting(true)} - onError={() => setIsSubmitting(false)} - > - - - -
-
- - handleFormChange("first_name", e.target.value)} - autoComplete="on" - autoFocus - /> -
-
- - handleFormChange("last_name", e.target.value)} - autoComplete="on" - /> -
-
- -
- - handleFormChange("email", e.target.value)} - hasError={errorData.type && errorData.type === EErrorCodes.INVALID_EMAIL ? true : false} - autoComplete="on" - /> - {errorData.type && errorData.type === EErrorCodes.INVALID_EMAIL && errorData.message && ( -

{errorData.message}

- )} -
- -
- - handleFormChange("company_name", e.target.value)} - /> -
- -
- -
- handleFormChange("password", e.target.value)} - hasError={errorData.type && errorData.type === EErrorCodes.INVALID_PASSWORD ? true : false} - onFocus={() => setIsPasswordInputFocused(true)} - onBlur={() => setIsPasswordInputFocused(false)} - autoComplete="on" - /> - {showPassword.password ? ( - - ) : ( - - )} -
- {errorData.type && errorData.type === EErrorCodes.INVALID_PASSWORD && errorData.message && ( -

{errorData.message}

- )} - -
- -
- -
- handleFormChange("confirm_password", e.target.value)} - placeholder="Confirm password" - className="w-full border border-onboarding-border-100 !bg-onboarding-background-200 pr-12 placeholder:text-onboarding-text-400" - onFocus={() => setIsRetryPasswordInputFocused(true)} - onBlur={() => setIsRetryPasswordInputFocused(false)} - /> - {showPassword.retypePassword ? ( - - ) : ( - - )} -
- {!!formData.confirm_password && - formData.password !== formData.confirm_password && - renderPasswordMatchError && Passwords don{"'"}t match} -
- -
-
- handleFormChange("is_telemetry_enabled", !formData.is_telemetry_enabled)} - checked={formData.is_telemetry_enabled} - /> -
- - - See More - -
- -
- -
-
-
+ ); }; diff --git a/apps/admin/core/components/workspace/list-item.tsx b/apps/admin/core/components/workspace/list-item.tsx index ae693eb72..85a2b3c61 100644 --- a/apps/admin/core/components/workspace/list-item.tsx +++ b/apps/admin/core/components/workspace/list-item.tsx @@ -2,7 +2,7 @@ import { observer } from "mobx-react"; import { ExternalLink } from "lucide-react"; // plane internal packages import { WEB_BASE_URL } from "@plane/constants"; -import { Tooltip } from "@plane/ui"; +import { Tooltip } from "@plane/propel/tooltip"; import { getFileURL } from "@plane/utils"; // hooks import { useWorkspace } from "@/hooks/store"; diff --git a/apps/admin/core/store/instance.store.ts b/apps/admin/core/store/instance.store.ts index 1179f04d6..764c95bf2 100644 --- a/apps/admin/core/store/instance.store.ts +++ b/apps/admin/core/store/instance.store.ts @@ -209,7 +209,7 @@ export class InstanceStore implements IInstanceStore { }); }); await this.instanceService.disableEmail(); - } catch (error) { + } catch (_error) { console.error("Error disabling the email"); this.instanceConfigurations = instanceConfigurations; } diff --git a/apps/admin/package.json b/apps/admin/package.json index 556c29a58..18acfb50f 100644 --- a/apps/admin/package.json +++ b/apps/admin/package.json @@ -1,7 +1,7 @@ { "name": "admin", "description": "Admin UI for Plane", - "version": "0.28.0", + "version": "1.0.0", "license": "AGPL-3.0", "private": true, "scripts": { @@ -10,7 +10,7 @@ "preview": "next build && next start", "start": "next start", "clean": "rm -rf .turbo && rm -rf .next && rm -rf node_modules && rm -rf dist", - "check:lint": "eslint . --max-warnings 0", + "check:lint": "eslint . --max-warnings 19", "check:types": "tsc --noEmit", "check:format": "prettier --check \"**/*.{ts,tsx,md,json,css,scss}\"", "fix:lint": "eslint . 
--fix", @@ -18,40 +18,38 @@ }, "dependencies": { "@headlessui/react": "^1.7.19", - "@plane/constants": "*", - "@plane/hooks": "*", - "@plane/propel": "*", - "@plane/services": "*", - "@plane/types": "*", - "@plane/ui": "*", - "@plane/utils": "*", - "@tailwindcss/typography": "^0.5.9", - "@types/lodash": "^4.17.0", + "@plane/constants": "workspace:*", + "@plane/hooks": "workspace:*", + "@plane/propel": "workspace:*", + "@plane/services": "workspace:*", + "@plane/types": "workspace:*", + "@plane/ui": "workspace:*", + "@plane/utils": "workspace:*", "autoprefixer": "10.4.14", - "axios": "1.11.0", - "lodash": "^4.17.21", - "lucide-react": "^0.469.0", - "mobx": "^6.12.0", - "mobx-react": "^9.1.1", - "next": "14.2.30", + "axios": "catalog:", + "lodash": "catalog:", + "lucide-react": "catalog:", + "mobx": "catalog:", + "mobx-react": "catalog:", + "next": "catalog:", "next-themes": "^0.2.1", "postcss": "^8.4.49", - "react": "^18.3.1", - "react-dom": "^18.3.1", + "react": "catalog:", + "react-dom": "catalog:", "react-hook-form": "7.51.5", - "swr": "^2.2.4", - "uuid": "^9.0.1", - "zxcvbn": "^4.4.2" + "sharp": "catalog:", + "swr": "catalog:", + "uuid": "catalog:" }, "devDependencies": { - "@plane/eslint-config": "*", - "@plane/tailwind-config": "*", - "@plane/typescript-config": "*", + "@plane/eslint-config": "workspace:*", + "@plane/tailwind-config": "workspace:*", + "@plane/typescript-config": "workspace:*", + "@types/lodash": "catalog:", "@types/node": "18.16.1", - "@types/react": "^18.3.11", - "@types/react-dom": "^18.2.18", + "@types/react": "catalog:", + "@types/react-dom": "catalog:", "@types/uuid": "^9.0.8", - "@types/zxcvbn": "^4.4.4", - "typescript": "5.8.3" + "typescript": "catalog:" } } diff --git a/apps/admin/public/favicon/android-chrome-192x192.png b/apps/admin/public/favicon/android-chrome-192x192.png index 62e95acfc..4a005e542 100644 Binary files a/apps/admin/public/favicon/android-chrome-192x192.png and b/apps/admin/public/favicon/android-chrome-192x192.png 
differ diff --git a/apps/admin/public/favicon/android-chrome-512x512.png b/apps/admin/public/favicon/android-chrome-512x512.png index 41400832b..27fafe822 100644 Binary files a/apps/admin/public/favicon/android-chrome-512x512.png and b/apps/admin/public/favicon/android-chrome-512x512.png differ diff --git a/apps/admin/public/favicon/apple-touch-icon.png b/apps/admin/public/favicon/apple-touch-icon.png index 5273d4951..a63126783 100644 Binary files a/apps/admin/public/favicon/apple-touch-icon.png and b/apps/admin/public/favicon/apple-touch-icon.png differ diff --git a/apps/admin/public/favicon/favicon-16x16.png b/apps/admin/public/favicon/favicon-16x16.png index 8ddbd49c0..af59ef010 100644 Binary files a/apps/admin/public/favicon/favicon-16x16.png and b/apps/admin/public/favicon/favicon-16x16.png differ diff --git a/apps/admin/public/favicon/favicon-32x32.png b/apps/admin/public/favicon/favicon-32x32.png index 80cbe7a68..16a1271ad 100644 Binary files a/apps/admin/public/favicon/favicon-32x32.png and b/apps/admin/public/favicon/favicon-32x32.png differ diff --git a/apps/admin/public/favicon/favicon.ico b/apps/admin/public/favicon/favicon.ico index 9094a07c7..613b1a313 100644 Binary files a/apps/admin/public/favicon/favicon.ico and b/apps/admin/public/favicon/favicon.ico differ diff --git a/apps/admin/public/favicon/site.webmanifest b/apps/admin/public/favicon/site.webmanifest index 0b08af126..1d4105781 100644 --- a/apps/admin/public/favicon/site.webmanifest +++ b/apps/admin/public/favicon/site.webmanifest @@ -2,8 +2,8 @@ "name": "", "short_name": "", "icons": [ - { "src": "/android-chrome-192x192.png", "sizes": "192x192", "type": "image/png" }, - { "src": "/android-chrome-512x512.png", "sizes": "512x512", "type": "image/png" } + { "src": "/favicon/android-chrome-192x192.png", "sizes": "192x192", "type": "image/png" }, + { "src": "/favicon/android-chrome-512x512.png", "sizes": "512x512", "type": "image/png" } ], "theme_color": "#ffffff", "background_color": "#ffffff", 
diff --git a/apps/admin/public/images/logo-spinner-dark.gif b/apps/admin/public/images/logo-spinner-dark.gif index 4e0a1deb7..8bd083255 100644 Binary files a/apps/admin/public/images/logo-spinner-dark.gif and b/apps/admin/public/images/logo-spinner-dark.gif differ diff --git a/apps/admin/public/images/logo-spinner-light.gif b/apps/admin/public/images/logo-spinner-light.gif index 7c9bfbe0e..8b5710314 100644 Binary files a/apps/admin/public/images/logo-spinner-light.gif and b/apps/admin/public/images/logo-spinner-light.gif differ diff --git a/apps/admin/public/plane-logos/black-horizontal-with-blue-logo.png b/apps/admin/public/plane-logos/black-horizontal-with-blue-logo.png deleted file mode 100644 index c14505a6f..000000000 Binary files a/apps/admin/public/plane-logos/black-horizontal-with-blue-logo.png and /dev/null differ diff --git a/apps/admin/public/plane-logos/blue-without-text.png b/apps/admin/public/plane-logos/blue-without-text.png deleted file mode 100644 index ea94aec79..000000000 Binary files a/apps/admin/public/plane-logos/blue-without-text.png and /dev/null differ diff --git a/apps/admin/public/plane-logos/white-horizontal-with-blue-logo.png b/apps/admin/public/plane-logos/white-horizontal-with-blue-logo.png deleted file mode 100644 index 97560fb9f..000000000 Binary files a/apps/admin/public/plane-logos/white-horizontal-with-blue-logo.png and /dev/null differ diff --git a/apps/admin/styles/globals.css b/apps/admin/styles/globals.css index d5554ce2f..737015d26 100644 --- a/apps/admin/styles/globals.css +++ b/apps/admin/styles/globals.css @@ -24,24 +24,24 @@ :root { color-scheme: light !important; - --color-primary-10: 236, 241, 255; - --color-primary-20: 217, 228, 255; - --color-primary-30: 197, 214, 255; - --color-primary-40: 178, 200, 255; - --color-primary-50: 159, 187, 255; - --color-primary-60: 140, 173, 255; - --color-primary-70: 121, 159, 255; - --color-primary-80: 101, 145, 255; - --color-primary-90: 82, 132, 255; - --color-primary-100: 63, 118, 
255; - --color-primary-200: 57, 106, 230; - --color-primary-300: 50, 94, 204; - --color-primary-400: 44, 83, 179; - --color-primary-500: 38, 71, 153; - --color-primary-600: 32, 59, 128; - --color-primary-700: 25, 47, 102; - --color-primary-800: 19, 35, 76; - --color-primary-900: 13, 24, 51; + --color-primary-10: 229, 243, 250; + --color-primary-20: 216, 237, 248; + --color-primary-30: 199, 229, 244; + --color-primary-40: 169, 214, 239; + --color-primary-50: 144, 202, 234; + --color-primary-60: 109, 186, 227; + --color-primary-70: 75, 170, 221; + --color-primary-80: 41, 154, 214; + --color-primary-90: 34, 129, 180; + --color-primary-100: 0, 99, 153; + --color-primary-200: 0, 92, 143; + --color-primary-300: 0, 86, 133; + --color-primary-400: 0, 77, 117; + --color-primary-500: 0, 66, 102; + --color-primary-600: 0, 53, 82; + --color-primary-700: 0, 43, 66; + --color-primary-800: 0, 33, 51; + --color-primary-900: 0, 23, 36; --color-background-100: 255, 255, 255; /* primary bg */ --color-background-90: 247, 247, 247; /* secondary bg */ @@ -135,28 +135,6 @@ --color-border-300: 212, 212, 212; /* strong border- 1 */ --color-border-400: 185, 185, 185; /* strong border- 2 */ - /* onboarding colors */ - --gradient-onboarding-100: linear-gradient(106deg, #f2f6ff 29.8%, #e1eaff 99.34%); - --gradient-onboarding-200: linear-gradient(129deg, rgba(255, 255, 255, 0) -22.23%, rgba(255, 255, 255, 0.8) 62.98%); - --gradient-onboarding-300: linear-gradient(164deg, #fff 4.25%, rgba(255, 255, 255, 0.06) 93.5%); - --gradient-onboarding-400: linear-gradient(129deg, rgba(255, 255, 255, 0) -22.23%, rgba(255, 255, 255, 0.8) 62.98%); - - --color-onboarding-text-100: 23, 23, 23; - --color-onboarding-text-200: 58, 58, 58; - --color-onboarding-text-300: 82, 82, 82; - --color-onboarding-text-400: 163, 163, 163; - - --color-onboarding-background-100: 236, 241, 255; - --color-onboarding-background-200: 255, 255, 255; - --color-onboarding-background-300: 236, 241, 255; - 
--color-onboarding-background-400: 177, 206, 250; - - --color-onboarding-border-100: 229, 229, 229; - --color-onboarding-border-200: 217, 228, 255; - --color-onboarding-border-300: 229, 229, 229, 0.5; - - --color-onboarding-shadow-sm: 0px 4px 20px 0px rgba(126, 139, 171, 0.1); - /* toast theme */ --color-toast-success-text: 62, 155, 79; --color-toast-error-text: 220, 62, 66; @@ -197,6 +175,25 @@ [data-theme="dark-contrast"] { color-scheme: dark !important; + --color-primary-10: 8, 31, 43; + --color-primary-20: 10, 37, 51; + --color-primary-30: 13, 49, 69; + --color-primary-40: 16, 58, 81; + --color-primary-50: 18, 68, 94; + --color-primary-60: 23, 86, 120; + --color-primary-70: 28, 104, 146; + --color-primary-80: 31, 116, 163; + --color-primary-90: 34, 129, 180; + --color-primary-100: 40, 146, 204; + --color-primary-200: 41, 154, 214; + --color-primary-300: 75, 170, 221; + --color-primary-400: 109, 186, 227; + --color-primary-500: 144, 202, 234; + --color-primary-600: 169, 214, 239; + --color-primary-700: 199, 229, 244; + --color-primary-800: 216, 237, 248; + --color-primary-900: 229, 243, 250; + --color-background-100: 25, 25, 25; /* primary bg */ --color-background-90: 32, 32, 32; /* secondary bg */ --color-background-80: 44, 44, 44; /* tertiary bg */ @@ -225,27 +222,6 @@ --color-border-300: 46, 46, 46; /* strong border- 1 */ --color-border-400: 58, 58, 58; /* strong border- 2 */ - /* onboarding colors */ - --gradient-onboarding-100: linear-gradient(106deg, #18191b 25.17%, #18191b 99.34%); - --gradient-onboarding-200: linear-gradient(129deg, rgba(47, 49, 53, 0.8) -22.23%, rgba(33, 34, 37, 0.8) 62.98%); - --gradient-onboarding-300: linear-gradient(167deg, rgba(47, 49, 53, 0.45) 19.22%, #212225 98.48%); - - --color-onboarding-text-100: 237, 238, 240; - --color-onboarding-text-200: 176, 180, 187; - --color-onboarding-text-300: 118, 123, 132; - --color-onboarding-text-400: 105, 110, 119; - - --color-onboarding-background-100: 54, 58, 64; - 
--color-onboarding-background-200: 40, 42, 45; - --color-onboarding-background-300: 40, 42, 45; - --color-onboarding-background-400: 67, 72, 79; - - --color-onboarding-border-100: 54, 58, 64; - --color-onboarding-border-200: 54, 58, 64; - --color-onboarding-border-300: 34, 35, 38, 0.5; - - --color-onboarding-shadow-sm: 0px 4px 20px 0px rgba(39, 44, 56, 0.1); - /* toast theme */ --color-toast-success-text: 178, 221, 181; --color-toast-error-text: 206, 44, 49; @@ -286,25 +262,6 @@ [data-theme="dark"], [data-theme="light-contrast"], [data-theme="dark-contrast"] { - --color-primary-10: 236, 241, 255; - --color-primary-20: 217, 228, 255; - --color-primary-30: 197, 214, 255; - --color-primary-40: 178, 200, 255; - --color-primary-50: 159, 187, 255; - --color-primary-60: 140, 173, 255; - --color-primary-70: 121, 159, 255; - --color-primary-80: 101, 145, 255; - --color-primary-90: 82, 132, 255; - --color-primary-100: 63, 118, 255; - --color-primary-200: 57, 106, 230; - --color-primary-300: 50, 94, 204; - --color-primary-400: 44, 83, 179; - --color-primary-500: 38, 71, 153; - --color-primary-600: 32, 59, 128; - --color-primary-700: 25, 47, 102; - --color-primary-800: 19, 35, 76; - --color-primary-900: 13, 24, 51; - --color-sidebar-background-100: var(--color-background-100); /* primary sidebar bg */ --color-sidebar-background-90: var(--color-background-90); /* secondary sidebar bg */ --color-sidebar-background-80: var(--color-background-80); /* tertiary sidebar bg */ diff --git a/apps/api/package.json b/apps/api/package.json index 6b374e611..97122880f 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -1,6 +1,6 @@ { "name": "plane-api", - "version": "0.28.0", + "version": "1.0.0", "license": "AGPL-3.0", "private": true, "description": "API server powering Plane's backend" diff --git a/apps/api/plane/api/apps.py b/apps/api/plane/api/apps.py index 6ba36e7e5..b48a9a949 100644 --- a/apps/api/plane/api/apps.py +++ b/apps/api/plane/api/apps.py @@ -3,3 +3,10 @@ from 
django.apps import AppConfig class ApiConfig(AppConfig): name = "plane.api" + + def ready(self): + # Import authentication extensions to register them with drf-spectacular + try: + import plane.utils.openapi.auth # noqa + except ImportError: + pass \ No newline at end of file diff --git a/apps/api/plane/api/serializers/__init__.py b/apps/api/plane/api/serializers/__init__.py index 8c84b2328..7596915eb 100644 --- a/apps/api/plane/api/serializers/__init__.py +++ b/apps/api/plane/api/serializers/__init__.py @@ -1,8 +1,14 @@ from .user import UserLiteSerializer from .workspace import WorkspaceLiteSerializer -from .project import ProjectSerializer, ProjectLiteSerializer +from .project import ( + ProjectSerializer, + ProjectLiteSerializer, + ProjectCreateSerializer, + ProjectUpdateSerializer, +) from .issue import ( IssueSerializer, + LabelCreateUpdateSerializer, LabelSerializer, IssueLinkSerializer, IssueCommentSerializer, @@ -10,9 +16,40 @@ from .issue import ( IssueActivitySerializer, IssueExpandSerializer, IssueLiteSerializer, + IssueAttachmentUploadSerializer, + IssueSearchSerializer, + IssueCommentCreateSerializer, + IssueLinkCreateSerializer, + IssueLinkUpdateSerializer, ) from .state import StateLiteSerializer, StateSerializer -from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer -from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer -from .intake import IntakeIssueSerializer +from .cycle import ( + CycleSerializer, + CycleIssueSerializer, + CycleLiteSerializer, + CycleIssueRequestSerializer, + TransferCycleIssueRequestSerializer, + CycleCreateSerializer, + CycleUpdateSerializer, +) +from .module import ( + ModuleSerializer, + ModuleIssueSerializer, + ModuleLiteSerializer, + ModuleIssueRequestSerializer, + ModuleCreateSerializer, + ModuleUpdateSerializer, +) +from .intake import ( + IntakeIssueSerializer, + IntakeIssueCreateSerializer, + IntakeIssueUpdateSerializer, +) from .estimate import 
EstimatePointSerializer +from .asset import ( + UserAssetUploadSerializer, + AssetUpdateSerializer, + GenericAssetUploadSerializer, + GenericAssetUpdateSerializer, + FileAssetSerializer, +) diff --git a/apps/api/plane/api/serializers/asset.py b/apps/api/plane/api/serializers/asset.py new file mode 100644 index 000000000..b63dc7ebb --- /dev/null +++ b/apps/api/plane/api/serializers/asset.py @@ -0,0 +1,123 @@ +# Third party imports +from rest_framework import serializers + +# Module imports +from .base import BaseSerializer +from plane.db.models import FileAsset + + +class UserAssetUploadSerializer(serializers.Serializer): + """ + Serializer for user asset upload requests. + + This serializer validates the metadata required to generate a presigned URL + for uploading user profile assets (avatar or cover image) directly to S3 storage. + Supports JPEG, PNG, WebP, JPG, and GIF image formats with size validation. + """ + + name = serializers.CharField(help_text="Original filename of the asset") + type = serializers.ChoiceField( + choices=[ + ("image/jpeg", "JPEG"), + ("image/png", "PNG"), + ("image/webp", "WebP"), + ("image/jpg", "JPG"), + ("image/gif", "GIF"), + ], + default="image/jpeg", + help_text="MIME type of the file", + style={"placeholder": "image/jpeg"}, + ) + size = serializers.IntegerField(help_text="File size in bytes") + entity_type = serializers.ChoiceField( + choices=[ + (FileAsset.EntityTypeContext.USER_AVATAR, "User Avatar"), + (FileAsset.EntityTypeContext.USER_COVER, "User Cover"), + ], + help_text="Type of user asset", + ) + + +class AssetUpdateSerializer(serializers.Serializer): + """ + Serializer for asset status updates after successful upload completion. + + Handles post-upload asset metadata updates including attribute modifications + and upload confirmation for S3-based file storage workflows. 
+ """ + + attributes = serializers.JSONField( + required=False, help_text="Additional attributes to update for the asset" + ) + + +class GenericAssetUploadSerializer(serializers.Serializer): + """ + Serializer for generic asset upload requests with project association. + + Validates metadata for generating presigned URLs for workspace assets including + project association, external system tracking, and file validation for + document management and content storage workflows. + """ + + name = serializers.CharField(help_text="Original filename of the asset") + type = serializers.CharField(required=False, help_text="MIME type of the file") + size = serializers.IntegerField(help_text="File size in bytes") + project_id = serializers.UUIDField( + required=False, + help_text="UUID of the project to associate with the asset", + style={"placeholder": "123e4567-e89b-12d3-a456-426614174000"}, + ) + external_id = serializers.CharField( + required=False, + help_text="External identifier for the asset (for integration tracking)", + ) + external_source = serializers.CharField( + required=False, help_text="External source system (for integration tracking)" + ) + + +class GenericAssetUpdateSerializer(serializers.Serializer): + """ + Serializer for generic asset upload confirmation and status management. + + Handles post-upload status updates for workspace assets including + upload completion marking and metadata finalization. + """ + + is_uploaded = serializers.BooleanField( + default=True, help_text="Whether the asset has been successfully uploaded" + ) + + +class FileAssetSerializer(BaseSerializer): + """ + Comprehensive file asset serializer with complete metadata and URL generation. + + Provides full file asset information including storage metadata, access URLs, + relationship data, and upload status for complete asset management workflows. 
+ """ + + asset_url = serializers.CharField(read_only=True) + + class Meta: + model = FileAsset + fields = "__all__" + read_only_fields = [ + "id", + "created_by", + "updated_by", + "created_at", + "updated_at", + "workspace", + "project", + "issue", + "comment", + "page", + "draft_issue", + "user", + "is_deleted", + "deleted_at", + "storage_metadata", + "asset_url", + ] diff --git a/apps/api/plane/api/serializers/base.py b/apps/api/plane/api/serializers/base.py index 4b1e54707..46bd398bc 100644 --- a/apps/api/plane/api/serializers/base.py +++ b/apps/api/plane/api/serializers/base.py @@ -3,6 +3,13 @@ from rest_framework import serializers class BaseSerializer(serializers.ModelSerializer): + """ + Base serializer providing common functionality for all model serializers. + + Features field filtering, dynamic expansion of related fields, and standardized + primary key handling for consistent API responses across the application. + """ + id = serializers.PrimaryKeyRelatedField(read_only=True) def __init__(self, *args, **kwargs): @@ -84,6 +91,7 @@ class BaseSerializer(serializers.ModelSerializer): "project_lead": UserLiteSerializer, "state": StateLiteSerializer, "created_by": UserLiteSerializer, + "updated_by": UserLiteSerializer, "issue": IssueSerializer, "actor": UserLiteSerializer, "owned_by": UserLiteSerializer, diff --git a/apps/api/plane/api/serializers/cycle.py b/apps/api/plane/api/serializers/cycle.py index 7a78b6664..cf057d842 100644 --- a/apps/api/plane/api/serializers/cycle.py +++ b/apps/api/plane/api/serializers/cycle.py @@ -8,16 +8,13 @@ from plane.db.models import Cycle, CycleIssue from plane.utils.timezone_converter import convert_to_utc -class CycleSerializer(BaseSerializer): - total_issues = serializers.IntegerField(read_only=True) - cancelled_issues = serializers.IntegerField(read_only=True) - completed_issues = serializers.IntegerField(read_only=True) - started_issues = serializers.IntegerField(read_only=True) - unstarted_issues = 
serializers.IntegerField(read_only=True) - backlog_issues = serializers.IntegerField(read_only=True) - total_estimates = serializers.FloatField(read_only=True) - completed_estimates = serializers.FloatField(read_only=True) - started_estimates = serializers.FloatField(read_only=True) +class CycleCreateSerializer(BaseSerializer): + """ + Serializer for creating cycles with timezone handling and date validation. + + Manages cycle creation including project timezone conversion, date range validation, + and UTC normalization for time-bound iteration planning and sprint management. + """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -27,6 +24,29 @@ class CycleSerializer(BaseSerializer): self.fields["start_date"].timezone = project_timezone self.fields["end_date"].timezone = project_timezone + class Meta: + model = Cycle + fields = [ + "name", + "description", + "start_date", + "end_date", + "owned_by", + "external_source", + "external_id", + "timezone", + ] + read_only_fields = [ + "id", + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + "deleted_at", + ] + def validate(self, data): if ( data.get("start_date", None) is not None @@ -59,6 +79,40 @@ class CycleSerializer(BaseSerializer): ) return data + +class CycleUpdateSerializer(CycleCreateSerializer): + """ + Serializer for updating cycles with enhanced ownership management. + + Extends cycle creation with update-specific features including ownership + assignment and modification tracking for cycle lifecycle management. + """ + + class Meta(CycleCreateSerializer.Meta): + model = Cycle + fields = CycleCreateSerializer.Meta.fields + [ + "owned_by", + ] + + +class CycleSerializer(BaseSerializer): + """ + Cycle serializer with comprehensive project metrics and time tracking. + + Provides cycle details including work item counts by status, progress estimates, + and time-bound iteration data for project management and sprint planning. 
+ """ + + total_issues = serializers.IntegerField(read_only=True) + cancelled_issues = serializers.IntegerField(read_only=True) + completed_issues = serializers.IntegerField(read_only=True) + started_issues = serializers.IntegerField(read_only=True) + unstarted_issues = serializers.IntegerField(read_only=True) + backlog_issues = serializers.IntegerField(read_only=True) + total_estimates = serializers.FloatField(read_only=True) + completed_estimates = serializers.FloatField(read_only=True) + started_estimates = serializers.FloatField(read_only=True) + class Meta: model = Cycle fields = "__all__" @@ -76,6 +130,13 @@ class CycleSerializer(BaseSerializer): class CycleIssueSerializer(BaseSerializer): + """ + Serializer for cycle-issue relationships with sub-issue counting. + + Manages the association between cycles and work items, including + hierarchical issue tracking for nested work item structures. + """ + sub_issues_count = serializers.IntegerField(read_only=True) class Meta: @@ -85,6 +146,39 @@ class CycleIssueSerializer(BaseSerializer): class CycleLiteSerializer(BaseSerializer): + """ + Lightweight cycle serializer for minimal data transfer. + + Provides essential cycle information without computed metrics, + optimized for list views and reference lookups. + """ + class Meta: model = Cycle fields = "__all__" + + +class CycleIssueRequestSerializer(serializers.Serializer): + """ + Serializer for bulk work item assignment to cycles. + + Validates work item ID lists for batch operations including + cycle assignment and sprint planning workflows. + """ + + issues = serializers.ListField( + child=serializers.UUIDField(), help_text="List of issue IDs to add to the cycle" + ) + + +class TransferCycleIssueRequestSerializer(serializers.Serializer): + """ + Serializer for transferring work items between cycles. + + Handles work item migration between cycles including validation + and relationship updates for sprint reallocation workflows. 
+ """ + + new_cycle_id = serializers.UUIDField( + help_text="ID of the target cycle to transfer issues to" + ) diff --git a/apps/api/plane/api/serializers/estimate.py b/apps/api/plane/api/serializers/estimate.py index 0d9235dad..b670006d5 100644 --- a/apps/api/plane/api/serializers/estimate.py +++ b/apps/api/plane/api/serializers/estimate.py @@ -4,6 +4,13 @@ from .base import BaseSerializer class EstimatePointSerializer(BaseSerializer): + """ + Serializer for project estimation points and story point values. + + Handles numeric estimation data for work item sizing and sprint planning, + providing standardized point values for project velocity calculations. + """ + class Meta: model = EstimatePoint fields = ["id", "value"] diff --git a/apps/api/plane/api/serializers/intake.py b/apps/api/plane/api/serializers/intake.py index 69c85ed61..32f8bf2da 100644 --- a/apps/api/plane/api/serializers/intake.py +++ b/apps/api/plane/api/serializers/intake.py @@ -1,11 +1,77 @@ # Module improts from .base import BaseSerializer from .issue import IssueExpandSerializer -from plane.db.models import IntakeIssue +from plane.db.models import IntakeIssue, Issue from rest_framework import serializers +class IssueForIntakeSerializer(BaseSerializer): + """ + Serializer for work item data within intake submissions. + + Handles essential work item fields for intake processing including + content validation and priority assignment for triage workflows. + """ + + class Meta: + model = Issue + fields = [ + "name", + "description", + "description_html", + "priority", + ] + read_only_fields = [ + "id", + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class IntakeIssueCreateSerializer(BaseSerializer): + """ + Serializer for creating intake work items with embedded issue data. + + Manages intake work item creation including nested issue creation, + status assignment, and source tracking for issue queue management. 
+ """ + + issue = IssueForIntakeSerializer(help_text="Issue data for the intake issue") + + class Meta: + model = IntakeIssue + fields = [ + "issue", + "intake", + "status", + "snoozed_till", + "duplicate_to", + "source", + "source_email", + ] + read_only_fields = [ + "id", + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + class IntakeIssueSerializer(BaseSerializer): + """ + Comprehensive serializer for intake work items with expanded issue details. + + Provides full intake work item data including embedded issue information, + status tracking, and triage metadata for issue queue management. + """ + issue_detail = IssueExpandSerializer(read_only=True, source="issue") inbox = serializers.UUIDField(source="intake.id", read_only=True) @@ -22,3 +88,53 @@ class IntakeIssueSerializer(BaseSerializer): "created_at", "updated_at", ] + + +class IntakeIssueUpdateSerializer(BaseSerializer): + """ + Serializer for updating intake work items and their associated issues. + + Handles intake work item modifications including status changes, triage decisions, + and embedded issue updates for issue queue processing workflows. + """ + + issue = IssueForIntakeSerializer( + required=False, help_text="Issue data to update in the intake issue" + ) + + class Meta: + model = IntakeIssue + fields = [ + "status", + "snoozed_till", + "duplicate_to", + "source", + "source_email", + "issue", + ] + read_only_fields = [ + "id", + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + +class IssueDataSerializer(serializers.Serializer): + """ + Serializer for nested work item data in intake request payloads. + + Validates core work item fields within intake requests including + content formatting, priority levels, and metadata for issue creation. 
+ """ + + name = serializers.CharField(max_length=255, help_text="Issue name") + description_html = serializers.CharField( + required=False, allow_null=True, help_text="Issue description HTML" + ) + priority = serializers.ChoiceField( + choices=Issue.PRIORITY_CHOICES, default="none", help_text="Issue priority" + ) diff --git a/apps/api/plane/api/serializers/issue.py b/apps/api/plane/api/serializers/issue.py index 20f967e3b..075823cbf 100644 --- a/apps/api/plane/api/serializers/issue.py +++ b/apps/api/plane/api/serializers/issue.py @@ -24,7 +24,6 @@ from plane.db.models import ( ) from plane.utils.content_validator import ( validate_html_content, - validate_json_content, validate_binary_data, ) @@ -40,6 +39,13 @@ from django.core.validators import URLValidator class IssueSerializer(BaseSerializer): + """ + Comprehensive work item serializer with full relationship management. + + Handles complete work item lifecycle including assignees, labels, validation, + and related model updates. Supports dynamic field expansion and HTML content processing. 
+ """ + assignees = serializers.ListField( child=serializers.PrimaryKeyRelatedField( queryset=User.objects.values_list("id", flat=True) @@ -82,20 +88,24 @@ class IssueSerializer(BaseSerializer): raise serializers.ValidationError("Invalid HTML passed") # Validate description content for security - if data.get("description"): - is_valid, error_msg = validate_json_content(data["description"]) - if not is_valid: - raise serializers.ValidationError({"description": error_msg}) - if data.get("description_html"): - is_valid, error_msg = validate_html_content(data["description_html"]) + is_valid, error_msg, sanitized_html = validate_html_content( + data["description_html"] + ) if not is_valid: - raise serializers.ValidationError({"description_html": error_msg}) + raise serializers.ValidationError( + {"error": "html content is not valid"} + ) + # Update the data with sanitized HTML if available + if sanitized_html is not None: + data["description_html"] = sanitized_html if data.get("description_binary"): is_valid, error_msg = validate_binary_data(data["description_binary"]) if not is_valid: - raise serializers.ValidationError({"description_binary": error_msg}) + raise serializers.ValidationError( + {"description_binary": "Invalid binary data"} + ) # Validate assignees are from project if data.get("assignees", []): @@ -336,13 +346,58 @@ class IssueSerializer(BaseSerializer): class IssueLiteSerializer(BaseSerializer): + """ + Lightweight work item serializer for minimal data transfer. + + Provides essential work item identifiers optimized for list views, + references, and performance-critical operations. + """ + class Meta: model = Issue fields = ["id", "sequence_id", "project_id"] read_only_fields = fields +class LabelCreateUpdateSerializer(BaseSerializer): + """ + Serializer for creating and updating work item labels. + + Manages label metadata including colors, descriptions, hierarchy, + and sorting for work item categorization and filtering. 
+ """ + + class Meta: + model = Label + fields = [ + "name", + "color", + "description", + "external_source", + "external_id", + "parent", + "sort_order", + ] + read_only_fields = [ + "id", + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + "deleted_at", + ] + + class LabelSerializer(BaseSerializer): + """ + Full serializer for work item labels with complete metadata. + + Provides comprehensive label information including hierarchical relationships, + visual properties, and organizational data for work item tagging. + """ + class Meta: model = Label fields = "__all__" @@ -358,10 +413,17 @@ class LabelSerializer(BaseSerializer): ] -class IssueLinkSerializer(BaseSerializer): +class IssueLinkCreateSerializer(BaseSerializer): + """ + Serializer for creating work item external links with validation. + + Handles URL validation, format checking, and duplicate prevention + for attaching external resources to work items. + """ + class Meta: model = IssueLink - fields = "__all__" + fields = ["url", "issue_id"] read_only_fields = [ "id", "workspace", @@ -397,6 +459,22 @@ class IssueLinkSerializer(BaseSerializer): ) return IssueLink.objects.create(**validated_data) + +class IssueLinkUpdateSerializer(IssueLinkCreateSerializer): + """ + Serializer for updating work item external links. + + Extends link creation with update-specific validation to prevent + URL conflicts and maintain link integrity during modifications. + """ + + class Meta(IssueLinkCreateSerializer.Meta): + model = IssueLink + fields = IssueLinkCreateSerializer.Meta.fields + [ + "issue_id", + ] + read_only_fields = IssueLinkCreateSerializer.Meta.read_only_fields + def update(self, instance, validated_data): if ( IssueLink.objects.filter( @@ -412,7 +490,37 @@ class IssueLinkSerializer(BaseSerializer): return super().update(instance, validated_data) +class IssueLinkSerializer(BaseSerializer): + """ + Full serializer for work item external links. 
+ + Provides complete link information including metadata and timestamps + for managing external resource associations with work items. + """ + + class Meta: + model = IssueLink + fields = "__all__" + read_only_fields = [ + "id", + "workspace", + "project", + "issue", + "created_by", + "updated_by", + "created_at", + "updated_at", + ] + + class IssueAttachmentSerializer(BaseSerializer): + """ + Serializer for work item file attachments. + + Manages file asset associations with work items including metadata, + storage information, and access control for document management. + """ + class Meta: model = FileAsset fields = "__all__" @@ -426,7 +534,47 @@ class IssueAttachmentSerializer(BaseSerializer): ] +class IssueCommentCreateSerializer(BaseSerializer): + """ + Serializer for creating work item comments. + + Handles comment creation with JSON and HTML content support, + access control, and external integration tracking. + """ + + class Meta: + model = IssueComment + fields = [ + "comment_json", + "comment_html", + "access", + "external_source", + "external_id", + ] + read_only_fields = [ + "id", + "workspace", + "project", + "issue", + "created_by", + "updated_by", + "created_at", + "updated_at", + "deleted_at", + "actor", + "comment_stripped", + "edited_at", + ] + + class IssueCommentSerializer(BaseSerializer): + """ + Full serializer for work item comments with membership context. + + Provides complete comment data including member status, content formatting, + and edit tracking for collaborative work item discussions. + """ + is_member = serializers.BooleanField(read_only=True) class Meta: @@ -456,12 +604,26 @@ class IssueCommentSerializer(BaseSerializer): class IssueActivitySerializer(BaseSerializer): + """ + Serializer for work item activity and change history. + + Tracks and represents work item modifications, state changes, + and user interactions for audit trails and activity feeds. 
+ """ + class Meta: model = IssueActivity exclude = ["created_by", "updated_by"] class CycleIssueSerializer(BaseSerializer): + """ + Serializer for work items within cycles. + + Provides cycle context for work items including cycle metadata + and timing information for sprint and iteration management. + """ + cycle = CycleSerializer(read_only=True) class Meta: @@ -469,6 +631,13 @@ class CycleIssueSerializer(BaseSerializer): class ModuleIssueSerializer(BaseSerializer): + """ + Serializer for work items within modules. + + Provides module context for work items including module metadata + and organizational information for feature-based work grouping. + """ + module = ModuleSerializer(read_only=True) class Meta: @@ -476,12 +645,26 @@ class ModuleIssueSerializer(BaseSerializer): class LabelLiteSerializer(BaseSerializer): + """ + Lightweight label serializer for minimal data transfer. + + Provides essential label information with visual properties, + optimized for UI display and performance-critical operations. + """ + class Meta: model = Label fields = ["id", "name", "color"] class IssueExpandSerializer(BaseSerializer): + """ + Extended work item serializer with full relationship expansion. + + Provides work items with expanded related data including cycles, modules, + labels, assignees, and states for comprehensive data representation. 
+ """ + cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True) module = ModuleLiteSerializer(source="issue_module.module", read_only=True) @@ -489,7 +672,6 @@ class IssueExpandSerializer(BaseSerializer): assignees = serializers.SerializerMethodField() state = StateLiteSerializer(read_only=True) - def get_labels(self, obj): expand = self.context.get("expand", []) if "labels" in expand: @@ -507,7 +689,6 @@ class IssueExpandSerializer(BaseSerializer): ).data return [ia.assignee_id for ia in obj.issue_assignee.all()] - class Meta: model = Issue fields = "__all__" @@ -520,3 +701,41 @@ class IssueExpandSerializer(BaseSerializer): "created_at", "updated_at", ] + + +class IssueAttachmentUploadSerializer(serializers.Serializer): + """ + Serializer for work item attachment upload request validation. + + Handles file upload metadata validation including size, type, and external + integration tracking for secure work item document attachment workflows. + """ + + name = serializers.CharField(help_text="Original filename of the asset") + type = serializers.CharField(required=False, help_text="MIME type of the file") + size = serializers.IntegerField(help_text="File size in bytes") + external_id = serializers.CharField( + required=False, + help_text="External identifier for the asset (for integration tracking)", + ) + external_source = serializers.CharField( + required=False, help_text="External source system (for integration tracking)" + ) + + +class IssueSearchSerializer(serializers.Serializer): + """ + Serializer for work item search result data formatting. + + Provides standardized search result structure including work item identifiers, + project context, and workspace information for search API responses. 
+ """ + + id = serializers.CharField(required=True, help_text="Issue ID") + name = serializers.CharField(required=True, help_text="Issue name") + sequence_id = serializers.CharField(required=True, help_text="Issue sequence ID") + project__identifier = serializers.CharField( + required=True, help_text="Project identifier" + ) + project_id = serializers.CharField(required=True, help_text="Project ID") + workspace__slug = serializers.CharField(required=True, help_text="Workspace slug") diff --git a/apps/api/plane/api/serializers/module.py b/apps/api/plane/api/serializers/module.py index ace4e15c8..167386997 100644 --- a/apps/api/plane/api/serializers/module.py +++ b/apps/api/plane/api/serializers/module.py @@ -13,24 +13,33 @@ from plane.db.models import ( ) -class ModuleSerializer(BaseSerializer): +class ModuleCreateSerializer(BaseSerializer): + """ + Serializer for creating modules with member validation and date checking. + + Handles module creation including member assignment validation, date range verification, + and duplicate name prevention for feature-based project organization setup. 
+ """ + members = serializers.ListField( - child=serializers.PrimaryKeyRelatedField( - queryset=User.objects.values_list("id", flat=True) - ), + child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), write_only=True, required=False, ) - total_issues = serializers.IntegerField(read_only=True) - cancelled_issues = serializers.IntegerField(read_only=True) - completed_issues = serializers.IntegerField(read_only=True) - started_issues = serializers.IntegerField(read_only=True) - unstarted_issues = serializers.IntegerField(read_only=True) - backlog_issues = serializers.IntegerField(read_only=True) class Meta: model = Module - fields = "__all__" + fields = [ + "name", + "description", + "start_date", + "target_date", + "status", + "lead", + "members", + "external_source", + "external_id", + ] read_only_fields = [ "id", "workspace", @@ -42,11 +51,6 @@ class ModuleSerializer(BaseSerializer): "deleted_at", ] - def to_representation(self, instance): - data = super().to_representation(instance) - data["members"] = [str(member.id) for member in instance.members.all()] - return data - def validate(self, data): if ( data.get("start_date", None) is not None @@ -96,6 +100,22 @@ class ModuleSerializer(BaseSerializer): return module + +class ModuleUpdateSerializer(ModuleCreateSerializer): + """ + Serializer for updating modules with enhanced validation and member management. + + Extends module creation with update-specific validations including member reassignment, + name conflict checking, and relationship management for module modifications. 
+ """ + + class Meta(ModuleCreateSerializer.Meta): + model = Module + fields = ModuleCreateSerializer.Meta.fields + [ + "members", + ] + read_only_fields = ModuleCreateSerializer.Meta.read_only_fields + def update(self, instance, validated_data): members = validated_data.pop("members", None) module_name = validated_data.get("name") @@ -131,7 +151,54 @@ class ModuleSerializer(BaseSerializer): return super().update(instance, validated_data) +class ModuleSerializer(BaseSerializer): + """ + Comprehensive module serializer with work item metrics and member management. + + Provides complete module data including work item counts by status, member relationships, + and progress tracking for feature-based project organization. + """ + + members = serializers.ListField( + child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()), + write_only=True, + required=False, + ) + total_issues = serializers.IntegerField(read_only=True) + cancelled_issues = serializers.IntegerField(read_only=True) + completed_issues = serializers.IntegerField(read_only=True) + started_issues = serializers.IntegerField(read_only=True) + unstarted_issues = serializers.IntegerField(read_only=True) + backlog_issues = serializers.IntegerField(read_only=True) + + class Meta: + model = Module + fields = "__all__" + read_only_fields = [ + "id", + "workspace", + "project", + "created_by", + "updated_by", + "created_at", + "updated_at", + "deleted_at", + ] + + def to_representation(self, instance): + data = super().to_representation(instance) + data["members"] = [str(member.id) for member in instance.members.all()] + return data + + class ModuleIssueSerializer(BaseSerializer): + """ + Serializer for module-work item relationships with sub-item counting. + + Manages the association between modules and work items, including + hierarchical issue tracking for nested work item structures. 
+ """ + sub_issues_count = serializers.IntegerField(read_only=True) class Meta: @@ -149,6 +216,13 @@ class ModuleIssueSerializer(BaseSerializer): class ModuleLinkSerializer(BaseSerializer): + """ + Serializer for module external links with URL validation. + + Handles external resource associations with modules including + URL validation and duplicate prevention for reference management. + """ + class Meta: model = ModuleLink fields = "__all__" @@ -174,6 +248,27 @@ class ModuleLinkSerializer(BaseSerializer): class ModuleLiteSerializer(BaseSerializer): + """ + Lightweight module serializer for minimal data transfer. + + Provides essential module information without computed metrics, + optimized for list views and reference lookups. + """ + class Meta: model = Module fields = "__all__" + + +class ModuleIssueRequestSerializer(serializers.Serializer): + """ + Serializer for bulk work item assignment to modules. + + Validates work item ID lists for batch operations including + module assignment and work item organization workflows. + """ + + issues = serializers.ListField( + child=serializers.UUIDField(), + help_text="List of issue IDs to add to the module", + ) diff --git a/apps/api/plane/api/serializers/project.py b/apps/api/plane/api/serializers/project.py index 10ae7f4de..d860c46b2 100644 --- a/apps/api/plane/api/serializers/project.py +++ b/apps/api/plane/api/serializers/project.py @@ -2,17 +2,153 @@ from rest_framework import serializers # Module imports -from plane.db.models import Project, ProjectIdentifier, WorkspaceMember -from plane.utils.content_validator import ( - validate_html_content, - validate_json_content, - validate_binary_data, +from plane.db.models import ( + Project, + ProjectIdentifier, + WorkspaceMember, + State, + Estimate, ) +from plane.utils.content_validator import ( + validate_html_content, +) from .base import BaseSerializer +class ProjectCreateSerializer(BaseSerializer): + """ + Serializer for creating projects with workspace validation. 
+ + Handles project creation including identifier validation, member verification, + and workspace association for new project initialization. + """ + + class Meta: + model = Project + fields = [ + "name", + "description", + "project_lead", + "default_assignee", + "identifier", + "icon_prop", + "emoji", + "cover_image", + "module_view", + "cycle_view", + "issue_views_view", + "page_view", + "intake_view", + "guest_view_all_features", + "archive_in", + "close_in", + "timezone", + "logo_props", + "external_source", + "external_id", + "is_issue_type_enabled", + ] + + read_only_fields = [ + "id", + "workspace", + "created_at", + "updated_at", + "created_by", + "updated_by", + ] + + def validate(self, data): + if data.get("project_lead", None) is not None: + # Check if the project lead is a member of the workspace + if not WorkspaceMember.objects.filter( + workspace_id=self.context["workspace_id"], + member_id=data.get("project_lead"), + ).exists(): + raise serializers.ValidationError( + "Project lead should be a user in the workspace" + ) + + if data.get("default_assignee", None) is not None: + # Check if the default assignee is a member of the workspace + if not WorkspaceMember.objects.filter( + workspace_id=self.context["workspace_id"], + member_id=data.get("default_assignee"), + ).exists(): + raise serializers.ValidationError( + "Default assignee should be a user in the workspace" + ) + + return data + + def create(self, validated_data): + identifier = validated_data.get("identifier", "").strip().upper() + if identifier == "": + raise serializers.ValidationError(detail="Project Identifier is required") + + if ProjectIdentifier.objects.filter( + name=identifier, workspace_id=self.context["workspace_id"] + ).exists(): + raise serializers.ValidationError(detail="Project Identifier is taken") + + project = Project.objects.create( + **validated_data, workspace_id=self.context["workspace_id"] + ) + return project + + +class 
ProjectUpdateSerializer(ProjectCreateSerializer): + """ + Serializer for updating projects with enhanced state and estimation management. + + Extends project creation with update-specific validations including default state + assignment, estimation configuration, and project setting modifications. + """ + + class Meta(ProjectCreateSerializer.Meta): + model = Project + fields = ProjectCreateSerializer.Meta.fields + [ + "default_state", + "estimate", + ] + + read_only_fields = ProjectCreateSerializer.Meta.read_only_fields + + def update(self, instance, validated_data): + """Update a project""" + if ( + validated_data.get("default_state", None) is not None + and not State.objects.filter( + project=instance, id=validated_data.get("default_state") + ).exists() + ): + # Check if the default state is a state in the project + raise serializers.ValidationError( + "Default state should be a state in the project" + ) + + if ( + validated_data.get("estimate", None) is not None + and not Estimate.objects.filter( + project=instance, id=validated_data.get("estimate") + ).exists() + ): + # Check if the estimate is a estimate in the project + raise serializers.ValidationError( + "Estimate should be a estimate in the project" + ) + return super().update(instance, validated_data) + + class ProjectSerializer(BaseSerializer): + """ + Comprehensive project serializer with metrics and member context. + + Provides complete project data including member counts, cycle/module totals, + deployment status, and user-specific context for project management. 
+ """ + total_members = serializers.IntegerField(read_only=True) total_cycles = serializers.IntegerField(read_only=True) total_modules = serializers.IntegerField(read_only=True) @@ -63,27 +199,18 @@ class ProjectSerializer(BaseSerializer): ) # Validate description content for security - if "description" in data and data["description"]: - # For Project, description might be text field, not JSON - if isinstance(data["description"], dict): - is_valid, error_msg = validate_json_content(data["description"]) - if not is_valid: - raise serializers.ValidationError({"description": error_msg}) - - if "description_text" in data and data["description_text"]: - is_valid, error_msg = validate_json_content(data["description_text"]) - if not is_valid: - raise serializers.ValidationError({"description_text": error_msg}) - if "description_html" in data and data["description_html"]: if isinstance(data["description_html"], dict): - is_valid, error_msg = validate_json_content(data["description_html"]) - else: - is_valid, error_msg = validate_html_content( + is_valid, error_msg, sanitized_html = validate_html_content( str(data["description_html"]) ) + # Update the data with sanitized HTML if available + if sanitized_html is not None: + data["description_html"] = sanitized_html if not is_valid: - raise serializers.ValidationError({"description_html": error_msg}) + raise serializers.ValidationError( + {"error": "html content is not valid"} + ) return data @@ -109,6 +236,13 @@ class ProjectSerializer(BaseSerializer): class ProjectLiteSerializer(BaseSerializer): + """ + Lightweight project serializer for minimal data transfer. + + Provides essential project information including identifiers, visual properties, + and basic metadata optimized for list views and references. 
+ """ + cover_image_url = serializers.CharField(read_only=True) class Meta: diff --git a/apps/api/plane/api/serializers/state.py b/apps/api/plane/api/serializers/state.py index 85b4c41ed..150c238fc 100644 --- a/apps/api/plane/api/serializers/state.py +++ b/apps/api/plane/api/serializers/state.py @@ -4,6 +4,13 @@ from plane.db.models import State class StateSerializer(BaseSerializer): + """ + Serializer for work item states with default state management. + + Handles state creation and updates including default state validation + and automatic default state switching for workflow management. + """ + def validate(self, data): # If the default is being provided then make all other states default False if data.get("default", False): @@ -24,10 +31,18 @@ class StateSerializer(BaseSerializer): "workspace", "project", "deleted_at", + "slug", ] class StateLiteSerializer(BaseSerializer): + """ + Lightweight state serializer for minimal data transfer. + + Provides essential state information including visual properties + and grouping data optimized for UI display and filtering. + """ + class Meta: model = State fields = ["id", "name", "color", "group"] diff --git a/apps/api/plane/api/serializers/user.py b/apps/api/plane/api/serializers/user.py index b266d7d54..805eb9fe1 100644 --- a/apps/api/plane/api/serializers/user.py +++ b/apps/api/plane/api/serializers/user.py @@ -1,3 +1,5 @@ +from rest_framework import serializers + # Module imports from plane.db.models import User @@ -5,6 +7,18 @@ from .base import BaseSerializer class UserLiteSerializer(BaseSerializer): + """ + Lightweight user serializer for minimal data transfer. + + Provides essential user information including names, avatar, and contact details + optimized for member lists, assignee displays, and user references. 
+ """ + + avatar_url = serializers.CharField( + help_text="Avatar URL", + read_only=True, + ) + class Meta: model = User fields = [ diff --git a/apps/api/plane/api/serializers/workspace.py b/apps/api/plane/api/serializers/workspace.py index 84453b8e0..e98683c2f 100644 --- a/apps/api/plane/api/serializers/workspace.py +++ b/apps/api/plane/api/serializers/workspace.py @@ -4,7 +4,12 @@ from .base import BaseSerializer class WorkspaceLiteSerializer(BaseSerializer): - """Lite serializer with only required fields""" + """ + Lightweight workspace serializer for minimal data transfer. + + Provides essential workspace identifiers including name, slug, and ID + optimized for navigation, references, and performance-critical operations. + """ class Meta: model = Workspace diff --git a/apps/api/plane/api/urls/__init__.py b/apps/api/plane/api/urls/__init__.py index d9b55e20e..ed187549d 100644 --- a/apps/api/plane/api/urls/__init__.py +++ b/apps/api/plane/api/urls/__init__.py @@ -5,8 +5,11 @@ from .cycle import urlpatterns as cycle_patterns from .module import urlpatterns as module_patterns from .intake import urlpatterns as intake_patterns from .member import urlpatterns as member_patterns +from .asset import urlpatterns as asset_patterns +from .user import urlpatterns as user_patterns urlpatterns = [ + *asset_patterns, *project_patterns, *state_patterns, *issue_patterns, @@ -14,4 +17,5 @@ urlpatterns = [ *module_patterns, *intake_patterns, *member_patterns, + *user_patterns, ] diff --git a/apps/api/plane/api/urls/asset.py b/apps/api/plane/api/urls/asset.py new file mode 100644 index 000000000..5bdd4d914 --- /dev/null +++ b/apps/api/plane/api/urls/asset.py @@ -0,0 +1,40 @@ +from django.urls import path + +from plane.api.views import ( + UserAssetEndpoint, + UserServerAssetEndpoint, + GenericAssetEndpoint, +) + +urlpatterns = [ + path( + "assets/user-assets/", + UserAssetEndpoint.as_view(http_method_names=["post"]), + name="user-assets", + ), + path( + "assets/user-assets//", + 
UserAssetEndpoint.as_view(http_method_names=["patch", "delete"]), + name="user-assets-detail", + ), + path( + "assets/user-assets/server/", + UserServerAssetEndpoint.as_view(http_method_names=["post"]), + name="user-server-assets", + ), + path( + "assets/user-assets//server/", + UserServerAssetEndpoint.as_view(http_method_names=["patch", "delete"]), + name="user-server-assets-detail", + ), + path( + "workspaces//assets/", + GenericAssetEndpoint.as_view(http_method_names=["post"]), + name="generic-asset", + ), + path( + "workspaces//assets//", + GenericAssetEndpoint.as_view(http_method_names=["get", "patch"]), + name="generic-asset-detail", + ), +] diff --git a/apps/api/plane/api/urls/cycle.py b/apps/api/plane/api/urls/cycle.py index b0ae21174..bd7136aa2 100644 --- a/apps/api/plane/api/urls/cycle.py +++ b/apps/api/plane/api/urls/cycle.py @@ -1,8 +1,10 @@ from django.urls import path from plane.api.views.cycle import ( - CycleAPIEndpoint, - CycleIssueAPIEndpoint, + CycleListCreateAPIEndpoint, + CycleDetailAPIEndpoint, + CycleIssueListCreateAPIEndpoint, + CycleIssueDetailAPIEndpoint, TransferCycleIssueAPIEndpoint, CycleArchiveUnarchiveAPIEndpoint, ) @@ -10,37 +12,42 @@ from plane.api.views.cycle import ( urlpatterns = [ path( "workspaces//projects//cycles/", - CycleAPIEndpoint.as_view(), + CycleListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="cycles", ), path( "workspaces//projects//cycles//", - CycleAPIEndpoint.as_view(), + CycleDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]), name="cycles", ), path( "workspaces//projects//cycles//cycle-issues/", - CycleIssueAPIEndpoint.as_view(), + CycleIssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="cycle-issues", ), path( "workspaces//projects//cycles//cycle-issues//", - CycleIssueAPIEndpoint.as_view(), + CycleIssueDetailAPIEndpoint.as_view(http_method_names=["get", "delete"]), name="cycle-issues", ), path( "workspaces//projects//cycles//transfer-issues/", 
- TransferCycleIssueAPIEndpoint.as_view(), + TransferCycleIssueAPIEndpoint.as_view(http_method_names=["post"]), name="transfer-issues", ), path( "workspaces//projects//cycles//archive/", - CycleArchiveUnarchiveAPIEndpoint.as_view(), + CycleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["post"]), name="cycle-archive-unarchive", ), path( "workspaces//projects//archived-cycles/", - CycleArchiveUnarchiveAPIEndpoint.as_view(), + CycleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["get"]), + name="cycle-archive-unarchive", + ), + path( + "workspaces//projects//archived-cycles//unarchive/", + CycleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["delete"]), name="cycle-archive-unarchive", ), ] diff --git a/apps/api/plane/api/urls/intake.py b/apps/api/plane/api/urls/intake.py index 4ef41d5f0..6af4aa4a8 100644 --- a/apps/api/plane/api/urls/intake.py +++ b/apps/api/plane/api/urls/intake.py @@ -1,17 +1,22 @@ from django.urls import path -from plane.api.views import IntakeIssueAPIEndpoint +from plane.api.views import ( + IntakeIssueListCreateAPIEndpoint, + IntakeIssueDetailAPIEndpoint, +) urlpatterns = [ path( "workspaces//projects//intake-issues/", - IntakeIssueAPIEndpoint.as_view(), + IntakeIssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="intake-issue", ), path( "workspaces//projects//intake-issues//", - IntakeIssueAPIEndpoint.as_view(), + IntakeIssueDetailAPIEndpoint.as_view( + http_method_names=["get", "patch", "delete"] + ), name="intake-issue", ), ] diff --git a/apps/api/plane/api/urls/issue.py b/apps/api/plane/api/urls/issue.py index 71ab39855..df64684de 100644 --- a/apps/api/plane/api/urls/issue.py +++ b/apps/api/plane/api/urls/issue.py @@ -1,79 +1,97 @@ from django.urls import path from plane.api.views import ( - IssueAPIEndpoint, - LabelAPIEndpoint, - IssueLinkAPIEndpoint, - IssueCommentAPIEndpoint, - IssueActivityAPIEndpoint, + IssueListCreateAPIEndpoint, + IssueDetailAPIEndpoint, + LabelListCreateAPIEndpoint, + 
LabelDetailAPIEndpoint, + IssueLinkListCreateAPIEndpoint, + IssueLinkDetailAPIEndpoint, + IssueCommentListCreateAPIEndpoint, + IssueCommentDetailAPIEndpoint, + IssueActivityListAPIEndpoint, + IssueActivityDetailAPIEndpoint, + IssueAttachmentListCreateAPIEndpoint, + IssueAttachmentDetailAPIEndpoint, WorkspaceIssueAPIEndpoint, - IssueAttachmentEndpoint, + IssueSearchEndpoint, ) urlpatterns = [ path( - "workspaces//issues/-/", - WorkspaceIssueAPIEndpoint.as_view(), + "workspaces//issues/search/", + IssueSearchEndpoint.as_view(http_method_names=["get"]), + name="issue-search", + ), + path( + "workspaces//issues/-/", + WorkspaceIssueAPIEndpoint.as_view(http_method_names=["get"]), name="issue-by-identifier", ), path( "workspaces//projects//issues/", - IssueAPIEndpoint.as_view(), + IssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="issue", ), path( "workspaces//projects//issues//", - IssueAPIEndpoint.as_view(), + IssueDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]), name="issue", ), path( "workspaces//projects//labels/", - LabelAPIEndpoint.as_view(), + LabelListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="label", ), path( "workspaces//projects//labels//", - LabelAPIEndpoint.as_view(), + LabelDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]), name="label", ), path( "workspaces//projects//issues//links/", - IssueLinkAPIEndpoint.as_view(), + IssueLinkListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="link", ), path( "workspaces//projects//issues//links//", - IssueLinkAPIEndpoint.as_view(), + IssueLinkDetailAPIEndpoint.as_view( + http_method_names=["get", "patch", "delete"] + ), name="link", ), path( "workspaces//projects//issues//comments/", - IssueCommentAPIEndpoint.as_view(), + IssueCommentListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="comment", ), path( "workspaces//projects//issues//comments//", - IssueCommentAPIEndpoint.as_view(), 
+ IssueCommentDetailAPIEndpoint.as_view( + http_method_names=["get", "patch", "delete"] + ), name="comment", ), path( "workspaces//projects//issues//activities/", - IssueActivityAPIEndpoint.as_view(), + IssueActivityListAPIEndpoint.as_view(http_method_names=["get"]), name="activity", ), path( "workspaces//projects//issues//activities//", - IssueActivityAPIEndpoint.as_view(), + IssueActivityDetailAPIEndpoint.as_view(http_method_names=["get"]), name="activity", ), path( "workspaces//projects//issues//issue-attachments/", - IssueAttachmentEndpoint.as_view(), + IssueAttachmentListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="attachment", ), path( "workspaces//projects//issues//issue-attachments//", - IssueAttachmentEndpoint.as_view(), + IssueAttachmentDetailAPIEndpoint.as_view( + http_method_names=["get", "patch", "delete"] + ), name="issue-attachment", ), ] diff --git a/apps/api/plane/api/urls/member.py b/apps/api/plane/api/urls/member.py index 1ec9cddb3..a2b331ea1 100644 --- a/apps/api/plane/api/urls/member.py +++ b/apps/api/plane/api/urls/member.py @@ -1,11 +1,16 @@ from django.urls import path -from plane.api.views import ProjectMemberAPIEndpoint +from plane.api.views import ProjectMemberAPIEndpoint, WorkspaceMemberAPIEndpoint urlpatterns = [ path( - "workspaces//projects//members/", - ProjectMemberAPIEndpoint.as_view(), - name="users", - ) + "workspaces//projects//members/", + ProjectMemberAPIEndpoint.as_view(http_method_names=["get"]), + name="project-members", + ), + path( + "workspaces//members/", + WorkspaceMemberAPIEndpoint.as_view(http_method_names=["get"]), + name="workspace-members", + ), ] diff --git a/apps/api/plane/api/urls/module.py b/apps/api/plane/api/urls/module.py index a131f4d4f..578f5c860 100644 --- a/apps/api/plane/api/urls/module.py +++ b/apps/api/plane/api/urls/module.py @@ -1,40 +1,47 @@ from django.urls import path from plane.api.views import ( - ModuleAPIEndpoint, - ModuleIssueAPIEndpoint, + 
ModuleListCreateAPIEndpoint, + ModuleDetailAPIEndpoint, + ModuleIssueListCreateAPIEndpoint, + ModuleIssueDetailAPIEndpoint, ModuleArchiveUnarchiveAPIEndpoint, ) urlpatterns = [ path( "workspaces//projects//modules/", - ModuleAPIEndpoint.as_view(), + ModuleListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="modules", ), path( "workspaces//projects//modules//", - ModuleAPIEndpoint.as_view(), - name="modules", + ModuleDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]), + name="modules-detail", ), path( "workspaces//projects//modules//module-issues/", - ModuleIssueAPIEndpoint.as_view(), + ModuleIssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="module-issues", ), path( "workspaces//projects//modules//module-issues//", - ModuleIssueAPIEndpoint.as_view(), - name="module-issues", + ModuleIssueDetailAPIEndpoint.as_view(http_method_names=["delete"]), + name="module-issues-detail", ), path( "workspaces//projects//modules//archive/", - ModuleArchiveUnarchiveAPIEndpoint.as_view(), - name="module-archive-unarchive", + ModuleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["post"]), + name="module-archive", ), path( "workspaces//projects//archived-modules/", - ModuleArchiveUnarchiveAPIEndpoint.as_view(), - name="module-archive-unarchive", + ModuleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["get"]), + name="module-archive-list", + ), + path( + "workspaces//projects//archived-modules//unarchive/", + ModuleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["delete"]), + name="module-unarchive", ), ] diff --git a/apps/api/plane/api/urls/project.py b/apps/api/plane/api/urls/project.py index d35c2cdd5..4cfc5a198 100644 --- a/apps/api/plane/api/urls/project.py +++ b/apps/api/plane/api/urls/project.py @@ -1,19 +1,27 @@ from django.urls import path -from plane.api.views import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint +from plane.api.views import ( + ProjectListCreateAPIEndpoint, + 
ProjectDetailAPIEndpoint, + ProjectArchiveUnarchiveAPIEndpoint, +) urlpatterns = [ path( - "workspaces//projects/", ProjectAPIEndpoint.as_view(), name="project" + "workspaces//projects/", + ProjectListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), + name="project", ), path( "workspaces//projects//", - ProjectAPIEndpoint.as_view(), + ProjectDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]), name="project", ), path( "workspaces//projects//archive/", - ProjectArchiveUnarchiveAPIEndpoint.as_view(), + ProjectArchiveUnarchiveAPIEndpoint.as_view( + http_method_names=["post", "delete"] + ), name="project-archive-unarchive", ), ] diff --git a/apps/api/plane/api/urls/schema.py b/apps/api/plane/api/urls/schema.py new file mode 100644 index 000000000..781dbe9de --- /dev/null +++ b/apps/api/plane/api/urls/schema.py @@ -0,0 +1,20 @@ +from drf_spectacular.views import ( + SpectacularAPIView, + SpectacularRedocView, + SpectacularSwaggerView, +) +from django.urls import path + +urlpatterns = [ + path("schema/", SpectacularAPIView.as_view(), name="schema"), + path( + "schema/swagger-ui/", + SpectacularSwaggerView.as_view(url_name="schema"), + name="swagger-ui", + ), + path( + "schema/redoc/", + SpectacularRedocView.as_view(url_name="schema"), + name="redoc", + ), +] diff --git a/apps/api/plane/api/urls/state.py b/apps/api/plane/api/urls/state.py index b03f386e6..e35012a20 100644 --- a/apps/api/plane/api/urls/state.py +++ b/apps/api/plane/api/urls/state.py @@ -1,16 +1,19 @@ from django.urls import path -from plane.api.views import StateAPIEndpoint +from plane.api.views import ( + StateListCreateAPIEndpoint, + StateDetailAPIEndpoint, +) urlpatterns = [ path( "workspaces//projects//states/", - StateAPIEndpoint.as_view(), + StateListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]), name="states", ), path( "workspaces//projects//states//", - StateAPIEndpoint.as_view(), + StateDetailAPIEndpoint.as_view(http_method_names=["get", "patch", 
"delete"]), name="states", ), ] diff --git a/apps/api/plane/api/urls/user.py b/apps/api/plane/api/urls/user.py new file mode 100644 index 000000000..461b08333 --- /dev/null +++ b/apps/api/plane/api/urls/user.py @@ -0,0 +1,11 @@ +from django.urls import path + +from plane.api.views import UserEndpoint + +urlpatterns = [ + path( + "users/me/", + UserEndpoint.as_view(http_method_names=["get"]), + name="users", + ), +] diff --git a/apps/api/plane/api/views/__init__.py b/apps/api/plane/api/views/__init__.py index 2299f7ec5..8535d4858 100644 --- a/apps/api/plane/api/views/__init__.py +++ b/apps/api/plane/api/views/__init__.py @@ -1,30 +1,55 @@ -from .project import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint +from .project import ( + ProjectListCreateAPIEndpoint, + ProjectDetailAPIEndpoint, + ProjectArchiveUnarchiveAPIEndpoint, +) -from .state import StateAPIEndpoint +from .state import ( + StateListCreateAPIEndpoint, + StateDetailAPIEndpoint, +) from .issue import ( WorkspaceIssueAPIEndpoint, - IssueAPIEndpoint, - LabelAPIEndpoint, - IssueLinkAPIEndpoint, - IssueCommentAPIEndpoint, - IssueActivityAPIEndpoint, - IssueAttachmentEndpoint, + IssueListCreateAPIEndpoint, + IssueDetailAPIEndpoint, + LabelListCreateAPIEndpoint, + LabelDetailAPIEndpoint, + IssueLinkListCreateAPIEndpoint, + IssueLinkDetailAPIEndpoint, + IssueCommentListCreateAPIEndpoint, + IssueCommentDetailAPIEndpoint, + IssueActivityListAPIEndpoint, + IssueActivityDetailAPIEndpoint, + IssueAttachmentListCreateAPIEndpoint, + IssueAttachmentDetailAPIEndpoint, + IssueSearchEndpoint, ) from .cycle import ( - CycleAPIEndpoint, - CycleIssueAPIEndpoint, + CycleListCreateAPIEndpoint, + CycleDetailAPIEndpoint, + CycleIssueListCreateAPIEndpoint, + CycleIssueDetailAPIEndpoint, TransferCycleIssueAPIEndpoint, CycleArchiveUnarchiveAPIEndpoint, ) from .module import ( - ModuleAPIEndpoint, - ModuleIssueAPIEndpoint, + ModuleListCreateAPIEndpoint, + ModuleDetailAPIEndpoint, + ModuleIssueListCreateAPIEndpoint, + 
ModuleIssueDetailAPIEndpoint, ModuleArchiveUnarchiveAPIEndpoint, ) -from .member import ProjectMemberAPIEndpoint +from .member import ProjectMemberAPIEndpoint, WorkspaceMemberAPIEndpoint -from .intake import IntakeIssueAPIEndpoint +from .intake import ( + IntakeIssueListCreateAPIEndpoint, + IntakeIssueDetailAPIEndpoint, +) + +from .asset import UserAssetEndpoint, UserServerAssetEndpoint, GenericAssetEndpoint + +from .user import UserEndpoint diff --git a/apps/api/plane/api/views/asset.py b/apps/api/plane/api/views/asset.py new file mode 100644 index 000000000..2e668c15d --- /dev/null +++ b/apps/api/plane/api/views/asset.py @@ -0,0 +1,631 @@ +# Python Imports +import uuid + +# Django Imports +from django.utils import timezone +from django.conf import settings + +# Third party imports +from rest_framework import status +from rest_framework.response import Response +from drf_spectacular.utils import OpenApiExample, OpenApiRequest, OpenApiTypes + +# Module Imports +from plane.bgtasks.storage_metadata_task import get_asset_object_metadata +from plane.settings.storage import S3Storage +from plane.db.models import FileAsset, User, Workspace +from plane.api.views.base import BaseAPIView +from plane.api.serializers import ( + UserAssetUploadSerializer, + AssetUpdateSerializer, + GenericAssetUploadSerializer, + GenericAssetUpdateSerializer, +) +from plane.utils.openapi import ( + ASSET_ID_PARAMETER, + WORKSPACE_SLUG_PARAMETER, + PRESIGNED_URL_SUCCESS_RESPONSE, + GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE, + GENERIC_ASSET_VALIDATION_ERROR_RESPONSE, + ASSET_CONFLICT_RESPONSE, + ASSET_DOWNLOAD_SUCCESS_RESPONSE, + ASSET_DOWNLOAD_ERROR_RESPONSE, + ASSET_UPDATED_RESPONSE, + ASSET_DELETED_RESPONSE, + VALIDATION_ERROR_RESPONSE, + ASSET_NOT_FOUND_RESPONSE, + NOT_FOUND_RESPONSE, + UNAUTHORIZED_RESPONSE, + asset_docs, +) +from plane.utils.exception_logger import log_exception + + +class UserAssetEndpoint(BaseAPIView): + """This endpoint is used to upload user profile images.""" + + def 
asset_delete(self, asset_id): + asset = FileAsset.objects.filter(id=asset_id).first() + if asset is None: + return + asset.is_deleted = True + asset.deleted_at = timezone.now() + asset.save(update_fields=["is_deleted", "deleted_at"]) + return + + def entity_asset_delete(self, entity_type, asset, request): + # User Avatar + if entity_type == FileAsset.EntityTypeContext.USER_AVATAR: + user = User.objects.get(id=asset.user_id) + user.avatar_asset_id = None + user.save() + return + # User Cover + if entity_type == FileAsset.EntityTypeContext.USER_COVER: + user = User.objects.get(id=asset.user_id) + user.cover_image_asset_id = None + user.save() + return + return + + @asset_docs( + operation_id="create_user_asset_upload", + summary="Generate presigned URL for user asset upload", + description="Generate presigned URL for user asset upload", + request=OpenApiRequest( + request=UserAssetUploadSerializer, + examples=[ + OpenApiExample( + "User Avatar Upload", + value={ + "name": "profile.jpg", + "type": "image/jpeg", + "size": 1024000, + "entity_type": "USER_AVATAR", + }, + description="Example request for uploading a user avatar", + ), + OpenApiExample( + "User Cover Upload", + value={ + "name": "cover.jpg", + "type": "image/jpeg", + "size": 1024000, + "entity_type": "USER_COVER", + }, + description="Example request for uploading a user cover", + ), + ], + ), + responses={ + 200: PRESIGNED_URL_SUCCESS_RESPONSE, + 400: VALIDATION_ERROR_RESPONSE, + 401: UNAUTHORIZED_RESPONSE, + }, + ) + def post(self, request): + """Generate presigned URL for user asset upload. + + Create a presigned URL for uploading user profile assets (avatar or cover image). + This endpoint generates the necessary credentials for direct S3 upload. 
+ """ + # get the asset key + name = request.data.get("name") + type = request.data.get("type", "image/jpeg") + size = int(request.data.get("size", settings.FILE_SIZE_LIMIT)) + entity_type = request.data.get("entity_type", False) + + # Check if the file size is within the limit + size_limit = min(size, settings.FILE_SIZE_LIMIT) + + # Check if the entity type is allowed + if not entity_type or entity_type not in ["USER_AVATAR", "USER_COVER"]: + return Response( + {"error": "Invalid entity type.", "status": False}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Check if the file type is allowed + allowed_types = [ + "image/jpeg", + "image/png", + "image/webp", + "image/jpg", + "image/gif", + ] + if type not in allowed_types: + return Response( + { + "error": "Invalid file type. Only JPEG and PNG files are allowed.", + "status": False, + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # asset key + asset_key = f"{uuid.uuid4().hex}-{name}" + + # Create a File Asset + asset = FileAsset.objects.create( + attributes={"name": name, "type": type, "size": size_limit}, + asset=asset_key, + size=size_limit, + user=request.user, + created_by=request.user, + entity_type=entity_type, + ) + + # Get the presigned URL + storage = S3Storage(request=request) + # Generate a presigned URL to share an S3 object + presigned_url = storage.generate_presigned_post( + object_name=asset_key, file_type=type, file_size=size_limit + ) + # Return the presigned URL + return Response( + { + "upload_data": presigned_url, + "asset_id": str(asset.id), + "asset_url": asset.asset_url, + }, + status=status.HTTP_200_OK, + ) + + @asset_docs( + operation_id="update_user_asset", + summary="Mark user asset as uploaded", + description="Mark user asset as uploaded", + parameters=[ASSET_ID_PARAMETER], + request=OpenApiRequest( + request=AssetUpdateSerializer, + examples=[ + OpenApiExample( + "Update Asset Attributes", + value={ + "attributes": { + "name": "updated_profile.jpg", + "type": "image/jpeg", + "size": 
1024000, + }, + "entity_type": "USER_AVATAR", + }, + description="Example request for updating asset attributes", + ), + ], + ), + responses={ + 204: ASSET_UPDATED_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + ) + def patch(self, request, asset_id): + """Update user asset after upload completion. + + Update the asset status and attributes after the file has been uploaded to S3. + This endpoint should be called after completing the S3 upload to mark the asset as uploaded. + """ + # get the asset id + asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id) + # get the storage metadata + asset.is_uploaded = True + # get the storage metadata + if not asset.storage_metadata: + get_asset_object_metadata.delay(asset_id=str(asset_id)) + # update the attributes + asset.attributes = request.data.get("attributes", asset.attributes) + # save the asset + asset.save(update_fields=["is_uploaded", "attributes"]) + return Response(status=status.HTTP_204_NO_CONTENT) + + @asset_docs( + operation_id="delete_user_asset", + summary="Delete user asset", + parameters=[ASSET_ID_PARAMETER], + responses={ + 204: ASSET_DELETED_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + ) + def delete(self, request, asset_id): + """Delete user asset. + + Delete a user profile asset (avatar or cover image) and remove its reference from the user profile. + This performs a soft delete by marking the asset as deleted and updating the user's profile. 
+ """ + asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id) + asset.is_deleted = True + asset.deleted_at = timezone.now() + # get the entity and save the asset id for the request field + self.entity_asset_delete( + entity_type=asset.entity_type, asset=asset, request=request + ) + asset.save(update_fields=["is_deleted", "deleted_at"]) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class UserServerAssetEndpoint(BaseAPIView): + """This endpoint is used to upload user profile images.""" + + def asset_delete(self, asset_id): + asset = FileAsset.objects.filter(id=asset_id).first() + if asset is None: + return + asset.is_deleted = True + asset.deleted_at = timezone.now() + asset.save(update_fields=["is_deleted", "deleted_at"]) + return + + def entity_asset_delete(self, entity_type, asset, request): + # User Avatar + if entity_type == FileAsset.EntityTypeContext.USER_AVATAR: + user = User.objects.get(id=asset.user_id) + user.avatar_asset_id = None + user.save() + return + # User Cover + if entity_type == FileAsset.EntityTypeContext.USER_COVER: + user = User.objects.get(id=asset.user_id) + user.cover_image_asset_id = None + user.save() + return + return + + @asset_docs( + operation_id="create_user_server_asset_upload", + summary="Generate presigned URL for user server asset upload", + request=UserAssetUploadSerializer, + responses={ + 200: PRESIGNED_URL_SUCCESS_RESPONSE, + 400: VALIDATION_ERROR_RESPONSE, + }, + ) + def post(self, request): + """Generate presigned URL for user server asset upload. + + Create a presigned URL for uploading user profile assets (avatar or cover image) using server credentials. + This endpoint generates the necessary credentials for direct S3 upload with server-side authentication. 
+ """ + # get the asset key + name = request.data.get("name") + type = request.data.get("type", "image/jpeg") + size = int(request.data.get("size", settings.FILE_SIZE_LIMIT)) + entity_type = request.data.get("entity_type", False) + + # Check if the file size is within the limit + size_limit = min(size, settings.FILE_SIZE_LIMIT) + + # Check if the entity type is allowed + if not entity_type or entity_type not in ["USER_AVATAR", "USER_COVER"]: + return Response( + {"error": "Invalid entity type.", "status": False}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Check if the file type is allowed + allowed_types = [ + "image/jpeg", + "image/png", + "image/webp", + "image/jpg", + "image/gif", + ] + if type not in allowed_types: + return Response( + { + "error": "Invalid file type. Only JPEG and PNG files are allowed.", + "status": False, + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # asset key + asset_key = f"{uuid.uuid4().hex}-{name}" + + # Create a File Asset + asset = FileAsset.objects.create( + attributes={"name": name, "type": type, "size": size_limit}, + asset=asset_key, + size=size_limit, + user=request.user, + created_by=request.user, + entity_type=entity_type, + ) + + # Get the presigned URL + storage = S3Storage(request=request, is_server=True) + # Generate a presigned URL to share an S3 object + presigned_url = storage.generate_presigned_post( + object_name=asset_key, file_type=type, file_size=size_limit + ) + # Return the presigned URL + return Response( + { + "upload_data": presigned_url, + "asset_id": str(asset.id), + "asset_url": asset.asset_url, + }, + status=status.HTTP_200_OK, + ) + + @asset_docs( + operation_id="update_user_server_asset", + summary="Mark user server asset as uploaded", + parameters=[ASSET_ID_PARAMETER], + request=AssetUpdateSerializer, + responses={ + 204: ASSET_UPDATED_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + ) + def patch(self, request, asset_id): + """Update user server asset after upload completion. 
+ + Update the asset status and attributes after the file has been uploaded to S3 using server credentials. + This endpoint should be called after completing the S3 upload to mark the asset as uploaded. + """ + # get the asset id + asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id) + # get the storage metadata + asset.is_uploaded = True + # get the storage metadata + if not asset.storage_metadata: + get_asset_object_metadata.delay(asset_id=str(asset_id)) + # update the attributes + asset.attributes = request.data.get("attributes", asset.attributes) + # save the asset + asset.save(update_fields=["is_uploaded", "attributes"]) + return Response(status=status.HTTP_204_NO_CONTENT) + + @asset_docs( + operation_id="delete_user_server_asset", + summary="Delete user server asset", + parameters=[ASSET_ID_PARAMETER], + responses={ + 204: ASSET_DELETED_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + ) + def delete(self, request, asset_id): + """Delete user server asset. + + Delete a user profile asset (avatar or cover image) using server credentials and remove its reference from the user profile. + This performs a soft delete by marking the asset as deleted and updating the user's profile. 
+ """ + asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id) + asset.is_deleted = True + asset.deleted_at = timezone.now() + # get the entity and save the asset id for the request field + self.entity_asset_delete( + entity_type=asset.entity_type, asset=asset, request=request + ) + asset.save(update_fields=["is_deleted", "deleted_at"]) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class GenericAssetEndpoint(BaseAPIView): + """This endpoint is used to upload generic assets that can be later bound to entities.""" + + use_read_replica = True + + @asset_docs( + operation_id="get_generic_asset", + summary="Get presigned URL for asset download", + description="Get presigned URL for asset download", + parameters=[WORKSPACE_SLUG_PARAMETER], + responses={ + 200: ASSET_DOWNLOAD_SUCCESS_RESPONSE, + 400: ASSET_DOWNLOAD_ERROR_RESPONSE, + 404: ASSET_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, asset_id): + """Get presigned URL for asset download. + + Generate a presigned URL for downloading a generic asset. + The asset must be uploaded and associated with the specified workspace. 
+ """ + try: + # Get the workspace + workspace = Workspace.objects.get(slug=slug) + + # Get the asset + asset = FileAsset.objects.get( + id=asset_id, workspace_id=workspace.id, is_deleted=False + ) + + # Check if the asset exists and is uploaded + if not asset.is_uploaded: + return Response( + {"error": "Asset not yet uploaded"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Generate presigned URL for GET + storage = S3Storage(request=request, is_server=True) + presigned_url = storage.generate_presigned_url( + object_name=asset.asset.name, filename=asset.attributes.get("name") + ) + + return Response( + { + "asset_id": str(asset.id), + "asset_url": presigned_url, + "asset_name": asset.attributes.get("name", ""), + "asset_type": asset.attributes.get("type", ""), + }, + status=status.HTTP_200_OK, + ) + + except Workspace.DoesNotExist: + return Response( + {"error": "Workspace not found"}, status=status.HTTP_404_NOT_FOUND + ) + except FileAsset.DoesNotExist: + return Response( + {"error": "Asset not found"}, status=status.HTTP_404_NOT_FOUND + ) + except Exception as e: + log_exception(e) + return Response( + {"error": "Internal server error"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) + + @asset_docs( + operation_id="create_generic_asset_upload", + summary="Generate presigned URL for generic asset upload", + description="Generate presigned URL for generic asset upload", + parameters=[WORKSPACE_SLUG_PARAMETER], + request=OpenApiRequest( + request=GenericAssetUploadSerializer, + examples=[ + OpenApiExample( + "GenericAssetUploadSerializer", + value={ + "name": "image.jpg", + "type": "image/jpeg", + "size": 1024000, + "project_id": "123e4567-e89b-12d3-a456-426614174000", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for uploading a generic asset", + ), + ], + ), + responses={ + 200: GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE, + 400: GENERIC_ASSET_VALIDATION_ERROR_RESPONSE, + 404: NOT_FOUND_RESPONSE, + 409: 
ASSET_CONFLICT_RESPONSE, + }, + ) + def post(self, request, slug): + """Generate presigned URL for generic asset upload. + + Create a presigned URL for uploading generic assets that can be bound to entities like work items. + Supports various file types and includes external source tracking for integrations. + """ + name = request.data.get("name") + type = request.data.get("type") + size = int(request.data.get("size", settings.FILE_SIZE_LIMIT)) + project_id = request.data.get("project_id") + external_id = request.data.get("external_id") + external_source = request.data.get("external_source") + + # Check if the request is valid + if not name or not size: + return Response( + {"error": "Name and size are required fields.", "status": False}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Check if the file size is within the limit + size_limit = min(size, settings.FILE_SIZE_LIMIT) + + # Check if the file type is allowed + if not type or type not in settings.ATTACHMENT_MIME_TYPES: + return Response( + {"error": "Invalid file type.", "status": False}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get the workspace + workspace = Workspace.objects.get(slug=slug) + + # asset key + asset_key = f"{workspace.id}/{uuid.uuid4().hex}-{name}" + + # Check for existing asset with same external details if provided + if external_id and external_source: + existing_asset = FileAsset.objects.filter( + workspace__slug=slug, + external_source=external_source, + external_id=external_id, + is_deleted=False, + ).first() + + if existing_asset: + return Response( + { + "message": "Asset with same external id and source already exists", + "asset_id": str(existing_asset.id), + "asset_url": existing_asset.asset_url, + }, + status=status.HTTP_409_CONFLICT, + ) + + # Create a File Asset + asset = FileAsset.objects.create( + attributes={"name": name, "type": type, "size": size_limit}, + asset=asset_key, + size=size_limit, + workspace_id=workspace.id, + project_id=project_id, + 
created_by=request.user, + external_id=external_id, + external_source=external_source, + entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, # Using ISSUE_ATTACHMENT since we'll bind it to issues + ) + + # Get the presigned URL + storage = S3Storage(request=request, is_server=True) + presigned_url = storage.generate_presigned_post( + object_name=asset_key, file_type=type, file_size=size_limit + ) + + return Response( + { + "upload_data": presigned_url, + "asset_id": str(asset.id), + "asset_url": asset.asset_url, + }, + status=status.HTTP_200_OK, + ) + + @asset_docs( + operation_id="update_generic_asset", + summary="Update generic asset after upload completion", + description="Update generic asset after upload completion", + parameters=[WORKSPACE_SLUG_PARAMETER, ASSET_ID_PARAMETER], + request=OpenApiRequest( + request=GenericAssetUpdateSerializer, + examples=[ + OpenApiExample( + "GenericAssetUpdateSerializer", + value={"is_uploaded": True}, + description="Example request for updating a generic asset", + ) + ], + ), + responses={ + 204: ASSET_UPDATED_RESPONSE, + 404: ASSET_NOT_FOUND_RESPONSE, + }, + ) + def patch(self, request, slug, asset_id): + """Update generic asset after upload completion. + + Update the asset status after the file has been uploaded to S3. + This endpoint should be called after completing the S3 upload to mark the asset as uploaded + and trigger metadata extraction. 
+ """ + try: + asset = FileAsset.objects.get( + id=asset_id, workspace__slug=slug, is_deleted=False + ) + + # Update is_uploaded status + asset.is_uploaded = request.data.get("is_uploaded", asset.is_uploaded) + + # Update storage metadata if not present + if not asset.storage_metadata: + get_asset_object_metadata.delay(asset_id=str(asset_id)) + + asset.save(update_fields=["is_uploaded"]) + + return Response(status=status.HTTP_204_NO_CONTENT) + except FileAsset.DoesNotExist: + return Response( + {"error": "Asset not found"}, status=status.HTTP_404_NOT_FOUND + ) diff --git a/apps/api/plane/api/views/base.py b/apps/api/plane/api/views/base.py index c79c2f853..ea5bcba02 100644 --- a/apps/api/plane/api/views/base.py +++ b/apps/api/plane/api/views/base.py @@ -13,13 +13,14 @@ from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response # Third party imports -from rest_framework.views import APIView +from rest_framework.generics import GenericAPIView # Module imports from plane.api.middleware.api_authentication import APIKeyAuthentication from plane.api.rate_limit import ApiKeyRateThrottle, ServiceTokenRateThrottle from plane.utils.exception_logger import log_exception from plane.utils.paginator import BasePaginator +from plane.utils.core.mixins import ReadReplicaControlMixin class TimezoneMixin: @@ -36,11 +37,15 @@ class TimezoneMixin: timezone.deactivate() -class BaseAPIView(TimezoneMixin, APIView, BasePaginator): +class BaseAPIView( + TimezoneMixin, GenericAPIView, ReadReplicaControlMixin, BasePaginator +): authentication_classes = [APIKeyAuthentication] permission_classes = [IsAuthenticated] + use_read_replica = False + def filter_queryset(self, queryset): for backend in list(self.filter_backends): queryset = backend().filter_queryset(self.request, queryset, self) diff --git a/apps/api/plane/api/views/cycle.py b/apps/api/plane/api/views/cycle.py index 457671b93..e10d3d16e 100644 --- a/apps/api/plane/api/views/cycle.py +++ 
b/apps/api/plane/api/views/cycle.py @@ -23,9 +23,18 @@ from django.db import models # Third party imports from rest_framework import status from rest_framework.response import Response +from drf_spectacular.utils import OpenApiRequest, OpenApiResponse # Module imports -from plane.api.serializers import CycleIssueSerializer, CycleSerializer, IssueSerializer +from plane.api.serializers import ( + CycleIssueSerializer, + CycleSerializer, + CycleIssueRequestSerializer, + TransferCycleIssueRequestSerializer, + CycleCreateSerializer, + CycleUpdateSerializer, + IssueSerializer, +) from plane.app.permissions import ProjectEntityPermission from plane.bgtasks.issue_activities_task import issue_activity from plane.db.models import ( @@ -42,19 +51,42 @@ from plane.utils.analytics_plot import burndown_plot from plane.utils.host import base_host from .base import BaseAPIView from plane.bgtasks.webhook_task import model_activity +from plane.utils.openapi.decorators import cycle_docs +from plane.utils.openapi import ( + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + CYCLE_VIEW_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + create_paginated_response, + # Request Examples + CYCLE_CREATE_EXAMPLE, + CYCLE_UPDATE_EXAMPLE, + CYCLE_ISSUE_REQUEST_EXAMPLE, + TRANSFER_CYCLE_ISSUE_EXAMPLE, + # Response Examples + CYCLE_EXAMPLE, + CYCLE_ISSUE_EXAMPLE, + TRANSFER_CYCLE_ISSUE_SUCCESS_EXAMPLE, + TRANSFER_CYCLE_ISSUE_ERROR_EXAMPLE, + TRANSFER_CYCLE_COMPLETED_ERROR_EXAMPLE, + DELETED_RESPONSE, + ARCHIVED_RESPONSE, + CYCLE_CANNOT_ARCHIVE_RESPONSE, + UNARCHIVED_RESPONSE, + REQUIRED_FIELDS_RESPONSE, +) -class CycleAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to cycle. 
- - """ +class CycleListCreateAPIEndpoint(BaseAPIView): + """Cycle List and Create Endpoint""" serializer_class = CycleSerializer model = Cycle webhook_event = "cycle" permission_classes = [ProjectEntityPermission] + use_read_replica = True def get_queryset(self): return ( @@ -136,17 +168,34 @@ class CycleAPIEndpoint(BaseAPIView): .distinct() ) - def get(self, request, slug, project_id, pk=None): + @cycle_docs( + operation_id="list_cycles", + summary="List cycles", + description="Retrieve all cycles in a project. Supports filtering by cycle status like current, upcoming, completed, or draft.", + parameters=[ + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + CYCLE_VIEW_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + CycleSerializer, + "PaginatedCycleResponse", + "Paginated list of cycles", + "Paginated Cycles", + ), + }, + ) + def get(self, request, slug, project_id): + """List cycles + + Retrieve all cycles in a project. + Supports filtering by cycle status like current, upcoming, completed, or draft. + """ project = Project.objects.get(workspace__slug=slug, pk=project_id) - if pk: - queryset = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk) - data = CycleSerializer( - queryset, - fields=self.fields, - expand=self.expand, - context={"project": project}, - ).data - return Response(data, status=status.HTTP_200_OK) queryset = self.get_queryset().filter(archived_at__isnull=True) cycle_view = request.GET.get("cycle_view", "all") @@ -237,7 +286,28 @@ class CycleAPIEndpoint(BaseAPIView): ).data, ) + @cycle_docs( + operation_id="create_cycle", + summary="Create cycle", + description="Create a new development cycle with specified name, description, and date range. 
Supports external ID tracking for integration purposes.", + request=OpenApiRequest( + request=CycleCreateSerializer, + examples=[CYCLE_CREATE_EXAMPLE], + ), + responses={ + 201: OpenApiResponse( + description="Cycle created", + response=CycleSerializer, + examples=[CYCLE_EXAMPLE], + ), + }, + ) def post(self, request, slug, project_id): + """Create cycle + + Create a new development cycle with specified name, description, and date range. + Supports external ID tracking for integration purposes. + """ if ( request.data.get("start_date", None) is None and request.data.get("end_date", None) is None @@ -245,7 +315,7 @@ class CycleAPIEndpoint(BaseAPIView): request.data.get("start_date", None) is not None and request.data.get("end_date", None) is not None ): - serializer = CycleSerializer(data=request.data) + serializer = CycleCreateSerializer(data=request.data) if serializer.is_valid(): if ( request.data.get("external_id") @@ -274,13 +344,16 @@ class CycleAPIEndpoint(BaseAPIView): # Send the model activity model_activity.delay( model_name="cycle", - model_id=str(serializer.data["id"]), + model_id=str(serializer.instance.id), requested_data=request.data, current_instance=None, actor_id=request.user.id, slug=slug, origin=base_host(request=request, is_app=True), ) + + cycle = Cycle.objects.get(pk=serializer.instance.id) + serializer = CycleSerializer(cycle) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) else: @@ -291,7 +364,148 @@ class CycleAPIEndpoint(BaseAPIView): status=status.HTTP_400_BAD_REQUEST, ) + +class CycleDetailAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `retrieve`, `update` and `destroy` actions related to cycle. 
+ """ + + serializer_class = CycleSerializer + model = Cycle + webhook_event = "cycle" + permission_classes = [ProjectEntityPermission] + use_read_replica = True + + def get_queryset(self): + return ( + Cycle.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + ) + .select_related("project") + .select_related("workspace") + .select_related("owned_by") + .annotate( + total_issues=Count( + "issue_cycle", + filter=Q( + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + issue_cycle__deleted_at__isnull=True, + ), + ) + ) + .annotate( + completed_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + issue_cycle__deleted_at__isnull=True, + ), + ) + ) + .annotate( + cancelled_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="cancelled", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + issue_cycle__deleted_at__isnull=True, + ), + ) + ) + .annotate( + started_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + issue_cycle__deleted_at__isnull=True, + ), + ) + ) + .annotate( + unstarted_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="unstarted", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + issue_cycle__deleted_at__isnull=True, + ), + ) + ) + .annotate( + backlog_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="backlog", + 
issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + issue_cycle__deleted_at__isnull=True, + ), + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + @cycle_docs( + operation_id="retrieve_cycle", + summary="Retrieve cycle", + description="Retrieve details of a specific cycle by its ID. Supports cycle status filtering.", + responses={ + 200: OpenApiResponse( + description="Cycles", + response=CycleSerializer, + examples=[CYCLE_EXAMPLE], + ), + }, + ) + def get(self, request, slug, project_id, pk): + """List or retrieve cycles + + Retrieve all cycles in a project or get details of a specific cycle. + Supports filtering by cycle status like current, upcoming, completed, or draft. + """ + project = Project.objects.get(workspace__slug=slug, pk=project_id) + queryset = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk) + data = CycleSerializer( + queryset, + fields=self.fields, + expand=self.expand, + context={"project": project}, + ).data + return Response(data, status=status.HTTP_200_OK) + + @cycle_docs( + operation_id="update_cycle", + summary="Update cycle", + description="Modify an existing cycle's properties like name, description, or date range. Completed cycles can only have their sort order changed.", + request=OpenApiRequest( + request=CycleUpdateSerializer, + examples=[CYCLE_UPDATE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Cycle updated", + response=CycleSerializer, + examples=[CYCLE_EXAMPLE], + ), + }, + ) def patch(self, request, slug, project_id, pk): + """Update cycle + + Modify an existing cycle's properties like name, description, or date range. + Completed cycles can only have their sort order changed. 
+ """ cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) current_instance = json.dumps( @@ -320,7 +534,7 @@ class CycleAPIEndpoint(BaseAPIView): status=status.HTTP_400_BAD_REQUEST, ) - serializer = CycleSerializer(cycle, data=request.data, partial=True) + serializer = CycleUpdateSerializer(cycle, data=request.data, partial=True) if serializer.is_valid(): if ( request.data.get("external_id") @@ -346,17 +560,32 @@ class CycleAPIEndpoint(BaseAPIView): # Send the model activity model_activity.delay( model_name="cycle", - model_id=str(serializer.data["id"]), + model_id=str(serializer.instance.id), requested_data=request.data, current_instance=current_instance, actor_id=request.user.id, slug=slug, origin=base_host(request=request, is_app=True), ) + cycle = Cycle.objects.get(pk=serializer.instance.id) + serializer = CycleSerializer(cycle) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + @cycle_docs( + operation_id="delete_cycle", + summary="Delete cycle", + description="Permanently remove a cycle and all its associated issue relationships", + responses={ + 204: DELETED_RESPONSE, + }, + ) def delete(self, request, slug, project_id, pk): + """Delete cycle + + Permanently remove a cycle and all its associated issue relationships. + Only admins or the cycle creator can perform this action. 
+ """ cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) if cycle.owned_by_id != request.user.id and ( not ProjectMember.objects.filter( @@ -403,7 +632,10 @@ class CycleAPIEndpoint(BaseAPIView): class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView): + """Cycle Archive and Unarchive Endpoint""" + permission_classes = [ProjectEntityPermission] + use_read_replica = True def get_queryset(self): return ( @@ -509,7 +741,27 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView): .distinct() ) + @cycle_docs( + operation_id="list_archived_cycles", + description="Retrieve all cycles that have been archived in the project.", + summary="List archived cycles", + parameters=[CURSOR_PARAMETER, PER_PAGE_PARAMETER], + request={}, + responses={ + 200: create_paginated_response( + CycleSerializer, + "PaginatedArchivedCycleResponse", + "Paginated list of archived cycles", + "Paginated Archived Cycles", + ), + }, + ) def get(self, request, slug, project_id): + """List archived cycles + + Retrieve all cycles that have been archived in the project. + Returns paginated results with cycle statistics and completion data. + """ return self.paginate( request=request, queryset=(self.get_queryset()), @@ -518,7 +770,22 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView): ).data, ) + @cycle_docs( + operation_id="archive_cycle", + summary="Archive cycle", + description="Move a completed cycle to archived status for historical tracking. Only cycles that have ended can be archived.", + request={}, + responses={ + 204: ARCHIVED_RESPONSE, + 400: CYCLE_CANNOT_ARCHIVE_RESPONSE, + }, + ) def post(self, request, slug, project_id, cycle_id): + """Archive cycle + + Move a completed cycle to archived status for historical tracking. + Only cycles that have ended can be archived. 
+ """ cycle = Cycle.objects.get( pk=cycle_id, project_id=project_id, workspace__slug=slug ) @@ -537,7 +804,21 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView): ).delete() return Response(status=status.HTTP_204_NO_CONTENT) + @cycle_docs( + operation_id="unarchive_cycle", + summary="Unarchive cycle", + description="Restore an archived cycle to active status, making it available for regular use.", + request={}, + responses={ + 204: UNARCHIVED_RESPONSE, + }, + ) def delete(self, request, slug, project_id, cycle_id): + """Unarchive cycle + + Restore an archived cycle to active status, making it available for regular use. + The cycle will reappear in active cycle lists. + """ cycle = Cycle.objects.get( pk=cycle_id, project_id=project_id, workspace__slug=slug ) @@ -546,18 +827,14 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView): return Response(status=status.HTTP_204_NO_CONTENT) -class CycleIssueAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, - and `destroy` actions related to cycle issues. 
- - """ +class CycleIssueListCreateAPIEndpoint(BaseAPIView): + """Cycle Issue List and Create Endpoint""" serializer_class = CycleIssueSerializer model = CycleIssue webhook_event = "cycle_issue" - bulk = True permission_classes = [ProjectEntityPermission] + use_read_replica = True def get_queryset(self): return ( @@ -583,20 +860,27 @@ class CycleIssueAPIEndpoint(BaseAPIView): .distinct() ) - def get(self, request, slug, project_id, cycle_id, issue_id=None): - # Get - if issue_id: - cycle_issue = CycleIssue.objects.get( - workspace__slug=slug, - project_id=project_id, - cycle_id=cycle_id, - issue_id=issue_id, - ) - serializer = CycleIssueSerializer( - cycle_issue, fields=self.fields, expand=self.expand - ) - return Response(serializer.data, status=status.HTTP_200_OK) + @cycle_docs( + operation_id="list_cycle_work_items", + summary="List cycle work items", + description="Retrieve all work items assigned to a cycle.", + parameters=[CURSOR_PARAMETER, PER_PAGE_PARAMETER], + request={}, + responses={ + 200: create_paginated_response( + CycleIssueSerializer, + "PaginatedCycleIssueResponse", + "Paginated list of cycle work items", + "Paginated Cycle Work Items", + ), + }, + ) + def get(self, request, slug, project_id, cycle_id): + """List or retrieve cycle work items + Retrieve all work items assigned to a cycle or get details of a specific cycle work item. + Returns paginated results with work item details, assignees, and labels. + """ # List order_by = request.GET.get("order_by", "created_at") issues = ( @@ -644,19 +928,41 @@ class CycleIssueAPIEndpoint(BaseAPIView): ).data, ) + @cycle_docs( + operation_id="add_cycle_work_items", + summary="Add Work Items to Cycle", + description="Assign multiple work items to a cycle. 
Automatically handles bulk creation and updates with activity tracking.", + request=OpenApiRequest( + request=CycleIssueRequestSerializer, + examples=[CYCLE_ISSUE_REQUEST_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Cycle work items added", + response=CycleIssueSerializer, + examples=[CYCLE_ISSUE_EXAMPLE], + ), + 400: REQUIRED_FIELDS_RESPONSE, + }, + ) def post(self, request, slug, project_id, cycle_id): + """Add cycle issues + + Assign multiple work items to a cycle or move them from another cycle. + Automatically handles bulk creation and updates with activity tracking. + """ issues = request.data.get("issues", []) if not issues: return Response( - {"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST + {"error": "Work items are required"}, status=status.HTTP_400_BAD_REQUEST ) cycle = Cycle.objects.get( workspace__slug=slug, project_id=project_id, pk=cycle_id ) - # Get all CycleIssues already created + # Get all CycleWorkItems already created cycle_issues = list( CycleIssue.objects.filter(~Q(cycle_id=cycle_id), issue_id__in=issues) ) @@ -730,7 +1036,88 @@ class CycleIssueAPIEndpoint(BaseAPIView): status=status.HTTP_200_OK, ) + +class CycleIssueDetailAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, + and `destroy` actions related to cycle issues. 
+ + """ + + serializer_class = CycleIssueSerializer + model = CycleIssue + webhook_event = "cycle_issue" + bulk = True + permission_classes = [ProjectEntityPermission] + use_read_replica = True + + def get_queryset(self): + return ( + CycleIssue.objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + ) + .filter(cycle_id=self.kwargs.get("cycle_id")) + .select_related("project") + .select_related("workspace") + .select_related("cycle") + .select_related("issue", "issue__state", "issue__project") + .prefetch_related("issue__assignees", "issue__labels") + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + @cycle_docs( + operation_id="retrieve_cycle_work_item", + summary="Retrieve cycle work item", + description="Retrieve details of a specific cycle work item.", + responses={ + 200: OpenApiResponse( + description="Cycle work items", + response=CycleIssueSerializer, + examples=[CYCLE_ISSUE_EXAMPLE], + ), + }, + ) + def get(self, request, slug, project_id, cycle_id, issue_id): + """Retrieve cycle work item + + Retrieve details of a specific cycle work item. + Returns paginated results with work item details, assignees, and labels. 
+ """ + cycle_issue = CycleIssue.objects.get( + workspace__slug=slug, + project_id=project_id, + cycle_id=cycle_id, + issue_id=issue_id, + ) + serializer = CycleIssueSerializer( + cycle_issue, fields=self.fields, expand=self.expand + ) + return Response(serializer.data, status=status.HTTP_200_OK) + + @cycle_docs( + operation_id="delete_cycle_work_item", + summary="Delete cycle work item", + description="Remove a work item from a cycle while keeping the work item in the project.", + responses={ + 204: DELETED_RESPONSE, + }, + ) def delete(self, request, slug, project_id, cycle_id, issue_id): + """Remove cycle work item + + Remove a work item from a cycle while keeping the work item in the project. + Records the removal activity for tracking purposes. + """ cycle_issue = CycleIssue.objects.get( issue_id=issue_id, workspace__slug=slug, @@ -764,7 +1151,54 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView): permission_classes = [ProjectEntityPermission] + @cycle_docs( + operation_id="transfer_cycle_work_items", + summary="Transfer cycle work items", + description="Move incomplete work items from the current cycle to a new target cycle. 
Captures progress snapshot and transfers only unfinished work items.", + request=OpenApiRequest( + request=TransferCycleIssueRequestSerializer, + examples=[TRANSFER_CYCLE_ISSUE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Work items transferred successfully", + response={ + "type": "object", + "properties": { + "message": { + "type": "string", + "description": "Success message", + "example": "Success", + }, + }, + }, + examples=[TRANSFER_CYCLE_ISSUE_SUCCESS_EXAMPLE], + ), + 400: OpenApiResponse( + description="Bad request", + response={ + "type": "object", + "properties": { + "error": { + "type": "string", + "description": "Error message", + "example": "New Cycle Id is required", + }, + }, + }, + examples=[ + TRANSFER_CYCLE_ISSUE_ERROR_EXAMPLE, + TRANSFER_CYCLE_COMPLETED_ERROR_EXAMPLE, + ], + ), + }, + ) def post(self, request, slug, project_id, cycle_id): + """Transfer cycle issues + + Move incomplete issues from the current cycle to a new target cycle. + Captures progress snapshot and transfers only unfinished work items. 
+ """ new_cycle_id = request.data.get("new_cycle_id", False) if not new_cycle_id: diff --git a/apps/api/plane/api/views/intake.py b/apps/api/plane/api/views/intake.py index 93acb0664..1ea9c73fd 100644 --- a/apps/api/plane/api/views/intake.py +++ b/apps/api/plane/api/views/intake.py @@ -12,30 +12,49 @@ from django.contrib.postgres.fields import ArrayField # Third party imports from rest_framework import status from rest_framework.response import Response +from drf_spectacular.utils import OpenApiResponse, OpenApiRequest # Module imports -from plane.api.serializers import IntakeIssueSerializer, IssueSerializer +from plane.api.serializers import ( + IntakeIssueSerializer, + IssueSerializer, + IntakeIssueCreateSerializer, + IntakeIssueUpdateSerializer, +) from plane.app.permissions import ProjectLitePermission from plane.bgtasks.issue_activities_task import issue_activity from plane.db.models import Intake, IntakeIssue, Issue, Project, ProjectMember, State from plane.utils.host import base_host from .base import BaseAPIView from plane.db.models.intake import SourceType +from plane.utils.openapi import ( + intake_docs, + WORKSPACE_SLUG_PARAMETER, + PROJECT_ID_PARAMETER, + ISSUE_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + create_paginated_response, + # Request Examples + INTAKE_ISSUE_CREATE_EXAMPLE, + INTAKE_ISSUE_UPDATE_EXAMPLE, + # Response Examples + INTAKE_ISSUE_EXAMPLE, + INVALID_REQUEST_RESPONSE, + DELETED_RESPONSE, +) -class IntakeIssueAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to intake issues. 
- - """ - - permission_classes = [ProjectLitePermission] +class IntakeIssueListCreateAPIEndpoint(BaseAPIView): + """Intake Work Item List and Create Endpoint""" serializer_class = IntakeIssueSerializer - model = IntakeIssue - filterset_fields = ["status"] + model = Intake + permission_classes = [ProjectLitePermission] + use_read_replica = True def get_queryset(self): intake = Intake.objects.filter( @@ -61,13 +80,33 @@ class IntakeIssueAPIEndpoint(BaseAPIView): .order_by(self.kwargs.get("order_by", "-created_at")) ) - def get(self, request, slug, project_id, issue_id=None): - if issue_id: - intake_issue_queryset = self.get_queryset().get(issue_id=issue_id) - intake_issue_data = IntakeIssueSerializer( - intake_issue_queryset, fields=self.fields, expand=self.expand - ).data - return Response(intake_issue_data, status=status.HTTP_200_OK) + @intake_docs( + operation_id="get_intake_work_items_list", + summary="List intake work items", + description="Retrieve all work items in the project's intake queue. Returns paginated results when listing all intake work items.", + parameters=[ + WORKSPACE_SLUG_PARAMETER, + PROJECT_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + IntakeIssueSerializer, + "PaginatedIntakeIssueResponse", + "Paginated list of intake work items", + "Paginated Intake Work Items", + ), + }, + ) + def get(self, request, slug, project_id): + """List intake work items + + Retrieve all work items in the project's intake queue. + Returns paginated results when listing all intake work items. + """ issue_queryset = self.get_queryset() return self.paginate( request=request, @@ -77,7 +116,33 @@ class IntakeIssueAPIEndpoint(BaseAPIView): ).data, ) + @intake_docs( + operation_id="create_intake_work_item", + summary="Create intake work item", + description="Submit a new work item to the project's intake queue for review and triage. 
Automatically creates the work item with default triage state and tracks activity.", + parameters=[ + WORKSPACE_SLUG_PARAMETER, + PROJECT_ID_PARAMETER, + ], + request=OpenApiRequest( + request=IntakeIssueCreateSerializer, + examples=[INTAKE_ISSUE_CREATE_EXAMPLE], + ), + responses={ + 201: OpenApiResponse( + description="Intake work item created", + response=IntakeIssueSerializer, + examples=[INTAKE_ISSUE_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + }, + ) def post(self, request, slug, project_id): + """Create intake work item + + Submit a new work item to the project's intake queue for review and triage. + Automatically creates the work item with default triage state and tracks activity. + """ if not request.data.get("issue", {}).get("name", False): return Response( {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST @@ -141,9 +206,100 @@ class IntakeIssueAPIEndpoint(BaseAPIView): ) serializer = IntakeIssueSerializer(intake_issue) - return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.data, status=status.HTTP_201_CREATED) + +class IntakeIssueDetailAPIEndpoint(BaseAPIView): + """Intake Issue API Endpoint""" + + permission_classes = [ProjectLitePermission] + + serializer_class = IntakeIssueSerializer + model = IntakeIssue + use_read_replica = True + + filterset_fields = ["status"] + + def get_queryset(self): + intake = Intake.objects.filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ).first() + + project = Project.objects.get( + workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id") + ) + + if intake is None and not project.intake_view: + return IntakeIssue.objects.none() + + return ( + IntakeIssue.objects.filter( + Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + intake_id=intake.id, + ) + .select_related("issue", "workspace", "project") + 
.order_by(self.kwargs.get("order_by", "-created_at")) + ) + + @intake_docs( + operation_id="retrieve_intake_work_item", + summary="Retrieve intake work item", + description="Retrieve details of a specific intake work item.", + parameters=[ + WORKSPACE_SLUG_PARAMETER, + PROJECT_ID_PARAMETER, + ISSUE_ID_PARAMETER, + ], + responses={ + 200: OpenApiResponse( + description="Intake work item", + response=IntakeIssueSerializer, + examples=[INTAKE_ISSUE_EXAMPLE], + ), + }, + ) + def get(self, request, slug, project_id, issue_id): + """Retrieve intake work item + + Retrieve details of a specific intake work item. + """ + intake_issue_queryset = self.get_queryset().get(issue_id=issue_id) + intake_issue_data = IntakeIssueSerializer( + intake_issue_queryset, fields=self.fields, expand=self.expand + ).data + return Response(intake_issue_data, status=status.HTTP_200_OK) + + @intake_docs( + operation_id="update_intake_work_item", + summary="Update intake work item", + description="Modify an existing intake work item's properties or status for triage processing. Supports status changes like accept, reject, or mark as duplicate.", + parameters=[ + WORKSPACE_SLUG_PARAMETER, + PROJECT_ID_PARAMETER, + ISSUE_ID_PARAMETER, + ], + request=OpenApiRequest( + request=IntakeIssueUpdateSerializer, + examples=[INTAKE_ISSUE_UPDATE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Intake work item updated", + response=IntakeIssueSerializer, + examples=[INTAKE_ISSUE_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + }, + ) def patch(self, request, slug, project_id, issue_id): + """Update intake work item + + Modify an existing intake work item's properties or status for triage processing. + Supports status changes like accept, reject, or mark as duplicate. 
+ """ intake = Intake.objects.filter( workspace__slug=slug, project_id=project_id ).first() @@ -180,7 +336,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView): request.user.id ): return Response( - {"error": "You cannot edit intake issues"}, + {"error": "You cannot edit intake work items"}, status=status.HTTP_400_BAD_REQUEST, ) @@ -251,7 +407,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView): # Only project admins and members can edit intake issue attributes if project_member.role > 15: - serializer = IntakeIssueSerializer( + serializer = IntakeIssueUpdateSerializer( intake_issue, data=request.data, partial=True ) current_instance = json.dumps( @@ -301,7 +457,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView): origin=base_host(request=request, is_app=True), intake=str(intake_issue.id), ) - + serializer = IntakeIssueSerializer(intake_issue) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) else: @@ -309,7 +465,25 @@ class IntakeIssueAPIEndpoint(BaseAPIView): IntakeIssueSerializer(intake_issue).data, status=status.HTTP_200_OK ) + @intake_docs( + operation_id="delete_intake_work_item", + summary="Delete intake work item", + description="Permanently remove an intake work item from the triage queue. Also deletes the underlying work item if it hasn't been accepted yet.", + parameters=[ + WORKSPACE_SLUG_PARAMETER, + PROJECT_ID_PARAMETER, + ISSUE_ID_PARAMETER, + ], + responses={ + 204: DELETED_RESPONSE, + }, + ) def delete(self, request, slug, project_id, issue_id): + """Delete intake work item + + Permanently remove an intake work item from the triage queue. + Also deletes the underlying work item if it hasn't been accepted yet. 
+ """ intake = Intake.objects.filter( workspace__slug=slug, project_id=project_id ).first() @@ -349,7 +523,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView): ).exists() ): return Response( - {"error": "Only admin or creator can delete the issue"}, + {"error": "Only admin or creator can delete the work item"}, status=status.HTTP_403_FORBIDDEN, ) issue.delete() diff --git a/apps/api/plane/api/views/issue.py b/apps/api/plane/api/views/issue.py index 6a5016bec..489fa4a08 100644 --- a/apps/api/plane/api/views/issue.py +++ b/apps/api/plane/api/views/issue.py @@ -1,6 +1,7 @@ # Python imports import json import uuid +import re # Django imports from django.core.serializers.json import DjangoJSONEncoder @@ -26,6 +27,16 @@ from django.conf import settings from rest_framework import status from rest_framework.response import Response +# drf-spectacular imports +from drf_spectacular.utils import ( + extend_schema, + OpenApiParameter, + OpenApiResponse, + OpenApiExample, + OpenApiRequest, +) +from drf_spectacular.types import OpenApiTypes + # Module imports from plane.api.serializers import ( IssueAttachmentSerializer, @@ -34,6 +45,12 @@ from plane.api.serializers import ( IssueLinkSerializer, IssueSerializer, LabelSerializer, + IssueAttachmentUploadSerializer, + IssueSearchSerializer, + IssueCommentCreateSerializer, + IssueLinkCreateSerializer, + IssueLinkUpdateSerializer, + LabelCreateUpdateSerializer, ) from plane.app.permissions import ( ProjectEntityPermission, @@ -58,8 +75,92 @@ from plane.bgtasks.storage_metadata_task import get_asset_object_metadata from .base import BaseAPIView from plane.utils.host import base_host from plane.bgtasks.webhook_task import model_activity +from plane.app.permissions import ROLE +from plane.utils.openapi import ( + work_item_docs, + label_docs, + issue_link_docs, + issue_comment_docs, + issue_activity_docs, + issue_attachment_docs, + WORKSPACE_SLUG_PARAMETER, + PROJECT_IDENTIFIER_PARAMETER, + ISSUE_IDENTIFIER_PARAMETER, + PROJECT_ID_PARAMETER, 
+ ISSUE_ID_PARAMETER, + LABEL_ID_PARAMETER, + COMMENT_ID_PARAMETER, + LINK_ID_PARAMETER, + ATTACHMENT_ID_PARAMETER, + ACTIVITY_ID_PARAMETER, + PROJECT_ID_QUERY_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + EXTERNAL_ID_PARAMETER, + EXTERNAL_SOURCE_PARAMETER, + ORDER_BY_PARAMETER, + SEARCH_PARAMETER, + SEARCH_PARAMETER_REQUIRED, + LIMIT_PARAMETER, + WORKSPACE_SEARCH_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + create_paginated_response, + # Request Examples + ISSUE_CREATE_EXAMPLE, + ISSUE_UPDATE_EXAMPLE, + ISSUE_UPSERT_EXAMPLE, + LABEL_CREATE_EXAMPLE, + LABEL_UPDATE_EXAMPLE, + ISSUE_LINK_CREATE_EXAMPLE, + ISSUE_LINK_UPDATE_EXAMPLE, + ISSUE_COMMENT_CREATE_EXAMPLE, + ISSUE_COMMENT_UPDATE_EXAMPLE, + ISSUE_ATTACHMENT_UPLOAD_EXAMPLE, + ATTACHMENT_UPLOAD_CONFIRM_EXAMPLE, + # Response Examples + ISSUE_EXAMPLE, + LABEL_EXAMPLE, + ISSUE_LINK_EXAMPLE, + ISSUE_COMMENT_EXAMPLE, + ISSUE_ATTACHMENT_EXAMPLE, + ISSUE_ATTACHMENT_NOT_UPLOADED_EXAMPLE, + ISSUE_SEARCH_EXAMPLE, + WORK_ITEM_NOT_FOUND_RESPONSE, + ISSUE_NOT_FOUND_RESPONSE, + PROJECT_NOT_FOUND_RESPONSE, + EXTERNAL_ID_EXISTS_RESPONSE, + DELETED_RESPONSE, + ADMIN_ONLY_RESPONSE, + LABEL_NOT_FOUND_RESPONSE, + LABEL_NAME_EXISTS_RESPONSE, + INVALID_REQUEST_RESPONSE, + LINK_NOT_FOUND_RESPONSE, + COMMENT_NOT_FOUND_RESPONSE, + ATTACHMENT_NOT_FOUND_RESPONSE, + BAD_SEARCH_REQUEST_RESPONSE, + UNAUTHORIZED_RESPONSE, + FORBIDDEN_RESPONSE, + WORKSPACE_NOT_FOUND_RESPONSE, +) from plane.bgtasks.work_item_link_task import crawl_work_item_link_title +def user_has_issue_permission( + user_id, project_id, issue=None, allowed_roles=None, allow_creator=True +): + if allow_creator and issue is not None and user_id == issue.created_by_id: + return True + + qs = ProjectMember.objects.filter( + project_id=project_id, + member_id=user_id, + is_active=True, + ) + if allowed_roles is not None: + qs = qs.filter(role__in=allowed_roles) + + return qs.exists() + class WorkspaceIssueAPIEndpoint(BaseAPIView): """ @@ -71,10 +172,11 @@ class 
WorkspaceIssueAPIEndpoint(BaseAPIView): webhook_event = "issue" permission_classes = [ProjectEntityPermission] serializer_class = IssueSerializer + use_read_replica = True @property - def project__identifier(self): - return self.kwargs.get("project__identifier", None) + def project_identifier(self): + return self.kwargs.get("project_identifier", None) def get_queryset(self): return ( @@ -85,7 +187,7 @@ class WorkspaceIssueAPIEndpoint(BaseAPIView): .values("count") ) .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project__identifier=self.kwargs.get("project__identifier")) + .filter(project__identifier=self.kwargs.get("project_identifier")) .select_related("project") .select_related("workspace") .select_related("state") @@ -95,8 +197,32 @@ class WorkspaceIssueAPIEndpoint(BaseAPIView): .order_by(self.kwargs.get("order_by", "-created_at")) ).distinct() - def get(self, request, slug, project__identifier=None, issue__identifier=None): - if issue__identifier and project__identifier: + @extend_schema( + operation_id="get_workspace_work_item", + summary="Retrieve work item by identifiers", + description="Retrieve a specific work item using workspace slug, project identifier, and issue identifier.", + tags=["Work Items"], + parameters=[ + WORKSPACE_SLUG_PARAMETER, + PROJECT_IDENTIFIER_PARAMETER, + ISSUE_IDENTIFIER_PARAMETER, + ], + responses={ + 200: OpenApiResponse( + description="Work item details", + response=IssueSerializer, + examples=[ISSUE_EXAMPLE], + ), + 404: WORK_ITEM_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_identifier=None, issue_identifier=None): + """Retrieve work item by identifiers + + Retrieve a specific work item using workspace slug, project identifier, and issue identifier. + This endpoint provides workspace-level access to work items. 
+ """ + if issue_identifier and project_identifier: issue = Issue.issue_objects.annotate( sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) .order_by() @@ -104,8 +230,8 @@ class WorkspaceIssueAPIEndpoint(BaseAPIView): .values("count") ).get( workspace__slug=slug, - project__identifier=project__identifier, - sequence_id=issue__identifier, + project__identifier=project_identifier, + sequence_id=issue_identifier, ) return Response( IssueSerializer(issue, fields=self.fields, expand=self.expand).data, @@ -113,17 +239,16 @@ class WorkspaceIssueAPIEndpoint(BaseAPIView): ) -class IssueAPIEndpoint(BaseAPIView): +class IssueListCreateAPIEndpoint(BaseAPIView): """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to issue. - + This viewset provides `list` and `create` on issue level """ model = Issue webhook_event = "issue" permission_classes = [ProjectEntityPermission] serializer_class = IssueSerializer + use_read_replica = True def get_queryset(self): return ( @@ -144,7 +269,37 @@ class IssueAPIEndpoint(BaseAPIView): .order_by(self.kwargs.get("order_by", "-created_at")) ).distinct() - def get(self, request, slug, project_id, pk=None): + @work_item_docs( + operation_id="list_work_items", + summary="List work items", + description="Retrieve a paginated list of all work items in a project. Supports filtering, ordering, and field selection through query parameters.", + parameters=[ + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + EXTERNAL_ID_PARAMETER, + EXTERNAL_SOURCE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + IssueSerializer, + "PaginatedWorkItemResponse", + "Paginated list of work items", + "Paginated Work Items", + ), + 400: INVALID_REQUEST_RESPONSE, + 404: PROJECT_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_id): + """List work items + + Retrieve a paginated list of all work items in a project. 
+ Supports filtering, ordering, and field selection through query parameters. + """ + external_id = request.GET.get("external_id") external_source = request.GET.get("external_source") @@ -160,18 +315,6 @@ class IssueAPIEndpoint(BaseAPIView): status=status.HTTP_200_OK, ) - if pk: - issue = Issue.issue_objects.annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ).get(workspace__slug=slug, project_id=project_id, pk=pk) - return Response( - IssueSerializer(issue, fields=self.fields, expand=self.expand).data, - status=status.HTTP_200_OK, - ) - # Custom ordering for priority and state priority_order = ["urgent", "high", "medium", "low", "none"] state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] @@ -204,6 +347,10 @@ class IssueAPIEndpoint(BaseAPIView): ) ) + total_issue_queryset = Issue.issue_objects.filter( + project_id=project_id, workspace__slug=slug + ) + # Priority Ordering if order_by_param == "priority" or order_by_param == "-priority": priority_order = ( @@ -263,12 +410,37 @@ class IssueAPIEndpoint(BaseAPIView): return self.paginate( request=request, queryset=(issue_queryset), + total_count_queryset=total_issue_queryset, on_results=lambda issues: IssueSerializer( issues, many=True, fields=self.fields, expand=self.expand ).data, ) + @work_item_docs( + operation_id="create_work_item", + summary="Create work item", + description="Create a new work item in the specified project with the provided details.", + request=OpenApiRequest( + request=IssueSerializer, + examples=[ISSUE_CREATE_EXAMPLE], + ), + responses={ + 201: OpenApiResponse( + description="Work Item created successfully", + response=IssueSerializer, + examples=[ISSUE_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: PROJECT_NOT_FOUND_RESPONSE, + 409: EXTERNAL_ID_EXISTS_RESPONSE, + }, + ) def post(self, request, slug, project_id): + """Create work item + + Create a new 
work item in the specified project with the provided details. + Supports external ID tracking for integration purposes. + """ project = Project.objects.get(pk=project_id) serializer = IssueSerializer( @@ -338,7 +510,104 @@ class IssueAPIEndpoint(BaseAPIView): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + +class IssueDetailAPIEndpoint(BaseAPIView): + """Issue Detail Endpoint""" + + model = Issue + webhook_event = "issue" + permission_classes = [ProjectEntityPermission] + serializer_class = IssueSerializer + use_read_replica = True + + def get_queryset(self): + return ( + Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(self.kwargs.get("order_by", "-created_at")) + ).distinct() + + @work_item_docs( + operation_id="retrieve_work_item", + summary="Retrieve work item", + description="Retrieve details of a specific work item.", + parameters=[ + PROJECT_ID_PARAMETER, + EXTERNAL_ID_PARAMETER, + EXTERNAL_SOURCE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: OpenApiResponse( + description="List of issues or issue details", + response=IssueSerializer, + examples=[ISSUE_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: WORK_ITEM_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_id, pk): + """Retrieve work item + + Retrieve details of a specific work item. + Supports filtering, ordering, and field selection through query parameters. 
+ """ + + issue = Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ).get(workspace__slug=slug, project_id=project_id, pk=pk) + return Response( + IssueSerializer(issue, fields=self.fields, expand=self.expand).data, + status=status.HTTP_200_OK, + ) + + @work_item_docs( + operation_id="put_work_item", + summary="Update or create work item", + description="Update an existing work item identified by external ID and source, or create a new one if it doesn't exist. Requires external_id and external_source parameters for identification.", + request=OpenApiRequest( + request=IssueSerializer, + examples=[ISSUE_UPSERT_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Work Item updated successfully", + response=IssueSerializer, + examples=[ISSUE_EXAMPLE], + ), + 201: OpenApiResponse( + description="Work Item created successfully", + response=IssueSerializer, + examples=[ISSUE_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: WORK_ITEM_NOT_FOUND_RESPONSE, + }, + ) def put(self, request, slug, project_id): + """Update or create work item + + Update an existing work item identified by external ID and source, or create a new one if it doesn't exist. + Requires external_id and external_source parameters for identification. + """ # Get the entities required for putting the issue, external_id and # external_source are must to identify the issue here project = Project.objects.get(pk=project_id) @@ -448,7 +717,34 @@ class IssueAPIEndpoint(BaseAPIView): status=status.HTTP_400_BAD_REQUEST, ) - def patch(self, request, slug, project_id, pk=None): + @work_item_docs( + operation_id="update_work_item", + summary="Partially update work item", + description="Partially update an existing work item with the provided fields. 
Supports external ID validation to prevent conflicts.", + parameters=[ + PROJECT_ID_PARAMETER, + ], + request=OpenApiRequest( + request=IssueSerializer, + examples=[ISSUE_UPDATE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Work Item patched successfully", + response=IssueSerializer, + examples=[ISSUE_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: WORK_ITEM_NOT_FOUND_RESPONSE, + 409: EXTERNAL_ID_EXISTS_RESPONSE, + }, + ) + def patch(self, request, slug, project_id, pk): + """Update work item + + Partially update an existing work item with the provided fields. + Supports external ID validation to prevent conflicts. + """ issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) project = Project.objects.get(pk=project_id) current_instance = json.dumps( @@ -495,7 +791,25 @@ class IssueAPIEndpoint(BaseAPIView): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - def delete(self, request, slug, project_id, pk=None): + @work_item_docs( + operation_id="delete_work_item", + summary="Delete work item", + description="Permanently delete an existing work item from the project. Only admins or the item creator can perform this action.", + parameters=[ + PROJECT_ID_PARAMETER, + ], + responses={ + 204: DELETED_RESPONSE, + 403: ADMIN_ONLY_RESPONSE, + 404: WORK_ITEM_NOT_FOUND_RESPONSE, + }, + ) + def delete(self, request, slug, project_id, pk): + """Delete work item + + Permanently delete an existing work item from the project. + Only admins or the item creator can perform this action. 
+ """ issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) if issue.created_by_id != request.user.id and ( not ProjectMember.objects.filter( @@ -507,7 +821,7 @@ class IssueAPIEndpoint(BaseAPIView): ).exists() ): return Response( - {"error": "Only admin or creator can delete the issue"}, + {"error": "Only admin or creator can delete the work item"}, status=status.HTTP_403_FORBIDDEN, ) current_instance = json.dumps( @@ -526,16 +840,13 @@ class IssueAPIEndpoint(BaseAPIView): return Response(status=status.HTTP_204_NO_CONTENT) -class LabelAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to the labels. - - """ +class LabelListCreateAPIEndpoint(BaseAPIView): + """Label List and Create Endpoint""" serializer_class = LabelSerializer model = Label permission_classes = [ProjectMemberPermission] + use_read_replica = True def get_queryset(self): return ( @@ -553,9 +864,31 @@ class LabelAPIEndpoint(BaseAPIView): .order_by(self.kwargs.get("order_by", "-created_at")) ) + @label_docs( + operation_id="create_label", + description="Create a new label in the specified project with name, color, and description.", + request=OpenApiRequest( + request=LabelCreateUpdateSerializer, + examples=[LABEL_CREATE_EXAMPLE], + ), + responses={ + 201: OpenApiResponse( + description="Label created successfully", + response=LabelSerializer, + examples=[LABEL_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 409: LABEL_NAME_EXISTS_RESPONSE, + }, + ) def post(self, request, slug, project_id): + """Create label + + Create a new label in the specified project with name, color, and description. + Supports external ID tracking for integration purposes. 
+ """ try: - serializer = LabelSerializer(data=request.data) + serializer = LabelCreateUpdateSerializer(data=request.data) if serializer.is_valid(): if ( request.data.get("external_id") @@ -582,6 +915,8 @@ class LabelAPIEndpoint(BaseAPIView): ) serializer.save(project_id=project_id) + label = Label.objects.get(pk=serializer.instance.id) + serializer = LabelSerializer(label) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) except IntegrityError: @@ -598,34 +933,116 @@ class LabelAPIEndpoint(BaseAPIView): status=status.HTTP_409_CONFLICT, ) - def get(self, request, slug, project_id, pk=None): - if pk is None: - return self.paginate( - request=request, - queryset=(self.get_queryset()), - on_results=lambda labels: LabelSerializer( - labels, many=True, fields=self.fields, expand=self.expand - ).data, - ) + @label_docs( + operation_id="list_labels", + description="Retrieve all labels in a project. Supports filtering by name and color.", + parameters=[ + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + LabelSerializer, + "PaginatedLabelResponse", + "Paginated list of labels", + "Paginated Labels", + ), + 400: INVALID_REQUEST_RESPONSE, + 404: PROJECT_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_id): + """List labels + + Retrieve all labels in the project. 
+ """ + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda labels: LabelSerializer( + labels, many=True, fields=self.fields, expand=self.expand + ).data, + ) + + +class LabelDetailAPIEndpoint(LabelListCreateAPIEndpoint): + """Label Detail Endpoint""" + + serializer_class = LabelSerializer + model = Label + permission_classes = [ProjectMemberPermission] + use_read_replica = True + + @label_docs( + operation_id="get_labels", + description="Retrieve details of a specific label.", + parameters=[ + LABEL_ID_PARAMETER, + ], + responses={ + 200: OpenApiResponse( + description="Labels", + response=LabelSerializer, + examples=[LABEL_EXAMPLE], + ), + 404: LABEL_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_id, pk): + """Retrieve label + + Retrieve details of a specific label. + """ label = self.get_queryset().get(pk=pk) - serializer = LabelSerializer(label, fields=self.fields, expand=self.expand) + serializer = LabelSerializer(label) return Response(serializer.data, status=status.HTTP_200_OK) - def patch(self, request, slug, project_id, pk=None): + @label_docs( + operation_id="update_label", + description="Partially update an existing label's properties like name, color, or description.", + parameters=[ + LABEL_ID_PARAMETER, + ], + request=OpenApiRequest( + request=LabelCreateUpdateSerializer, + examples=[LABEL_UPDATE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Label updated successfully", + response=LabelSerializer, + examples=[LABEL_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: LABEL_NOT_FOUND_RESPONSE, + 409: EXTERNAL_ID_EXISTS_RESPONSE, + }, + ) + def patch(self, request, slug, project_id, pk): + """Update label + + Partially update an existing label's properties like name, color, or description. + Validates external ID uniqueness if provided. 
+ """ label = self.get_queryset().get(pk=pk) - serializer = LabelSerializer(label, data=request.data, partial=True) + serializer = LabelCreateUpdateSerializer(label, data=request.data, partial=True) if serializer.is_valid(): if ( str(request.data.get("external_id")) and (label.external_id != str(request.data.get("external_id"))) - and Issue.objects.filter( + and Label.objects.filter( project_id=project_id, workspace__slug=slug, external_source=request.data.get( "external_source", label.external_source ), external_id=request.data.get("external_id"), - ).exists() + ) + .exclude(id=pk) + .exists() ): return Response( { @@ -635,26 +1052,40 @@ class LabelAPIEndpoint(BaseAPIView): status=status.HTTP_409_CONFLICT, ) serializer.save() + label = Label.objects.get(pk=serializer.instance.id) + serializer = LabelSerializer(label) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - def delete(self, request, slug, project_id, pk=None): + @label_docs( + operation_id="delete_label", + description="Permanently remove a label from the project. This action cannot be undone.", + parameters=[ + LABEL_ID_PARAMETER, + ], + responses={ + 204: DELETED_RESPONSE, + 404: LABEL_NOT_FOUND_RESPONSE, + }, + ) + def delete(self, request, slug, project_id, pk): + """Delete label + + Permanently remove a label from the project. + This action cannot be undone. + """ label = self.get_queryset().get(pk=pk) label.delete() return Response(status=status.HTTP_204_NO_CONTENT) -class IssueLinkAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to the links of the particular issue. 
+class IssueLinkListCreateAPIEndpoint(BaseAPIView): + """Work Item Link List and Create Endpoint""" - """ - - permission_classes = [ProjectEntityPermission] - - model = IssueLink serializer_class = IssueLinkSerializer + model = IssueLink + permission_classes = [ProjectEntityPermission] + use_read_replica = True def get_queryset(self): return ( @@ -670,7 +1101,139 @@ class IssueLinkAPIEndpoint(BaseAPIView): .distinct() ) - def get(self, request, slug, project_id, issue_id, pk=None): + @issue_link_docs( + operation_id="list_work_item_links", + description="Retrieve all links associated with a work item. Supports filtering by URL, title, and metadata.", + parameters=[ + ISSUE_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + IssueLinkSerializer, + "PaginatedIssueLinkResponse", + "Paginated list of work item links", + "Paginated Work Item Links", + ), + 400: INVALID_REQUEST_RESPONSE, + 404: ISSUE_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_id, issue_id): + """List work item links + + Retrieve all links associated with a work item. 
+ """ + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda issue_links: IssueLinkSerializer( + issue_links, many=True, fields=self.fields, expand=self.expand + ).data, + ) + + @issue_link_docs( + operation_id="create_work_item_link", + description="Add a new external link to a work item with URL, title, and metadata.", + parameters=[ + ISSUE_ID_PARAMETER, + ], + request=OpenApiRequest( + request=IssueLinkCreateSerializer, + examples=[ISSUE_LINK_CREATE_EXAMPLE], + ), + responses={ + 201: OpenApiResponse( + description="Work item link created successfully", + response=IssueLinkSerializer, + examples=[ISSUE_LINK_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: ISSUE_NOT_FOUND_RESPONSE, + }, + ) + def post(self, request, slug, project_id, issue_id): + """Create issue link + + Add a new external link to a work item with URL, title, and metadata. + Automatically tracks link creation activity. + """ + serializer = IssueLinkCreateSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(project_id=project_id, issue_id=issue_id) + crawl_work_item_link_title.delay( + serializer.instance.id, serializer.instance.url + ) + link = IssueLink.objects.get(pk=serializer.instance.id) + link.created_by_id = request.data.get("created_by", request.user.id) + link.save(update_fields=["created_by"]) + issue_activity.delay( + type="link.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + actor_id=str(link.created_by_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + serializer = IssueLinkSerializer(link) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class IssueLinkDetailAPIEndpoint(BaseAPIView): + """Issue Link Detail Endpoint""" + + permission_classes = 
[ProjectEntityPermission] + + model = IssueLink + serializer_class = IssueLinkSerializer + use_read_replica = True + + def get_queryset(self): + return ( + IssueLink.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + ) + .filter(project__archived_at__isnull=True) + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + @issue_link_docs( + operation_id="retrieve_work_item_link", + description="Retrieve details of a specific work item link.", + parameters=[ + ISSUE_ID_PARAMETER, + LINK_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + IssueLinkSerializer, + "PaginatedIssueLinkDetailResponse", + "Work item link details or paginated list", + "Work Item Link Details", + ), + 404: OpenApiResponse(description="Issue not found"), + }, + ) + def get(self, request, slug, project_id, issue_id, pk): + """Retrieve work item link + + Retrieve details of a specific work item link. 
+ """ if pk is None: issue_links = self.get_queryset() serializer = IssueLinkSerializer( @@ -689,30 +1252,33 @@ class IssueLinkAPIEndpoint(BaseAPIView): ) return Response(serializer.data, status=status.HTTP_200_OK) - def post(self, request, slug, project_id, issue_id): - serializer = IssueLinkSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(project_id=project_id, issue_id=issue_id) - crawl_work_item_link_title.delay( - serializer.data.get("id"), serializer.data.get("url") - ) - - link = IssueLink.objects.get(pk=serializer.data["id"]) - link.created_by_id = request.data.get("created_by", request.user.id) - link.save(update_fields=["created_by"]) - issue_activity.delay( - type="link.activity.created", - requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), - issue_id=str(self.kwargs.get("issue_id")), - project_id=str(self.kwargs.get("project_id")), - actor_id=str(link.created_by_id), - current_instance=None, - epoch=int(timezone.now().timestamp()), - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - + @issue_link_docs( + operation_id="update_issue_link", + description="Modify the URL, title, or metadata of an existing issue link.", + parameters=[ + ISSUE_ID_PARAMETER, + LINK_ID_PARAMETER, + ], + request=OpenApiRequest( + request=IssueLinkUpdateSerializer, + examples=[ISSUE_LINK_UPDATE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Issue link updated successfully", + response=IssueLinkSerializer, + examples=[ISSUE_LINK_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: LINK_NOT_FOUND_RESPONSE, + }, + ) def patch(self, request, slug, project_id, issue_id, pk): + """Update issue link + + Modify the URL, title, or metadata of an existing issue link. + Tracks all changes in issue activity logs. 
+ """ issue_link = IssueLink.objects.get( workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk ) @@ -735,10 +1301,28 @@ class IssueLinkAPIEndpoint(BaseAPIView): current_instance=current_instance, epoch=int(timezone.now().timestamp()), ) + serializer = IssueLinkSerializer(issue_link) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + @issue_link_docs( + operation_id="delete_work_item_link", + description="Permanently remove an external link from a work item.", + parameters=[ + ISSUE_ID_PARAMETER, + LINK_ID_PARAMETER, + ], + responses={ + 204: OpenApiResponse(description="Work item link deleted successfully"), + 404: OpenApiResponse(description="Work item link not found"), + }, + ) def delete(self, request, slug, project_id, issue_id, pk): + """Delete work item link + + Permanently remove an external link from a work item. + Records deletion activity for audit purposes. + """ issue_link = IssueLink.objects.get( workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk ) @@ -758,17 +1342,14 @@ class IssueLinkAPIEndpoint(BaseAPIView): return Response(status=status.HTTP_204_NO_CONTENT) -class IssueCommentAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to comments of the particular issue. 
- - """ +class IssueCommentListCreateAPIEndpoint(BaseAPIView): + """Issue Comment List and Create Endpoint""" serializer_class = IssueCommentSerializer model = IssueComment webhook_event = "issue_comment" permission_classes = [ProjectLitePermission] + use_read_replica = True def get_queryset(self): return ( @@ -795,22 +1376,67 @@ class IssueCommentAPIEndpoint(BaseAPIView): .distinct() ) - def get(self, request, slug, project_id, issue_id, pk=None): - if pk: - issue_comment = self.get_queryset().get(pk=pk) - serializer = IssueCommentSerializer( - issue_comment, fields=self.fields, expand=self.expand - ) - return Response(serializer.data, status=status.HTTP_200_OK) + @issue_comment_docs( + operation_id="list_work_item_comments", + description="Retrieve all comments for a work item.", + parameters=[ + ISSUE_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + IssueCommentSerializer, + "PaginatedIssueCommentResponse", + "Paginated list of work item comments", + "Paginated Work Item Comments", + ), + 404: OpenApiResponse(description="Issue not found"), + }, + ) + def get(self, request, slug, project_id, issue_id): + """List work item comments + + Retrieve all comments for a work item. 
+ """ return self.paginate( request=request, queryset=(self.get_queryset()), - on_results=lambda issue_comment: IssueCommentSerializer( - issue_comment, many=True, fields=self.fields, expand=self.expand + on_results=lambda issue_comments: IssueCommentSerializer( + issue_comments, many=True, fields=self.fields, expand=self.expand ).data, ) + @issue_comment_docs( + operation_id="create_work_item_comment", + description="Add a new comment to a work item with HTML content.", + parameters=[ + ISSUE_ID_PARAMETER, + ], + request=OpenApiRequest( + request=IssueCommentCreateSerializer, + examples=[ISSUE_COMMENT_CREATE_EXAMPLE], + ), + responses={ + 201: OpenApiResponse( + description="Work item comment created successfully", + response=IssueCommentSerializer, + examples=[ISSUE_COMMENT_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: ISSUE_NOT_FOUND_RESPONSE, + 409: EXTERNAL_ID_EXISTS_RESPONSE, + }, + ) def post(self, request, slug, project_id, issue_id): + """Create work item comment + + Add a new comment to a work item with HTML content. + Supports external ID tracking for integration purposes. 
+ """ # Validation check if the issue already exists if ( request.data.get("external_id") @@ -830,18 +1456,18 @@ class IssueCommentAPIEndpoint(BaseAPIView): ).first() return Response( { - "error": "Issue Comment with the same external id and external source already exists", + "error": "Work item comment with the same external id and external source already exists", "id": str(issue_comment.id), }, status=status.HTTP_409_CONFLICT, ) - serializer = IssueCommentSerializer(data=request.data) + serializer = IssueCommentCreateSerializer(data=request.data) if serializer.is_valid(): serializer.save( project_id=project_id, issue_id=issue_id, actor=request.user ) - issue_comment = IssueComment.objects.get(pk=serializer.data.get("id")) + issue_comment = IssueComment.objects.get(pk=serializer.instance.id) # Update the created_at and the created_by and save the comment issue_comment.created_at = request.data.get("created_at", timezone.now()) issue_comment.created_by_id = request.data.get( @@ -858,20 +1484,113 @@ class IssueCommentAPIEndpoint(BaseAPIView): current_instance=None, epoch=int(timezone.now().timestamp()), ) + # Send the model activity model_activity.delay( model_name="issue_comment", - model_id=str(serializer.data["id"]), + model_id=str(serializer.instance.id), requested_data=request.data, current_instance=None, actor_id=request.user.id, slug=slug, origin=base_host(request=request, is_app=True), ) + + serializer = IssueCommentSerializer(issue_comment) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + +class IssueCommentDetailAPIEndpoint(BaseAPIView): + """Work Item Comment Detail Endpoint""" + + serializer_class = IssueCommentSerializer + model = IssueComment + webhook_event = "issue_comment" + permission_classes = [ProjectLitePermission] + use_read_replica = True + + def get_queryset(self): + return ( + IssueComment.objects.filter(workspace__slug=self.kwargs.get("slug")) + 
.filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + ) + .filter(project__archived_at__isnull=True) + .select_related("workspace", "project", "issue", "actor") + .annotate( + is_member=Exists( + ProjectMember.objects.filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + member_id=self.request.user.id, + is_active=True, + ) + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + @issue_comment_docs( + operation_id="retrieve_work_item_comment", + description="Retrieve details of a specific comment.", + parameters=[ + ISSUE_ID_PARAMETER, + COMMENT_ID_PARAMETER, + ], + responses={ + 200: OpenApiResponse( + description="Work item comments", + response=IssueCommentSerializer, + examples=[ISSUE_COMMENT_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: ISSUE_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_id, issue_id, pk): + """Retrieve issue comment + + Retrieve details of a specific comment. 
+ """ + issue_comment = self.get_queryset().get(pk=pk) + serializer = IssueCommentSerializer( + issue_comment, fields=self.fields, expand=self.expand + ) + return Response(serializer.data, status=status.HTTP_200_OK) + + @issue_comment_docs( + operation_id="update_work_item_comment", + description="Modify the content of an existing comment on a work item.", + parameters=[ + ISSUE_ID_PARAMETER, + COMMENT_ID_PARAMETER, + ], + request=OpenApiRequest( + request=IssueCommentCreateSerializer, + examples=[ISSUE_COMMENT_UPDATE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Work item comment updated successfully", + response=IssueCommentSerializer, + examples=[ISSUE_COMMENT_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: COMMENT_NOT_FOUND_RESPONSE, + 409: EXTERNAL_ID_EXISTS_RESPONSE, + }, + ) def patch(self, request, slug, project_id, issue_id, pk): + """Update work item comment + + Modify the content of an existing comment on a work item. + Validates external ID uniqueness if provided. 
+ """ issue_comment = IssueComment.objects.get( workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk ) @@ -895,13 +1614,13 @@ class IssueCommentAPIEndpoint(BaseAPIView): ): return Response( { - "error": "Issue Comment with the same external id and external source already exists", + "error": "Work item comment with the same external id and external source already exists", "id": str(issue_comment.id), }, status=status.HTTP_409_CONFLICT, ) - serializer = IssueCommentSerializer( + serializer = IssueCommentCreateSerializer( issue_comment, data=request.data, partial=True ) if serializer.is_valid(): @@ -925,10 +1644,30 @@ class IssueCommentAPIEndpoint(BaseAPIView): slug=slug, origin=base_host(request=request, is_app=True), ) + + issue_comment = IssueComment.objects.get(pk=serializer.instance.id) + serializer = IssueCommentSerializer(issue_comment) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + @issue_comment_docs( + operation_id="delete_work_item_comment", + description="Permanently remove a comment from a work item. Records deletion activity for audit purposes.", + parameters=[ + ISSUE_ID_PARAMETER, + COMMENT_ID_PARAMETER, + ], + responses={ + 204: OpenApiResponse(description="Work item comment deleted successfully"), + 404: COMMENT_NOT_FOUND_RESPONSE, + }, + ) def delete(self, request, slug, project_id, issue_id, pk): + """Delete issue comment + + Permanently remove a comment from a work item. + Records deletion activity for audit purposes. 
+ """ issue_comment = IssueComment.objects.get( workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk ) @@ -948,10 +1687,38 @@ class IssueCommentAPIEndpoint(BaseAPIView): return Response(status=status.HTTP_204_NO_CONTENT) -class IssueActivityAPIEndpoint(BaseAPIView): +class IssueActivityListAPIEndpoint(BaseAPIView): permission_classes = [ProjectEntityPermission] + use_read_replica = True - def get(self, request, slug, project_id, issue_id, pk=None): + @issue_activity_docs( + operation_id="list_work_item_activities", + description="Retrieve all activities for a work item. Supports filtering by activity type and date range.", + parameters=[ + ISSUE_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + IssueActivitySerializer, + "PaginatedIssueActivityResponse", + "Paginated list of issue activities", + "Paginated Issue Activities", + ), + 400: INVALID_REQUEST_RESPONSE, + 404: ISSUE_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_id, issue_id): + """List issue activities + + Retrieve chronological activity logs for an issue. + Excludes comment, vote, reaction, and draft activities. 
+ """ issue_activities = ( IssueActivity.objects.filter( issue_id=issue_id, workspace__slug=slug, project_id=project_id @@ -965,10 +1732,62 @@ class IssueActivityAPIEndpoint(BaseAPIView): .select_related("actor", "workspace", "issue", "project") ).order_by(request.GET.get("order_by", "created_at")) - if pk: - issue_activities = issue_activities.get(pk=pk) - serializer = IssueActivitySerializer(issue_activities) - return Response(serializer.data, status=status.HTTP_200_OK) + return self.paginate( + request=request, + queryset=(issue_activities), + on_results=lambda issue_activity: IssueActivitySerializer( + issue_activity, many=True, fields=self.fields, expand=self.expand + ).data, + ) + + +class IssueActivityDetailAPIEndpoint(BaseAPIView): + """Issue Activity Detail Endpoint""" + + permission_classes = [ProjectEntityPermission] + use_read_replica = True + + @issue_activity_docs( + operation_id="retrieve_work_item_activity", + description="Retrieve details of a specific activity.", + parameters=[ + ISSUE_ID_PARAMETER, + ACTIVITY_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + IssueActivitySerializer, + "PaginatedIssueActivityDetailResponse", + "Paginated list of work item activities", + "Work Item Activity Details", + ), + 400: INVALID_REQUEST_RESPONSE, + 404: ISSUE_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_id, issue_id, pk): + """Retrieve issue activity + + Retrieve details of a specific activity. + Excludes comment, vote, reaction, and draft activities. 
+ """ + issue_activities = ( + IssueActivity.objects.filter( + issue_id=issue_id, workspace__slug=slug, project_id=project_id + ) + .filter( + ~Q(field__in=["comment", "vote", "reaction", "draft"]), + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + ) + .filter(project__archived_at__isnull=True) + .select_related("actor", "workspace", "issue", "project") + ).order_by(request.GET.get("order_by", "created_at")) return self.paginate( request=request, @@ -979,12 +1798,109 @@ class IssueActivityAPIEndpoint(BaseAPIView): ) -class IssueAttachmentEndpoint(BaseAPIView): - serializer_class = IssueAttachmentSerializer - permission_classes = [ProjectEntityPermission] - model = FileAsset +class IssueAttachmentListCreateAPIEndpoint(BaseAPIView): + """Issue Attachment List and Create Endpoint""" + serializer_class = IssueAttachmentSerializer + model = FileAsset + use_read_replica = True + + @issue_attachment_docs( + operation_id="create_work_item_attachment", + description="Generate presigned URL for uploading file attachments to a work item.", + parameters=[ + ISSUE_ID_PARAMETER, + ], + request=OpenApiRequest( + request=IssueAttachmentUploadSerializer, + examples=[ISSUE_ATTACHMENT_UPLOAD_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Presigned download URL generated successfully", + examples=[ + OpenApiExample( + name="Work Item Attachment Response", + value={ + "upload_data": { + "url": "https://s3.amazonaws.com/bucket/file.pdf?signed-url", + "fields": { + "key": "file.pdf", + "AWSAccessKeyId": "AKIAIOSFODNN7EXAMPLE", + "policy": "EXAMPLE", + "signature": "EXAMPLE", + "acl": "public-read", + "Content-Type": "application/pdf", + }, + }, + "asset_id": "550e8400-e29b-41d4-a716-446655440000", + "asset_url": "https://s3.amazonaws.com/bucket/file.pdf?signed-url", + "attachment": { + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "file.pdf", + "type": "application/pdf", + "size": 1234567890, + 
"url": "https://s3.amazonaws.com/bucket/file.pdf?signed-url", + }, + }, + ) + ], + ), + 400: OpenApiResponse( + description="Validation error", + examples=[ + OpenApiExample( + name="Missing required fields", + value={ + "error": "Name and size are required fields.", + "status": False, + }, + ), + OpenApiExample( + name="Invalid file type", + value={"error": "Invalid file type.", "status": False}, + ), + ], + ), + 404: OpenApiResponse( + description="Issue or Project or Workspace not found", + examples=[ + OpenApiExample( + name="Workspace not found", + value={"error": "Workspace not found"}, + ), + OpenApiExample( + name="Project not found", value={"error": "Project not found"} + ), + OpenApiExample( + name="Issue not found", value={"error": "Issue not found"} + ), + ], + ), + }, + ) def post(self, request, slug, project_id, issue_id): + """Create work item attachment + + Generate presigned URL for uploading file attachments to a work item. + Validates file type and size before creating the attachment record. 
+ """ + issue = Issue.objects.get( + pk=issue_id, workspace__slug=slug, project_id=project_id + ) + # if the user is creator or admin,member then allow the upload + if not user_has_issue_permission( + request.user.id, + project_id=project_id, + issue=issue, + allowed_roles=[ROLE.ADMIN.value, ROLE.MEMBER.value], + allow_creator=True, + ): + return Response( + {"error": "You are not allowed to upload this attachment"}, + status=status.HTTP_403_FORBIDDEN, + ) + name = request.data.get("name") type = request.data.get("type", False) size = request.data.get("size") @@ -1071,7 +1987,82 @@ class IssueAttachmentEndpoint(BaseAPIView): status=status.HTTP_200_OK, ) + @issue_attachment_docs( + operation_id="list_work_item_attachments", + description="Retrieve all attachments for a work item.", + parameters=[ + ISSUE_ID_PARAMETER, + ], + responses={ + 200: OpenApiResponse( + description="Work item attachment", + response=IssueAttachmentSerializer, + examples=[ISSUE_ATTACHMENT_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: ATTACHMENT_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_id, issue_id): + """List issue attachments + + List all attachments for an issue. + """ + # Get all the attachments + issue_attachments = FileAsset.objects.filter( + issue_id=issue_id, + entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, + workspace__slug=slug, + project_id=project_id, + is_uploaded=True, + ) + # Serialize the attachments + serializer = IssueAttachmentSerializer(issue_attachments, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class IssueAttachmentDetailAPIEndpoint(BaseAPIView): + """Issue Attachment Detail Endpoint""" + + serializer_class = IssueAttachmentSerializer + model = FileAsset + use_read_replica = True + + @issue_attachment_docs( + operation_id="delete_work_item_attachment", + description="Permanently remove an attachment from a work item. 
Records deletion activity for audit purposes.", + parameters=[ + ATTACHMENT_ID_PARAMETER, + ], + responses={ + 204: OpenApiResponse( + description="Work item attachment deleted successfully" + ), + 404: ATTACHMENT_NOT_FOUND_RESPONSE, + }, + ) def delete(self, request, slug, project_id, issue_id, pk): + """Delete work item attachment + + Soft delete an attachment from a work item by marking it as deleted. + Records deletion activity and triggers metadata cleanup. + """ + issue = Issue.objects.get( + pk=issue_id, workspace__slug=slug, project_id=project_id + ) + # if the request user is creator or admin then delete the attachment + if not user_has_issue_permission( + request.user, + project_id=project_id, + issue=issue, + allowed_roles=[ROLE.ADMIN.value], + allow_creator=True, + ): + return Response( + {"error": "You are not allowed to delete this attachment"}, + status=status.HTTP_403_FORBIDDEN, + ) + issue_attachment = FileAsset.objects.get( pk=pk, workspace__slug=slug, project_id=project_id ) @@ -1097,41 +2088,127 @@ class IssueAttachmentEndpoint(BaseAPIView): issue_attachment.save() return Response(status=status.HTTP_204_NO_CONTENT) - def get(self, request, slug, project_id, issue_id, pk=None): - if pk: - # Get the asset - asset = FileAsset.objects.get( - id=pk, workspace__slug=slug, project_id=project_id - ) + @issue_attachment_docs( + operation_id="retrieve_work_item_attachment", + description="Download attachment file. 
Returns a redirect to the presigned download URL.", + parameters=[ + ATTACHMENT_ID_PARAMETER, + ], + responses={ + 302: OpenApiResponse( + description="Redirect to presigned download URL", + ), + 400: OpenApiResponse( + description="Asset not uploaded", + response={ + "type": "object", + "properties": { + "error": { + "type": "string", + "description": "Error message", + "example": "The asset is not uploaded.", + }, + "status": { + "type": "boolean", + "description": "Request status", + "example": False, + }, + }, + }, + examples=[ISSUE_ATTACHMENT_NOT_UPLOADED_EXAMPLE], + ), + 404: ATTACHMENT_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, project_id, issue_id, pk): + """Retrieve work item attachment - # Check if the asset is uploaded - if not asset.is_uploaded: - return Response( - {"error": "The asset is not uploaded.", "status": False}, - status=status.HTTP_400_BAD_REQUEST, - ) - - storage = S3Storage(request=request) - presigned_url = storage.generate_presigned_url( - object_name=asset.asset.name, - disposition="attachment", - filename=asset.attributes.get("name"), - ) - return HttpResponseRedirect(presigned_url) - - # Get all the attachments - issue_attachments = FileAsset.objects.filter( - issue_id=issue_id, - entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, - workspace__slug=slug, + Retrieve details of a specific attachment. 
+ """ + # if the user is part of the project then allow the download + if not user_has_issue_permission( + request.user, project_id=project_id, - is_uploaded=True, - ) - # Serialize the attachments - serializer = IssueAttachmentSerializer(issue_attachments, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) + issue=None, + allowed_roles=None, + allow_creator=False, + ): + return Response( + {"error": "You are not allowed to download this attachment"}, + status=status.HTTP_403_FORBIDDEN, + ) + # Get the asset + asset = FileAsset.objects.get( + id=pk, workspace__slug=slug, project_id=project_id + ) + + # Check if the asset is uploaded + if not asset.is_uploaded: + return Response( + {"error": "The asset is not uploaded.", "status": False}, + status=status.HTTP_400_BAD_REQUEST, + ) + + storage = S3Storage(request=request) + presigned_url = storage.generate_presigned_url( + object_name=asset.asset.name, + disposition="attachment", + filename=asset.attributes.get("name"), + ) + return HttpResponseRedirect(presigned_url) + + @issue_attachment_docs( + operation_id="upload_work_item_attachment", + description="Mark an attachment as uploaded after successful file transfer to storage.", + parameters=[ + ATTACHMENT_ID_PARAMETER, + ], + request=OpenApiRequest( + request={ + "application/json": { + "type": "object", + "properties": { + "is_uploaded": { + "type": "boolean", + "description": "Mark attachment as uploaded", + } + }, + } + }, + examples=[ATTACHMENT_UPLOAD_CONFIRM_EXAMPLE], + ), + responses={ + 204: OpenApiResponse( + description="Work item attachment uploaded successfully" + ), + 400: INVALID_REQUEST_RESPONSE, + 404: ATTACHMENT_NOT_FOUND_RESPONSE, + }, + ) def patch(self, request, slug, project_id, issue_id, pk): + """Confirm attachment upload + + Mark an attachment as uploaded after successful file transfer to storage. + Triggers activity logging and metadata extraction. 
+ """ + + issue = Issue.objects.get( + pk=issue_id, workspace__slug=slug, project_id=project_id + ) + # if the user is creator or admin then allow the upload + if not user_has_issue_permission( + request.user, + project_id=project_id, + issue=issue, + allowed_roles=[ROLE.ADMIN.value, ROLE.MEMBER.value], + allow_creator=True, + ): + return Response( + {"error": "You are not allowed to upload this attachment"}, + status=status.HTTP_403_FORBIDDEN, + ) + issue_attachment = FileAsset.objects.get( pk=pk, workspace__slug=slug, project_id=project_id ) @@ -1160,3 +2237,83 @@ class IssueAttachmentEndpoint(BaseAPIView): get_asset_object_metadata.delay(str(issue_attachment.id)) issue_attachment.save() return Response(status=status.HTTP_204_NO_CONTENT) + + +class IssueSearchEndpoint(BaseAPIView): + """Endpoint to search across multiple fields in the issues""" + + use_read_replica = True + + @extend_schema( + operation_id="search_work_items", + tags=["Work Items"], + description="Perform semantic search across issue names, sequence IDs, and project identifiers.", + parameters=[ + WORKSPACE_SLUG_PARAMETER, + SEARCH_PARAMETER_REQUIRED, + LIMIT_PARAMETER, + WORKSPACE_SEARCH_PARAMETER, + PROJECT_ID_QUERY_PARAMETER, + ], + responses={ + 200: OpenApiResponse( + description="Work item search results", + response=IssueSearchSerializer, + examples=[ISSUE_SEARCH_EXAMPLE], + ), + 400: BAD_SEARCH_REQUEST_RESPONSE, + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: WORKSPACE_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug): + """Search work items + + Perform semantic search across work item names, sequence IDs, and project identifiers. + Supports workspace-wide or project-specific search with configurable result limits. 
+ """ + query = request.query_params.get("search", False) + limit = request.query_params.get("limit", 10) + workspace_search = request.query_params.get("workspace_search", "false") + project_id = request.query_params.get("project_id", False) + + if not query: + return Response({"issues": []}, status=status.HTTP_200_OK) + + # Build search query + fields = ["name", "sequence_id", "project__identifier"] + q = Q() + for field in fields: + if field == "sequence_id": + # Match whole integers only (exclude decimal numbers) + sequences = re.findall(r"\b\d+\b", query) + for sequence_id in sequences: + q |= Q(**{"sequence_id": sequence_id}) + else: + q |= Q(**{f"{field}__icontains": query}) + + # Filter issues + issues = Issue.issue_objects.filter( + q, + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + workspace__slug=slug, + ) + + # Apply project filter if not searching across workspace + if workspace_search == "false" and project_id: + issues = issues.filter(project_id=project_id) + + # Get results + issue_results = issues.distinct().values( + "name", + "id", + "sequence_id", + "project__identifier", + "project_id", + "workspace__slug", + )[: int(limit)] + + return Response({"issues": issue_results}, status=status.HTTP_200_OK) diff --git a/apps/api/plane/api/views/member.py b/apps/api/plane/api/views/member.py index 954ee030b..8ae662520 100644 --- a/apps/api/plane/api/views/member.py +++ b/apps/api/plane/api/views/member.py @@ -1,29 +1,122 @@ -# Python imports -import uuid - -# Django imports -from django.contrib.auth.hashers import make_password -from django.core.validators import validate_email -from django.core.exceptions import ValidationError - # Third Party imports from rest_framework.response import Response from rest_framework import status +from drf_spectacular.utils import ( + extend_schema, + OpenApiResponse, +) # Module imports from .base import BaseAPIView from 
plane.api.serializers import UserLiteSerializer -from plane.db.models import User, Workspace, Project, WorkspaceMember, ProjectMember +from plane.db.models import User, Workspace, WorkspaceMember, ProjectMember +from plane.app.permissions import ProjectMemberPermission, WorkSpaceAdminPermission +from plane.utils.openapi import ( + WORKSPACE_SLUG_PARAMETER, + PROJECT_ID_PARAMETER, + UNAUTHORIZED_RESPONSE, + FORBIDDEN_RESPONSE, + WORKSPACE_NOT_FOUND_RESPONSE, + PROJECT_NOT_FOUND_RESPONSE, + WORKSPACE_MEMBER_EXAMPLE, + PROJECT_MEMBER_EXAMPLE, +) -from plane.app.permissions import ProjectMemberPermission + +class WorkspaceMemberAPIEndpoint(BaseAPIView): + permission_classes = [WorkSpaceAdminPermission] + use_read_replica = True + + @extend_schema( + operation_id="get_workspace_members", + summary="List workspace members", + description="Retrieve all users who are members of the specified workspace.", + tags=["Members"], + parameters=[WORKSPACE_SLUG_PARAMETER], + responses={ + 200: OpenApiResponse( + description="List of workspace members with their roles", + response={ + "type": "array", + "items": { + "allOf": [ + {"$ref": "#/components/schemas/UserLite"}, + { + "type": "object", + "properties": { + "role": { + "type": "integer", + "description": "Member role in the workspace", + } + }, + }, + ] + }, + }, + examples=[WORKSPACE_MEMBER_EXAMPLE], + ), + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: WORKSPACE_NOT_FOUND_RESPONSE, + }, + ) + # Get all the users that are present inside the workspace + def get(self, request, slug): + """List workspace members + + Retrieve all users who are members of the specified workspace. + Returns user profiles with their respective workspace roles and permissions. 
+ """ + # Check if the workspace exists + if not Workspace.objects.filter(slug=slug).exists(): + return Response( + {"error": "Provided workspace does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace_members = WorkspaceMember.objects.filter( + workspace__slug=slug + ).select_related("member") + + # Get all the users with their roles + users_with_roles = [] + for workspace_member in workspace_members: + user_data = UserLiteSerializer(workspace_member.member).data + user_data["role"] = workspace_member.role + users_with_roles.append(user_data) + + return Response(users_with_roles, status=status.HTTP_200_OK) # API endpoint to get and insert users inside the workspace class ProjectMemberAPIEndpoint(BaseAPIView): permission_classes = [ProjectMemberPermission] + use_read_replica = True + @extend_schema( + operation_id="get_project_members", + summary="List project members", + description="Retrieve all users who are members of the specified project.", + tags=["Members"], + parameters=[WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + responses={ + 200: OpenApiResponse( + description="List of project members with their roles", + response=UserLiteSerializer, + examples=[PROJECT_MEMBER_EXAMPLE], + ), + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: PROJECT_NOT_FOUND_RESPONSE, + }, + ) # Get all the users that are present inside the workspace def get(self, request, slug, project_id): + """List project members + + Retrieve all users who are members of the specified project. + Returns user profiles with their project-specific roles and access levels. 
+ """ # Check if the workspace exists if not Workspace.objects.filter(slug=slug).exists(): return Response( @@ -42,91 +135,3 @@ class ProjectMemberAPIEndpoint(BaseAPIView): ).data return Response(users, status=status.HTTP_200_OK) - - # Insert a new user inside the workspace, and assign the user to the project - def post(self, request, slug, project_id): - # Check if user with email already exists, and send bad request if it's - # not present, check for workspace and valid project mandat - # ------------------- Validation ------------------- - if ( - request.data.get("email") is None - or request.data.get("display_name") is None - ): - return Response( - { - "error": "Expected email, display_name, workspace_slug, project_id, one or more of the fields are missing." - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - email = request.data.get("email") - - try: - validate_email(email) - except ValidationError: - return Response( - {"error": "Invalid email provided"}, status=status.HTTP_400_BAD_REQUEST - ) - - workspace = Workspace.objects.filter(slug=slug).first() - project = Project.objects.filter(pk=project_id).first() - - if not all([workspace, project]): - return Response( - {"error": "Provided workspace or project does not exist"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Check if user exists - user = User.objects.filter(email=email).first() - workspace_member = None - project_member = None - - if user: - # Check if user is part of the workspace - workspace_member = WorkspaceMember.objects.filter( - workspace=workspace, member=user - ).first() - if workspace_member: - # Check if user is part of the project - project_member = ProjectMember.objects.filter( - project=project, member=user - ).first() - if project_member: - return Response( - {"error": "User is already part of the workspace and project"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # If user does not exist, create the user - if not user: - user = User.objects.create( - email=email, - 
display_name=request.data.get("display_name"), - first_name=request.data.get("first_name", ""), - last_name=request.data.get("last_name", ""), - username=uuid.uuid4().hex, - password=make_password(uuid.uuid4().hex), - is_password_autoset=True, - is_active=False, - ) - user.save() - - # Create a workspace member for the user if not already a member - if not workspace_member: - workspace_member = WorkspaceMember.objects.create( - workspace=workspace, member=user, role=request.data.get("role", 5) - ) - workspace_member.save() - - # Create a project member for the user if not already a member - if not project_member: - project_member = ProjectMember.objects.create( - project=project, member=user, role=request.data.get("role", 5) - ) - project_member.save() - - # Serialize the user and return the response - user_data = UserLiteSerializer(user).data - - return Response(user_data, status=status.HTTP_201_CREATED) diff --git a/apps/api/plane/api/views/module.py b/apps/api/plane/api/views/module.py index 9995bb806..63112cd66 100644 --- a/apps/api/plane/api/views/module.py +++ b/apps/api/plane/api/views/module.py @@ -10,12 +10,16 @@ from django.core.serializers.json import DjangoJSONEncoder # Third party imports from rest_framework import status from rest_framework.response import Response +from drf_spectacular.utils import OpenApiResponse, OpenApiExample, OpenApiRequest # Module imports from plane.api.serializers import ( IssueSerializer, ModuleIssueSerializer, ModuleSerializer, + ModuleIssueRequestSerializer, + ModuleCreateSerializer, + ModuleUpdateSerializer, ) from plane.app.permissions import ProjectEntityPermission from plane.bgtasks.issue_activities_task import issue_activity @@ -34,19 +38,49 @@ from plane.db.models import ( from .base import BaseAPIView from plane.bgtasks.webhook_task import model_activity from plane.utils.host import base_host +from plane.utils.openapi import ( + module_docs, + module_issue_docs, + WORKSPACE_SLUG_PARAMETER, + PROJECT_ID_PARAMETER, + 
MODULE_ID_PARAMETER, + MODULE_PK_PARAMETER, + ISSUE_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + create_paginated_response, + # Request Examples + MODULE_CREATE_EXAMPLE, + MODULE_UPDATE_EXAMPLE, + MODULE_ISSUE_REQUEST_EXAMPLE, + # Response Examples + MODULE_EXAMPLE, + MODULE_ISSUE_EXAMPLE, + INVALID_REQUEST_RESPONSE, + PROJECT_NOT_FOUND_RESPONSE, + EXTERNAL_ID_EXISTS_RESPONSE, + MODULE_NOT_FOUND_RESPONSE, + DELETED_RESPONSE, + ADMIN_ONLY_RESPONSE, + REQUIRED_FIELDS_RESPONSE, + MODULE_ISSUE_NOT_FOUND_RESPONSE, + ARCHIVED_RESPONSE, + CANNOT_ARCHIVE_RESPONSE, + UNARCHIVED_RESPONSE, +) -class ModuleAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to module. +class ModuleListCreateAPIEndpoint(BaseAPIView): + """Module List and Create Endpoint""" - """ - - model = Module - permission_classes = [ProjectEntityPermission] serializer_class = ModuleSerializer + model = Module webhook_event = "module" + permission_classes = [ProjectEntityPermission] + use_read_replica = True def get_queryset(self): return ( @@ -136,9 +170,33 @@ class ModuleAPIEndpoint(BaseAPIView): .order_by(self.kwargs.get("order_by", "-created_at")) ) + @module_docs( + operation_id="create_module", + summary="Create module", + description="Create a new project module with specified name, description, and timeline.", + request=OpenApiRequest( + request=ModuleCreateSerializer, + examples=[MODULE_CREATE_EXAMPLE], + ), + responses={ + 201: OpenApiResponse( + description="Module created", + response=ModuleSerializer, + examples=[MODULE_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 404: PROJECT_NOT_FOUND_RESPONSE, + 409: EXTERNAL_ID_EXISTS_RESPONSE, + }, + ) def post(self, request, slug, project_id): + """Create module + + Create a new project module with specified name, description, and timeline. 
+ Automatically assigns the creator as module lead and tracks activity. + """ project = Project.objects.get(pk=project_id, workspace__slug=slug) - serializer = ModuleSerializer( + serializer = ModuleCreateSerializer( data=request.data, context={"project_id": project_id, "workspace_id": project.workspace_id}, ) @@ -170,19 +228,185 @@ class ModuleAPIEndpoint(BaseAPIView): # Send the model activity model_activity.delay( model_name="module", - model_id=str(serializer.data["id"]), + model_id=str(serializer.instance.id), requested_data=request.data, current_instance=None, actor_id=request.user.id, slug=slug, origin=base_host(request=request, is_app=True), ) - module = Module.objects.get(pk=serializer.data["id"]) + module = Module.objects.get(pk=serializer.instance.id) serializer = ModuleSerializer(module) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + @module_docs( + operation_id="list_modules", + summary="List modules", + description="Retrieve all modules in a project.", + parameters=[ + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + ModuleSerializer, + "PaginatedModuleResponse", + "Paginated list of modules", + "Paginated Modules", + ), + 404: OpenApiResponse(description="Module not found"), + }, + ) + def get(self, request, slug, project_id): + """List or retrieve modules + + Retrieve all modules in a project or get details of a specific module. + Returns paginated results with module statistics and member information. 
+ """ + return self.paginate( + request=request, + queryset=(self.get_queryset().filter(archived_at__isnull=True)), + on_results=lambda modules: ModuleSerializer( + modules, many=True, fields=self.fields, expand=self.expand + ).data, + ) + + +class ModuleDetailAPIEndpoint(BaseAPIView): + """Module Detail Endpoint""" + + model = Module + permission_classes = [ProjectEntityPermission] + serializer_class = ModuleSerializer + webhook_event = "module" + use_read_replica = True + + def get_queryset(self): + return ( + Module.objects.filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("project") + .select_related("workspace") + .select_related("lead") + .prefetch_related("members") + .prefetch_related( + Prefetch( + "link_module", + queryset=ModuleLink.objects.select_related("module", "created_by"), + ) + ) + .annotate( + total_issues=Count( + "issue_module", + filter=Q( + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + issue_module__deleted_at__isnull=True, + ), + distinct=True, + ) + ) + .annotate( + completed_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="completed", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + issue_module__deleted_at__isnull=True, + ), + distinct=True, + ) + ) + .annotate( + cancelled_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="cancelled", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + issue_module__deleted_at__isnull=True, + ), + distinct=True, + ) + ) + .annotate( + started_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="started", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + issue_module__deleted_at__isnull=True, + ), + distinct=True, + ) + ) + 
.annotate( + unstarted_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="unstarted", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + issue_module__deleted_at__isnull=True, + ), + distinct=True, + ) + ) + .annotate( + backlog_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="backlog", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + issue_module__deleted_at__isnull=True, + ), + distinct=True, + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) + ) + + @module_docs( + operation_id="update_module", + summary="Update module", + description="Modify an existing module's properties like name, description, status, or timeline.", + parameters=[ + MODULE_PK_PARAMETER, + ], + request=OpenApiRequest( + request=ModuleUpdateSerializer, + examples=[MODULE_UPDATE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Module updated successfully", + response=ModuleSerializer, + examples=[MODULE_EXAMPLE], + ), + 400: OpenApiResponse( + description="Invalid request data", + response=ModuleSerializer, + examples=[MODULE_UPDATE_EXAMPLE], + ), + 404: OpenApiResponse(description="Module not found"), + 409: OpenApiResponse( + description="Module with same external ID already exists" + ), + }, + ) def patch(self, request, slug, project_id, pk): + """Update module + + Modify an existing module's properties like name, description, status, or timeline. + Tracks all changes in model activity logs for audit purposes. 
+ """ module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug) current_instance = json.dumps( @@ -222,7 +446,7 @@ class ModuleAPIEndpoint(BaseAPIView): # Send the model activity model_activity.delay( model_name="module", - model_id=str(serializer.data["id"]), + model_id=str(serializer.instance.id), requested_data=request.data, current_instance=current_instance, actor_id=request.user.id, @@ -233,22 +457,50 @@ class ModuleAPIEndpoint(BaseAPIView): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - def get(self, request, slug, project_id, pk=None): - if pk: - queryset = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk) - data = ModuleSerializer( - queryset, fields=self.fields, expand=self.expand - ).data - return Response(data, status=status.HTTP_200_OK) - return self.paginate( - request=request, - queryset=(self.get_queryset().filter(archived_at__isnull=True)), - on_results=lambda modules: ModuleSerializer( - modules, many=True, fields=self.fields, expand=self.expand - ).data, - ) + @module_docs( + operation_id="retrieve_module", + summary="Retrieve module", + description="Retrieve details of a specific module.", + parameters=[ + MODULE_PK_PARAMETER, + ], + responses={ + 200: OpenApiResponse( + description="Module", + response=ModuleSerializer, + examples=[MODULE_EXAMPLE], + ), + 404: OpenApiResponse(description="Module not found"), + }, + ) + def get(self, request, slug, project_id, pk): + """Retrieve module + Retrieve details of a specific module. 
+ """ + queryset = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk) + data = ModuleSerializer(queryset, fields=self.fields, expand=self.expand).data + return Response(data, status=status.HTTP_200_OK) + + @module_docs( + operation_id="delete_module", + summary="Delete module", + description="Permanently remove a module and all its associated issue relationships.", + parameters=[ + MODULE_PK_PARAMETER, + ], + responses={ + 204: DELETED_RESPONSE, + 403: ADMIN_ONLY_RESPONSE, + 404: MODULE_NOT_FOUND_RESPONSE, + }, + ) def delete(self, request, slug, project_id, pk): + """Delete module + + Permanently remove a module and all its associated issue relationships. + Only admins or the module creator can perform this action. + """ module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) if module.created_by_id != request.user.id and ( not ProjectMember.objects.filter( @@ -293,19 +545,14 @@ class ModuleAPIEndpoint(BaseAPIView): return Response(status=status.HTTP_204_NO_CONTENT) -class ModuleIssueAPIEndpoint(BaseAPIView): - """ - This viewset automatically provides `list`, `create`, `retrieve`, - `update` and `destroy` actions related to module issues. 
- - """ +class ModuleIssueListCreateAPIEndpoint(BaseAPIView): + """Module Work Item List and Create Endpoint""" serializer_class = ModuleIssueSerializer model = ModuleIssue webhook_event = "module_issue" - bulk = True - permission_classes = [ProjectEntityPermission] + use_read_replica = True def get_queryset(self): return ( @@ -333,7 +580,35 @@ class ModuleIssueAPIEndpoint(BaseAPIView): .distinct() ) + @module_issue_docs( + operation_id="list_module_work_items", + summary="List module work items", + description="Retrieve all work items assigned to a module with detailed information.", + parameters=[ + MODULE_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + request={}, + responses={ + 200: create_paginated_response( + IssueSerializer, + "PaginatedModuleIssueResponse", + "Paginated list of module work items", + "Paginated Module Work Items", + ), + 404: OpenApiResponse(description="Module not found"), + }, + ) def get(self, request, slug, project_id, module_id): + """List module work items + + Retrieve all work items assigned to a module with detailed information. + Returns paginated results including assignees, labels, and attachments. + """ order_by = request.GET.get("order_by", "created_at") issues = ( Issue.issue_objects.filter( @@ -379,7 +654,33 @@ class ModuleIssueAPIEndpoint(BaseAPIView): ).data, ) + @module_issue_docs( + operation_id="add_module_work_items", + summary="Add Work Items to Module", + description="Assign multiple work items to a module or move them from another module. 
Automatically handles bulk creation and updates with activity tracking.", + parameters=[ + MODULE_ID_PARAMETER, + ], + request=OpenApiRequest( + request=ModuleIssueRequestSerializer, + examples=[MODULE_ISSUE_REQUEST_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Module issues added", + response=ModuleIssueSerializer, + examples=[MODULE_ISSUE_EXAMPLE], + ), + 400: REQUIRED_FIELDS_RESPONSE, + 404: MODULE_NOT_FOUND_RESPONSE, + }, + ) def post(self, request, slug, project_id, module_id): + """Add module work items + + Assign multiple work items to a module or move them from another module. + Automatically handles bulk creation and updates with activity tracking. + """ issues = request.data.get("issues", []) if not len(issues): return Response( @@ -459,7 +760,143 @@ class ModuleIssueAPIEndpoint(BaseAPIView): status=status.HTTP_200_OK, ) + +class ModuleIssueDetailAPIEndpoint(BaseAPIView): + """ + This viewset automatically provides `list`, `create`, `retrieve`, + `update` and `destroy` actions related to module work items. 
+ + """ + + serializer_class = ModuleIssueSerializer + model = ModuleIssue + webhook_event = "module_issue" + bulk = True + use_read_replica = True + + permission_classes = [ProjectEntityPermission] + + def get_queryset(self): + return ( + ModuleIssue.objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(module_id=self.kwargs.get("module_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + ) + .filter(project__archived_at__isnull=True) + .select_related("project") + .select_related("workspace") + .select_related("module") + .select_related("issue", "issue__state", "issue__project") + .prefetch_related("issue__assignees", "issue__labels") + .prefetch_related("module__members") + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + @module_issue_docs( + operation_id="retrieve_module_work_item", + summary="Retrieve module work item", + description="Retrieve details of a specific module work item.", + parameters=[ + MODULE_ID_PARAMETER, + ISSUE_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + IssueSerializer, + "PaginatedModuleIssueDetailResponse", + "Paginated list of module work item details", + "Module Work Item Details", + ), + 404: OpenApiResponse(description="Module not found"), + }, + ) + def get(self, request, slug, project_id, module_id, issue_id): + """List module work items + + Retrieve all work items assigned to a module with detailed information. + Returns paginated results including assignees, labels, and attachments. 
+ """ + order_by = request.GET.get("order_by", "created_at") + issues = ( + Issue.issue_objects.filter( + issue_module__module_id=module_id, + issue_module__deleted_at__isnull=True, + pk=issue_id, + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate(bridge_id=F("issue_module__id")) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("state") + .select_related("parent") + .prefetch_related("assignees") + .prefetch_related("labels") + .order_by(order_by) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=FileAsset.objects.filter( + issue_id=OuterRef("id"), + entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + return self.paginate( + request=request, + queryset=(issues), + on_results=lambda issues: IssueSerializer( + issues, many=True, fields=self.fields, expand=self.expand + ).data, + ) + + @module_issue_docs( + operation_id="delete_module_work_item", + summary="Delete module work item", + description="Remove a work item from a module while keeping the work item in the project.", + parameters=[ + MODULE_ID_PARAMETER, + ISSUE_ID_PARAMETER, + ], + responses={ + 204: DELETED_RESPONSE, + 404: MODULE_ISSUE_NOT_FOUND_RESPONSE, + }, + ) def delete(self, request, slug, project_id, module_id, issue_id): + """Remove module work item + + Remove a work item from a module while keeping the work item in the project. + Records the removal activity for tracking purposes. 
+ """ module_issue = ModuleIssue.objects.get( workspace__slug=slug, project_id=project_id, @@ -483,6 +920,7 @@ class ModuleIssueAPIEndpoint(BaseAPIView): class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView): permission_classes = [ProjectEntityPermission] + use_read_replica = True def get_queryset(self): return ( @@ -573,7 +1011,34 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView): .order_by(self.kwargs.get("order_by", "-created_at")) ) - def get(self, request, slug, project_id, pk): + @module_docs( + operation_id="list_archived_modules", + summary="List archived modules", + description="Retrieve all modules that have been archived in the project.", + parameters=[ + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + request={}, + responses={ + 200: create_paginated_response( + ModuleSerializer, + "PaginatedArchivedModuleResponse", + "Paginated list of archived modules", + "Paginated Archived Modules", + ), + 404: OpenApiResponse(description="Project not found"), + }, + ) + def get(self, request, slug, project_id): + """List archived modules + + Retrieve all modules that have been archived in the project. + Returns paginated results with module statistics. + """ return self.paginate( request=request, queryset=(self.get_queryset()), @@ -582,7 +1047,26 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView): ).data, ) + @module_docs( + operation_id="archive_module", + summary="Archive module", + description="Move a module to archived status for historical tracking.", + parameters=[ + MODULE_PK_PARAMETER, + ], + request={}, + responses={ + 204: ARCHIVED_RESPONSE, + 400: CANNOT_ARCHIVE_RESPONSE, + 404: MODULE_NOT_FOUND_RESPONSE, + }, + ) def post(self, request, slug, project_id, pk): + """Archive module + + Move a completed module to archived status for historical tracking. + Only modules with completed status can be archived. 
+ """ module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug) if module.status not in ["completed", "cancelled"]: return Response( @@ -599,7 +1083,24 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView): ).delete() return Response(status=status.HTTP_204_NO_CONTENT) + @module_docs( + operation_id="unarchive_module", + summary="Unarchive module", + description="Restore an archived module to active status, making it available for regular use.", + parameters=[ + MODULE_PK_PARAMETER, + ], + responses={ + 204: UNARCHIVED_RESPONSE, + 404: MODULE_NOT_FOUND_RESPONSE, + }, + ) def delete(self, request, slug, project_id, pk): + """Unarchive module + + Restore an archived module to active status, making it available for regular use. + The module will reappear in active module lists and become fully functional. + """ module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug) module.archived_at = None module.save() diff --git a/apps/api/plane/api/views/project.py b/apps/api/plane/api/views/project.py index 038d4faec..da946e3c3 100644 --- a/apps/api/plane/api/views/project.py +++ b/apps/api/plane/api/views/project.py @@ -11,9 +11,8 @@ from django.core.serializers.json import DjangoJSONEncoder from rest_framework import status from rest_framework.response import Response from rest_framework.serializers import ValidationError +from drf_spectacular.utils import OpenApiResponse, OpenApiExample, OpenApiRequest -from plane.api.serializers import ProjectSerializer -from plane.app.permissions import ProjectBasePermission # Module imports from plane.db.models import ( @@ -31,16 +30,44 @@ from plane.db.models import ( from plane.bgtasks.webhook_task import model_activity, webhook_activity from .base import BaseAPIView from plane.utils.host import base_host +from plane.api.serializers import ( + ProjectSerializer, + ProjectCreateSerializer, + ProjectUpdateSerializer, +) +from plane.app.permissions import ProjectBasePermission +from 
plane.utils.openapi import ( + project_docs, + PROJECT_ID_PARAMETER, + PROJECT_PK_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + create_paginated_response, + # Request Examples + PROJECT_CREATE_EXAMPLE, + PROJECT_UPDATE_EXAMPLE, + # Response Examples + PROJECT_EXAMPLE, + PROJECT_NOT_FOUND_RESPONSE, + WORKSPACE_NOT_FOUND_RESPONSE, + PROJECT_NAME_TAKEN_RESPONSE, + DELETED_RESPONSE, + ARCHIVED_RESPONSE, + UNARCHIVED_RESPONSE, +) -class ProjectAPIEndpoint(BaseAPIView): - """Project Endpoints to create, update, list, retrieve and delete endpoint""" +class ProjectListCreateAPIEndpoint(BaseAPIView): + """Project List and Create Endpoint""" serializer_class = ProjectSerializer model = Project webhook_event = "project" - permission_classes = [ProjectBasePermission] + use_read_replica = True def get_queryset(self): return ( @@ -104,42 +131,87 @@ class ProjectAPIEndpoint(BaseAPIView): .distinct() ) - def get(self, request, slug, pk=None): - if pk is None: - sort_order_query = ProjectMember.objects.filter( - member=request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - is_active=True, - ).values("sort_order") - projects = ( - self.get_queryset() - .annotate(sort_order=Subquery(sort_order_query)) - .prefetch_related( - Prefetch( - "project_projectmember", - queryset=ProjectMember.objects.filter( - workspace__slug=slug, is_active=True - ).select_related("member"), - ) - ) - .order_by(request.GET.get("order_by", "sort_order")) - ) - return self.paginate( - request=request, - queryset=(projects), - on_results=lambda projects: ProjectSerializer( - projects, many=True, fields=self.fields, expand=self.expand - ).data, - ) - project = self.get_queryset().get(workspace__slug=slug, pk=pk) - serializer = ProjectSerializer(project, fields=self.fields, expand=self.expand) - return Response(serializer.data, status=status.HTTP_200_OK) + @project_docs( + operation_id="list_projects", + 
summary="List or retrieve projects", + description="Retrieve all projects in a workspace or get details of a specific project.", + parameters=[ + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + ORDER_BY_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + ProjectSerializer, + "PaginatedProjectResponse", + "Paginated list of projects", + "Paginated Projects", + ), + 404: PROJECT_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug): + """List projects + Retrieve all projects in a workspace or get details of a specific project. + Returns projects ordered by user's custom sort order with member information. + """ + sort_order_query = ProjectMember.objects.filter( + member=request.user, + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + is_active=True, + ).values("sort_order") + projects = ( + self.get_queryset() + .annotate(sort_order=Subquery(sort_order_query)) + .prefetch_related( + Prefetch( + "project_projectmember", + queryset=ProjectMember.objects.filter( + workspace__slug=slug, is_active=True + ).select_related("member"), + ) + ) + .order_by(request.GET.get("order_by", "sort_order")) + ) + return self.paginate( + request=request, + queryset=(projects), + on_results=lambda projects: ProjectSerializer( + projects, many=True, fields=self.fields, expand=self.expand + ).data, + ) + + @project_docs( + operation_id="create_project", + summary="Create project", + description="Create a new project in the workspace with default states and member assignments.", + request=OpenApiRequest( + request=ProjectCreateSerializer, + examples=[PROJECT_CREATE_EXAMPLE], + ), + responses={ + 201: OpenApiResponse( + description="Project created successfully", + response=ProjectSerializer, + examples=[PROJECT_EXAMPLE], + ), + 404: WORKSPACE_NOT_FOUND_RESPONSE, + 409: PROJECT_NAME_TAKEN_RESPONSE, + }, + ) def post(self, request, slug): + """Create project + + Create a new project in the workspace with default 
states and member assignments. + Automatically adds the creator as admin and sets up default workflow states. + """ try: workspace = Workspace.objects.get(slug=slug) - serializer = ProjectSerializer( + serializer = ProjectCreateSerializer( data={**request.data}, context={"workspace_id": workspace.id} ) if serializer.is_valid(): @@ -147,25 +219,25 @@ class ProjectAPIEndpoint(BaseAPIView): # Add the user as Administrator to the project _ = ProjectMember.objects.create( - project_id=serializer.data["id"], member=request.user, role=20 + project_id=serializer.instance.id, member=request.user, role=20 ) # Also create the issue property for the user _ = IssueUserProperty.objects.create( - project_id=serializer.data["id"], user=request.user + project_id=serializer.instance.id, user=request.user ) - if serializer.data["project_lead"] is not None and str( - serializer.data["project_lead"] + if serializer.instance.project_lead is not None and str( + serializer.instance.project_lead ) != str(request.user.id): ProjectMember.objects.create( - project_id=serializer.data["id"], - member_id=serializer.data["project_lead"], + project_id=serializer.instance.id, + member_id=serializer.instance.project_lead, role=20, ) # Also create the issue property for the user IssueUserProperty.objects.create( - project_id=serializer.data["id"], - user_id=serializer.data["project_lead"], + project_id=serializer.instance.id, + user_id=serializer.instance.project_lead, ) # Default states @@ -219,7 +291,7 @@ class ProjectAPIEndpoint(BaseAPIView): ] ) - project = self.get_queryset().filter(pk=serializer.data["id"]).first() + project = self.get_queryset().filter(pk=serializer.instance.id).first() # Model activity model_activity.delay( @@ -251,7 +323,131 @@ class ProjectAPIEndpoint(BaseAPIView): status=status.HTTP_409_CONFLICT, ) + +class ProjectDetailAPIEndpoint(BaseAPIView): + """Project Endpoints to update, retrieve and delete endpoint""" + + serializer_class = ProjectSerializer + model = Project + 
webhook_event = "project" + + permission_classes = [ProjectBasePermission] + use_read_replica = True + + def get_queryset(self): + return ( + Project.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter( + Q( + project_projectmember__member=self.request.user, + project_projectmember__is_active=True, + ) + | Q(network=2) + ) + .select_related( + "workspace", "workspace__owner", "default_assignee", "project_lead" + ) + .annotate( + is_member=Exists( + ProjectMember.objects.filter( + member=self.request.user, + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + is_active=True, + ) + ) + ) + .annotate( + total_members=ProjectMember.objects.filter( + project_id=OuterRef("id"), member__is_bot=False, is_active=True + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + total_cycles=Cycle.objects.filter(project_id=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + total_modules=Module.objects.filter(project_id=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + member_role=ProjectMember.objects.filter( + project_id=OuterRef("pk"), + member_id=self.request.user.id, + is_active=True, + ).values("role") + ) + .annotate( + is_deployed=Exists( + DeployBoard.objects.filter( + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + ) + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + @project_docs( + operation_id="retrieve_project", + summary="Retrieve project", + description="Retrieve details of a specific project.", + parameters=[ + PROJECT_PK_PARAMETER, + ], + responses={ + 200: OpenApiResponse( + description="Project details", + response=ProjectSerializer, + examples=[PROJECT_EXAMPLE], + ), + 404: PROJECT_NOT_FOUND_RESPONSE, + }, + ) + def get(self, request, slug, pk): + """Retrieve project + + Retrieve 
details of a specific project. + """ + project = self.get_queryset().get(workspace__slug=slug, pk=pk) + serializer = ProjectSerializer(project, fields=self.fields, expand=self.expand) + return Response(serializer.data, status=status.HTTP_200_OK) + + @project_docs( + operation_id="update_project", + summary="Update project", + description="Partially update an existing project's properties like name, description, or settings.", + parameters=[ + PROJECT_PK_PARAMETER, + ], + request=OpenApiRequest( + request=ProjectUpdateSerializer, + examples=[PROJECT_UPDATE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="Project updated successfully", + response=ProjectSerializer, + examples=[PROJECT_EXAMPLE], + ), + 404: PROJECT_NOT_FOUND_RESPONSE, + 409: PROJECT_NAME_TAKEN_RESPONSE, + }, + ) def patch(self, request, slug, pk): + """Update project + + Partially update an existing project's properties like name, description, or settings. + Tracks changes in model activity logs for audit purposes. 
+ """ try: workspace = Workspace.objects.get(slug=slug) project = Project.objects.get(pk=pk) @@ -267,7 +463,7 @@ class ProjectAPIEndpoint(BaseAPIView): status=status.HTTP_400_BAD_REQUEST, ) - serializer = ProjectSerializer( + serializer = ProjectUpdateSerializer( project, data={**request.data, "intake_view": intake_view}, context={"workspace_id": workspace.id}, @@ -287,7 +483,7 @@ class ProjectAPIEndpoint(BaseAPIView): is_default=True, ) - project = self.get_queryset().filter(pk=serializer.data["id"]).first() + project = self.get_queryset().filter(pk=serializer.instance.id).first() model_activity.delay( model_name="project", @@ -318,7 +514,23 @@ class ProjectAPIEndpoint(BaseAPIView): status=status.HTTP_409_CONFLICT, ) + @project_docs( + operation_id="delete_project", + summary="Delete project", + description="Permanently remove a project and all its associated data from the workspace.", + parameters=[ + PROJECT_PK_PARAMETER, + ], + responses={ + 204: DELETED_RESPONSE, + }, + ) def delete(self, request, slug, pk): + """Delete project + + Permanently remove a project and all its associated data from the workspace. + Only admins can delete projects and the action cannot be undone. + """ project = Project.objects.get(pk=pk, workspace__slug=slug) # Delete the user favorite cycle UserFavorite.objects.filter( @@ -342,16 +554,52 @@ class ProjectAPIEndpoint(BaseAPIView): class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView): + """Project Archive and Unarchive Endpoint""" + permission_classes = [ProjectBasePermission] + @project_docs( + operation_id="archive_project", + summary="Archive project", + description="Move a project to archived status, hiding it from active project lists.", + parameters=[ + PROJECT_ID_PARAMETER, + ], + request={}, + responses={ + 204: ARCHIVED_RESPONSE, + }, + ) def post(self, request, slug, project_id): + """Archive project + + Move a project to archived status, hiding it from active project lists. 
+ Archived projects remain accessible but are excluded from regular workflows. + """ project = Project.objects.get(pk=project_id, workspace__slug=slug) project.archived_at = timezone.now() project.save() UserFavorite.objects.filter(workspace__slug=slug, project=project_id).delete() return Response(status=status.HTTP_204_NO_CONTENT) + @project_docs( + operation_id="unarchive_project", + summary="Unarchive project", + description="Restore an archived project to active status, making it available in regular workflows.", + parameters=[ + PROJECT_ID_PARAMETER, + ], + request={}, + responses={ + 204: UNARCHIVED_RESPONSE, + }, + ) def delete(self, request, slug, project_id): + """Unarchive project + + Restore an archived project to active status, making it available in regular workflows. + The project will reappear in active project lists and become fully functional. + """ project = Project.objects.get(pk=project_id, workspace__slug=slug) project.archived_at = None project.save() diff --git a/apps/api/plane/api/views/state.py b/apps/api/plane/api/views/state.py index 0fbbd222a..7b5d842de 100644 --- a/apps/api/plane/api/views/state.py +++ b/apps/api/plane/api/views/state.py @@ -4,19 +4,41 @@ from django.db import IntegrityError # Third party imports from rest_framework import status from rest_framework.response import Response +from drf_spectacular.utils import OpenApiResponse, OpenApiExample, OpenApiRequest +# Module imports from plane.api.serializers import StateSerializer from plane.app.permissions import ProjectEntityPermission from plane.db.models import Issue, State - -# Module imports from .base import BaseAPIView +from plane.utils.openapi import ( + state_docs, + STATE_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + create_paginated_response, + # Request Examples + STATE_CREATE_EXAMPLE, + STATE_UPDATE_EXAMPLE, + # Response Examples + STATE_EXAMPLE, + INVALID_REQUEST_RESPONSE, + STATE_NAME_EXISTS_RESPONSE, + 
DELETED_RESPONSE, + STATE_CANNOT_DELETE_RESPONSE, + EXTERNAL_ID_EXISTS_RESPONSE, +) -class StateAPIEndpoint(BaseAPIView): +class StateListCreateAPIEndpoint(BaseAPIView): + """State List and Create Endpoint""" + serializer_class = StateSerializer model = State permission_classes = [ProjectEntityPermission] + use_read_replica = True def get_queryset(self): return ( @@ -33,7 +55,30 @@ class StateAPIEndpoint(BaseAPIView): .distinct() ) + @state_docs( + operation_id="create_state", + summary="Create state", + description="Create a new workflow state for a project with specified name, color, and group.", + request=OpenApiRequest( + request=StateSerializer, + examples=[STATE_CREATE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="State created", + response=StateSerializer, + examples=[STATE_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 409: STATE_NAME_EXISTS_RESPONSE, + }, + ) def post(self, request, slug, project_id): + """Create state + + Create a new workflow state for a project with specified name, color, and group. + Supports external ID tracking for integration purposes. 
+ """ try: serializer = StateSerializer( data=request.data, context={"project_id": project_id} @@ -80,14 +125,31 @@ class StateAPIEndpoint(BaseAPIView): status=status.HTTP_409_CONFLICT, ) - def get(self, request, slug, project_id, state_id=None): - if state_id: - serializer = StateSerializer( - self.get_queryset().get(pk=state_id), - fields=self.fields, - expand=self.expand, - ) - return Response(serializer.data, status=status.HTTP_200_OK) + @state_docs( + operation_id="list_states", + summary="List states", + description="Retrieve all workflow states for a project.", + parameters=[ + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, + ], + responses={ + 200: create_paginated_response( + StateSerializer, + "PaginatedStateResponse", + "Paginated list of states", + "Paginated States", + ), + }, + ) + def get(self, request, slug, project_id): + """List states + + Retrieve all workflow states for a project. + Returns paginated results when listing all states. + """ return self.paginate( request=request, queryset=(self.get_queryset()), @@ -96,7 +158,76 @@ class StateAPIEndpoint(BaseAPIView): ).data, ) + +class StateDetailAPIEndpoint(BaseAPIView): + """State Detail Endpoint""" + + serializer_class = StateSerializer + model = State + permission_classes = [ProjectEntityPermission] + use_read_replica = True + + def get_queryset(self): + return ( + State.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + ) + .filter(is_triage=False) + .filter(project__archived_at__isnull=True) + .select_related("project") + .select_related("workspace") + .distinct() + ) + + @state_docs( + operation_id="retrieve_state", + summary="Retrieve state", + description="Retrieve details of a specific state.", + parameters=[ + STATE_ID_PARAMETER, + ], + responses={ + 200: OpenApiResponse( + 
description="State retrieved", + response=StateSerializer, + examples=[STATE_EXAMPLE], + ), + }, + ) + def get(self, request, slug, project_id, state_id): + """Retrieve state + + Retrieve details of a specific state. + Returns paginated results when listing all states. + """ + serializer = StateSerializer( + self.get_queryset().get(pk=state_id), + fields=self.fields, + expand=self.expand, + ) + return Response(serializer.data, status=status.HTTP_200_OK) + + @state_docs( + operation_id="delete_state", + summary="Delete state", + description="Permanently remove a workflow state from a project. Default states and states with existing work items cannot be deleted.", + parameters=[ + STATE_ID_PARAMETER, + ], + responses={ + 204: DELETED_RESPONSE, + 400: STATE_CANNOT_DELETE_RESPONSE, + }, + ) def delete(self, request, slug, project_id, state_id): + """Delete state + + Permanently remove a workflow state from a project. + Default states and states with existing work items cannot be deleted. + """ state = State.objects.get( is_triage=False, pk=state_id, project_id=project_id, workspace__slug=slug ) @@ -119,7 +250,33 @@ class StateAPIEndpoint(BaseAPIView): state.delete() return Response(status=status.HTTP_204_NO_CONTENT) - def patch(self, request, slug, project_id, state_id=None): + @state_docs( + operation_id="update_state", + summary="Update state", + description="Partially update an existing workflow state's properties like name, color, or group.", + parameters=[ + STATE_ID_PARAMETER, + ], + request=OpenApiRequest( + request=StateSerializer, + examples=[STATE_UPDATE_EXAMPLE], + ), + responses={ + 200: OpenApiResponse( + description="State updated", + response=StateSerializer, + examples=[STATE_EXAMPLE], + ), + 400: INVALID_REQUEST_RESPONSE, + 409: EXTERNAL_ID_EXISTS_RESPONSE, + }, + ) + def patch(self, request, slug, project_id, state_id): + """Update state + + Partially update an existing workflow state's properties like name, color, or group. 
+ Validates external ID uniqueness if provided. + """ state = State.objects.get( workspace__slug=slug, project_id=project_id, pk=state_id ) diff --git a/apps/api/plane/api/views/user.py b/apps/api/plane/api/views/user.py new file mode 100644 index 000000000..b874cec18 --- /dev/null +++ b/apps/api/plane/api/views/user.py @@ -0,0 +1,37 @@ +# Third party imports +from rest_framework import status +from rest_framework.response import Response +from drf_spectacular.utils import OpenApiResponse + +# Module imports +from plane.api.serializers import UserLiteSerializer +from plane.api.views.base import BaseAPIView +from plane.db.models import User +from plane.utils.openapi.decorators import user_docs +from plane.utils.openapi import USER_EXAMPLE + + +class UserEndpoint(BaseAPIView): + serializer_class = UserLiteSerializer + model = User + + @user_docs( + operation_id="get_current_user", + summary="Get current user", + description="Retrieve the authenticated user's profile information including basic details.", + responses={ + 200: OpenApiResponse( + description="Current user profile", + response=UserLiteSerializer, + examples=[USER_EXAMPLE], + ), + }, + ) + def get(self, request): + """Get current user + + Retrieve the authenticated user's profile information including basic details. + Returns user data based on the current authentication context. 
+ """ + serializer = UserLiteSerializer(request.user) + return Response(serializer.data, status=status.HTTP_200_OK) diff --git a/apps/api/plane/app/permissions/project.py b/apps/api/plane/app/permissions/project.py index 470960fcc..1596d90b3 100644 --- a/apps/api/plane/app/permissions/project.py +++ b/apps/api/plane/app/permissions/project.py @@ -75,12 +75,12 @@ class ProjectEntityPermission(BasePermission): return False # Handle requests based on project__identifier - if hasattr(view, "project__identifier") and view.project__identifier: + if hasattr(view, "project_identifier") and view.project_identifier: if request.method in SAFE_METHODS: return ProjectMember.objects.filter( workspace__slug=view.workspace_slug, member=request.user, - project__identifier=view.project__identifier, + project__identifier=view.project_identifier, is_active=True, ).exists() diff --git a/apps/api/plane/app/serializers/draft.py b/apps/api/plane/app/serializers/draft.py index 57600bff9..852caf8bf 100644 --- a/apps/api/plane/app/serializers/draft.py +++ b/apps/api/plane/app/serializers/draft.py @@ -1,3 +1,5 @@ +from lxml import html + # Django imports from django.utils import timezone @@ -21,7 +23,6 @@ from plane.db.models import ( ) from plane.utils.content_validator import ( validate_html_content, - validate_json_content, validate_binary_data, ) from plane.app.permissions import ROLE @@ -74,20 +75,24 @@ class DraftIssueCreateSerializer(BaseSerializer): raise serializers.ValidationError("Start date cannot exceed target date") # Validate description content for security - if "description" in attrs and attrs["description"]: - is_valid, error_msg = validate_json_content(attrs["description"]) - if not is_valid: - raise serializers.ValidationError({"description": error_msg}) - if "description_html" in attrs and attrs["description_html"]: - is_valid, error_msg = validate_html_content(attrs["description_html"]) + is_valid, error_msg, sanitized_html = validate_html_content( + 
attrs["description_html"] + ) if not is_valid: - raise serializers.ValidationError({"description_html": error_msg}) + raise serializers.ValidationError( + {"error": "html content is not valid"} + ) + # Update the attrs with sanitized HTML if available + if sanitized_html is not None: + attrs["description_html"] = sanitized_html if "description_binary" in attrs and attrs["description_binary"]: is_valid, error_msg = validate_binary_data(attrs["description_binary"]) if not is_valid: - raise serializers.ValidationError({"description_binary": error_msg}) + raise serializers.ValidationError( + {"description_binary": "Invalid binary data"} + ) # Validate assignees are from project if attrs.get("assignee_ids", []): @@ -258,7 +263,7 @@ class DraftIssueCreateSerializer(BaseSerializer): DraftIssueLabel.objects.bulk_create( [ DraftIssueLabel( - label=label, + label_id=label, draft_issue=instance, workspace_id=workspace_id, project_id=project_id, diff --git a/apps/api/plane/app/serializers/issue.py b/apps/api/plane/app/serializers/issue.py index 897326431..1eda37601 100644 --- a/apps/api/plane/app/serializers/issue.py +++ b/apps/api/plane/app/serializers/issue.py @@ -1,3 +1,5 @@ +from lxml import html + # Django imports from django.utils import timezone from django.core.validators import URLValidator @@ -41,7 +43,6 @@ from plane.db.models import ( ) from plane.utils.content_validator import ( validate_html_content, - validate_json_content, validate_binary_data, ) @@ -126,20 +127,24 @@ class IssueCreateSerializer(BaseSerializer): raise serializers.ValidationError("Start date cannot exceed target date") # Validate description content for security - if "description" in attrs and attrs["description"]: - is_valid, error_msg = validate_json_content(attrs["description"]) - if not is_valid: - raise serializers.ValidationError({"description": error_msg}) - if "description_html" in attrs and attrs["description_html"]: - is_valid, error_msg = 
validate_html_content(attrs["description_html"]) + is_valid, error_msg, sanitized_html = validate_html_content( + attrs["description_html"] + ) if not is_valid: - raise serializers.ValidationError({"description_html": error_msg}) + raise serializers.ValidationError( + {"error": "html content is not valid"} + ) + # Update the attrs with sanitized HTML if available + if sanitized_html is not None: + attrs["description_html"] = sanitized_html if "description_binary" in attrs and attrs["description_binary"]: is_valid, error_msg = validate_binary_data(attrs["description_binary"]) if not is_valid: - raise serializers.ValidationError({"description_binary": error_msg}) + raise serializers.ValidationError( + {"description_binary": "Invalid binary data"} + ) # Validate assignees are from project if attrs.get("assignee_ids", []): @@ -906,9 +911,14 @@ class IssueLiteSerializer(DynamicBaseSerializer): class IssueDetailSerializer(IssueSerializer): description_html = serializers.CharField() is_subscribed = serializers.BooleanField(read_only=True) + is_intake = serializers.BooleanField(read_only=True) class Meta(IssueSerializer.Meta): - fields = IssueSerializer.Meta.fields + ["description_html", "is_subscribed"] + fields = IssueSerializer.Meta.fields + [ + "description_html", + "is_subscribed", + "is_intake", + ] read_only_fields = fields diff --git a/apps/api/plane/app/serializers/page.py b/apps/api/plane/app/serializers/page.py index 78762e4b4..9ac6cc414 100644 --- a/apps/api/plane/app/serializers/page.py +++ b/apps/api/plane/app/serializers/page.py @@ -7,7 +7,6 @@ from .base import BaseSerializer from plane.utils.content_validator import ( validate_binary_data, validate_html_content, - validate_json_content, ) from plane.db.models import ( Page, @@ -229,23 +228,13 @@ class PageBinaryUpdateSerializer(serializers.Serializer): return value # Use the validation function from utils - is_valid, error_message = validate_html_content(value) + is_valid, error_message, sanitized_html = 
validate_html_content(value) if not is_valid: raise serializers.ValidationError(error_message) - return value + # Return sanitized HTML if available, otherwise return original + return sanitized_html if sanitized_html is not None else value - def validate_description(self, value): - """Validate the JSON description""" - if not value: - return value - - # Use the validation function from utils - is_valid, error_message = validate_json_content(value) - if not is_valid: - raise serializers.ValidationError(error_message) - - return value def update(self, instance, validated_data): """Update the page instance with validated data""" diff --git a/apps/api/plane/app/serializers/project.py b/apps/api/plane/app/serializers/project.py index dfa541d9f..1d1ea927d 100644 --- a/apps/api/plane/app/serializers/project.py +++ b/apps/api/plane/app/serializers/project.py @@ -15,7 +15,6 @@ from plane.db.models import ( ) from plane.utils.content_validator import ( validate_html_content, - validate_json_content, validate_binary_data, ) @@ -65,27 +64,18 @@ class ProjectSerializer(BaseSerializer): def validate(self, data): # Validate description content for security - if "description" in data and data["description"]: - # For Project, description might be text field, not JSON - if isinstance(data["description"], dict): - is_valid, error_msg = validate_json_content(data["description"]) - if not is_valid: - raise serializers.ValidationError({"description": error_msg}) - - if "description_text" in data and data["description_text"]: - is_valid, error_msg = validate_json_content(data["description_text"]) - if not is_valid: - raise serializers.ValidationError({"description_text": error_msg}) - if "description_html" in data and data["description_html"]: - if isinstance(data["description_html"], dict): - is_valid, error_msg = validate_json_content(data["description_html"]) - else: - is_valid, error_msg = validate_html_content( - str(data["description_html"]) - ) + is_valid, error_msg, 
sanitized_html = validate_html_content( + str(data["description_html"]) + ) + # Update the data with sanitized HTML if available + if sanitized_html is not None: + data["description_html"] = sanitized_html + if not is_valid: - raise serializers.ValidationError({"description_html": error_msg}) + raise serializers.ValidationError( + {"error": "html content is not valid"} + ) return data diff --git a/apps/api/plane/app/serializers/workspace.py b/apps/api/plane/app/serializers/workspace.py index ec4c4bf63..6b22f59e8 100644 --- a/apps/api/plane/app/serializers/workspace.py +++ b/apps/api/plane/app/serializers/workspace.py @@ -26,7 +26,6 @@ from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS from plane.utils.url import contains_url from plane.utils.content_validator import ( validate_html_content, - validate_json_content, validate_binary_data, ) @@ -319,20 +318,24 @@ class StickySerializer(BaseSerializer): def validate(self, data): # Validate description content for security - if "description" in data and data["description"]: - is_valid, error_msg = validate_json_content(data["description"]) - if not is_valid: - raise serializers.ValidationError({"description": error_msg}) - if "description_html" in data and data["description_html"]: - is_valid, error_msg = validate_html_content(data["description_html"]) + is_valid, error_msg, sanitized_html = validate_html_content( + data["description_html"] + ) if not is_valid: - raise serializers.ValidationError({"description_html": error_msg}) + raise serializers.ValidationError( + {"error": "html content is not valid"} + ) + # Update the data with sanitized HTML if available + if sanitized_html is not None: + data["description_html"] = sanitized_html if "description_binary" in data and data["description_binary"]: is_valid, error_msg = validate_binary_data(data["description_binary"]) if not is_valid: - raise serializers.ValidationError({"description_binary": error_msg}) + raise serializers.ValidationError( + 
{"description_binary": "Invalid binary data"} + ) return data diff --git a/apps/api/plane/app/views/analytic/advance.py b/apps/api/plane/app/views/analytic/advance.py index c690fbe7d..8a47cdd02 100644 --- a/apps/api/plane/app/views/analytic/advance.py +++ b/apps/api/plane/app/views/analytic/advance.py @@ -16,8 +16,6 @@ from plane.db.models import ( IssueView, ProjectPage, Workspace, - CycleIssue, - ModuleIssue, ProjectMember, ) from plane.utils.build_chart import build_analytics_chart diff --git a/apps/api/plane/app/views/asset/v2.py b/apps/api/plane/app/views/asset/v2.py index 7e0c14fdd..b69949621 100644 --- a/apps/api/plane/app/views/asset/v2.py +++ b/apps/api/plane/app/views/asset/v2.py @@ -441,7 +441,11 @@ class WorkspaceFileAssetEndpoint(BaseAPIView): # Get the presigned URL storage = S3Storage(request=request) # Generate a presigned URL to share an S3 object - signed_url = storage.generate_presigned_url(object_name=asset.asset.name) + signed_url = storage.generate_presigned_url( + object_name=asset.asset.name, + disposition="attachment", + filename=asset.attributes.get("name"), + ) # Redirect to the signed URL return HttpResponseRedirect(signed_url) @@ -641,7 +645,11 @@ class ProjectAssetEndpoint(BaseAPIView): # Get the presigned URL storage = S3Storage(request=request) # Generate a presigned URL to share an S3 object - signed_url = storage.generate_presigned_url(object_name=asset.asset.name) + signed_url = storage.generate_presigned_url( + object_name=asset.asset.name, + disposition="attachment", + filename=asset.attributes.get("name"), + ) # Redirect to the signed URL return HttpResponseRedirect(signed_url) diff --git a/apps/api/plane/app/views/base.py b/apps/api/plane/app/views/base.py index 92c374966..4cefb75a1 100644 --- a/apps/api/plane/app/views/base.py +++ b/apps/api/plane/app/views/base.py @@ -24,6 +24,7 @@ from rest_framework.viewsets import ModelViewSet from plane.authentication.session import BaseSessionAuthentication from 
plane.utils.exception_logger import log_exception from plane.utils.paginator import BasePaginator +from plane.utils.core.mixins import ReadReplicaControlMixin class TimezoneMixin: @@ -40,7 +41,7 @@ class TimezoneMixin: timezone.deactivate() -class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): +class BaseViewSet(TimezoneMixin, ReadReplicaControlMixin, ModelViewSet, BasePaginator): model = None permission_classes = [IsAuthenticated] @@ -53,6 +54,8 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): search_fields = [] + use_read_replica = False + def get_queryset(self): try: return self.model.objects.all() @@ -149,7 +152,7 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator): return expand if expand else None -class BaseAPIView(TimezoneMixin, APIView, BasePaginator): +class BaseAPIView(TimezoneMixin, ReadReplicaControlMixin, APIView, BasePaginator): permission_classes = [IsAuthenticated] filter_backends = (DjangoFilterBackend, SearchFilter) @@ -160,6 +163,8 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator): search_fields = [] + use_read_replica = False + def filter_queryset(self, queryset): for backend in list(self.filter_backends): queryset = backend().filter_queryset(self.request, queryset, self) diff --git a/apps/api/plane/app/views/issue/activity.py b/apps/api/plane/app/views/issue/activity.py index 91b973f11..b9ef58ffd 100644 --- a/apps/api/plane/app/views/issue/activity.py +++ b/apps/api/plane/app/views/issue/activity.py @@ -19,6 +19,7 @@ from plane.db.models import IssueActivity, IssueComment, CommentReaction, Intake class IssueActivityEndpoint(BaseAPIView): permission_classes = [ProjectEntityPermission] + use_read_replica = True @method_decorator(gzip_page) @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST]) diff --git a/apps/api/plane/app/views/issue/archive.py b/apps/api/plane/app/views/issue/archive.py index 118d1e7f9..122f4bdc8 100644 --- a/apps/api/plane/app/views/issue/archive.py +++ 
b/apps/api/plane/app/views/issue/archive.py @@ -3,7 +3,7 @@ import json # Django imports from django.core.serializers.json import DjangoJSONEncoder -from django.db.models import F, Func, OuterRef, Q, Prefetch, Exists, Subquery +from django.db.models import F, Func, OuterRef, Q, Prefetch, Exists, Subquery, Count from django.utils import timezone from django.utils.decorators import method_decorator from django.views.decorators.gzip import gzip_page @@ -69,25 +69,31 @@ class IssueArchiveViewSet(BaseViewSet): ) ) .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=FileAsset.objects.filter( - issue_id=OuterRef("id"), - entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, + link_count=Subquery( + IssueLink.objects.filter(issue=OuterRef("id")) + .values("issue") + .annotate(count=Count("id")) + .values("count") ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") ) .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") + attachment_count=Subquery( + FileAsset.objects.filter( + issue_id=OuterRef("id"), + entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, + ) + .values("issue_id") + .annotate(count=Count("id")) + .values("count") + ) + ) + .annotate( + sub_issues_count=Subquery( + Issue.issue_objects.filter(parent=OuterRef("id")) + .values("parent") + .annotate(count=Count("id")) + .values("count") + ) ) ) @@ -101,6 +107,19 @@ class IssueArchiveViewSet(BaseViewSet): issue_queryset = self.get_queryset().filter(**filters) + total_issue_queryset = Issue.objects.filter( + deleted_at__isnull=True, + archived_at__isnull=False, + project_id=project_id, + workspace__slug=slug, + ).filter(**filters) + + total_issue_queryset = ( + total_issue_queryset + if show_sub_issues == "true" + else 
total_issue_queryset.filter(parent__isnull=True) + ) + issue_queryset = ( issue_queryset if show_sub_issues == "true" @@ -136,6 +155,7 @@ class IssueArchiveViewSet(BaseViewSet): request=request, order_by=order_by_param, queryset=issue_queryset, + total_count_queryset=total_issue_queryset, on_results=lambda issues: issue_on_results( group_by=group_by, issues=issues, sub_group_by=sub_group_by ), @@ -170,6 +190,7 @@ class IssueArchiveViewSet(BaseViewSet): request=request, order_by=order_by_param, queryset=issue_queryset, + total_count_queryset=total_issue_queryset, on_results=lambda issues: issue_on_results( group_by=group_by, issues=issues, sub_group_by=sub_group_by ), @@ -196,6 +217,7 @@ class IssueArchiveViewSet(BaseViewSet): order_by=order_by_param, request=request, queryset=issue_queryset, + total_count_queryset=total_issue_queryset, on_results=lambda issues: issue_on_results( group_by=group_by, issues=issues, sub_group_by=sub_group_by ), diff --git a/apps/api/plane/app/views/issue/base.py b/apps/api/plane/app/views/issue/base.py index d0b4e7d5e..4d0d4457e 100644 --- a/apps/api/plane/app/views/issue/base.py +++ b/apps/api/plane/app/views/issue/base.py @@ -15,6 +15,7 @@ from django.db.models import ( UUIDField, Value, Subquery, + Count, ) from django.db.models.functions import Coalesce from django.utils import timezone @@ -50,6 +51,7 @@ from plane.db.models import ( IssueRelation, IssueAssignee, IssueLabel, + IntakeIssue, ) from plane.utils.grouper import ( issue_group_values, @@ -212,27 +214,33 @@ class IssueViewSet(BaseViewSet): ) ) .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=FileAsset.objects.filter( - issue_id=OuterRef("id"), - entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, + link_count=Subquery( + IssueLink.objects.filter(issue=OuterRef("id")) + .values("issue") + .annotate(count=Count("id")) + 
.values("count") ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") ) .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") + attachment_count=Subquery( + FileAsset.objects.filter( + issue_id=OuterRef("id"), + entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, + ) + .values("issue_id") + .annotate(count=Count("id")) + .values("count") + ) ) - ).distinct() + .annotate( + sub_issues_count=Subquery( + Issue.issue_objects.filter(parent=OuterRef("id")) + .values("parent") + .annotate(count=Count("id")) + .values("count") + ) + ) + ) @method_decorator(gzip_page) @allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST]) @@ -248,6 +256,10 @@ class IssueViewSet(BaseViewSet): issue_queryset = self.get_queryset().filter(**filters, **extra_filters) # Custom ordering for priority and state + total_issue_queryset = Issue.issue_objects.filter( + project_id=project_id, workspace__slug=slug + ).filter(**filters, **extra_filters) + # Issue queryset issue_queryset, order_by_param = order_issue_queryset( issue_queryset=issue_queryset, order_by_param=order_by_param @@ -280,6 +292,7 @@ class IssueViewSet(BaseViewSet): and not project.guest_view_all_features ): issue_queryset = issue_queryset.filter(created_by=request.user) + total_issue_queryset = total_issue_queryset.filter(created_by=request.user) if group_by: if sub_group_by: @@ -295,6 +308,7 @@ class IssueViewSet(BaseViewSet): request=request, order_by=order_by_param, queryset=issue_queryset, + total_count_queryset=total_issue_queryset, on_results=lambda issues: issue_on_results( group_by=group_by, issues=issues, sub_group_by=sub_group_by ), @@ -328,6 +342,7 @@ class IssueViewSet(BaseViewSet): request=request, order_by=order_by_param, queryset=issue_queryset, + total_count_queryset=total_issue_queryset, on_results=lambda issues: issue_on_results( group_by=group_by, issues=issues, 
sub_group_by=sub_group_by ), @@ -353,6 +368,7 @@ class IssueViewSet(BaseViewSet): order_by=order_by_param, request=request, queryset=issue_queryset, + total_count_queryset=total_issue_queryset, on_results=lambda issues: issue_on_results( group_by=group_by, issues=issues, sub_group_by=sub_group_by ), @@ -453,10 +469,12 @@ class IssueViewSet(BaseViewSet): project = Project.objects.get(pk=project_id, workspace__slug=slug) issue = ( - Issue.objects.filter(project_id=self.kwargs.get("project_id")) - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels", "issue_module__module") + Issue.objects.filter( + project_id=self.kwargs.get("project_id"), + workspace__slug=self.kwargs.get("slug"), + pk=pk, + ) + .select_related("state") .annotate( cycle_id=Subquery( CycleIssue.objects.filter(issue=OuterRef("id")).values("cycle_id")[ @@ -465,60 +483,63 @@ class IssueViewSet(BaseViewSet): ) ) .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=FileAsset.objects.filter( - issue_id=OuterRef("id"), - entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, + link_count=Subquery( + IssueLink.objects.filter(issue=OuterRef("id")) + .values("issue") + .annotate(count=Count("id")) + .values("count") ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") ) .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") + attachment_count=Subquery( + FileAsset.objects.filter( + issue_id=OuterRef("id"), + entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, + ) + .values("issue_id") + .annotate(count=Count("id")) + .values("count") + ) + ) + .annotate( + sub_issues_count=Subquery( + 
Issue.issue_objects.filter(parent=OuterRef("id")) + .values("parent") + .annotate(count=Count("id")) + .values("count") + ) ) - .filter(pk=pk) .annotate( label_ids=Coalesce( - ArrayAgg( - "labels__id", - distinct=True, - filter=Q( - ~Q(labels__id__isnull=True) - & Q(label_issue__deleted_at__isnull=True) - ), + Subquery( + IssueLabel.objects.filter(issue_id=OuterRef("pk")) + .values("issue_id") + .annotate(arr=ArrayAgg("label_id", distinct=True)) + .values("arr") ), Value([], output_field=ArrayField(UUIDField())), ), assignee_ids=Coalesce( - ArrayAgg( - "assignees__id", - distinct=True, - filter=Q( - ~Q(assignees__id__isnull=True) - & Q(assignees__member_project__is_active=True) - & Q(issue_assignee__deleted_at__isnull=True) - ), + Subquery( + IssueAssignee.objects.filter( + issue_id=OuterRef("pk"), + assignee__member_project__is_active=True, + ) + .values("issue_id") + .annotate(arr=ArrayAgg("assignee_id", distinct=True)) + .values("arr") ), Value([], output_field=ArrayField(UUIDField())), ), module_ids=Coalesce( - ArrayAgg( - "issue_module__module_id", - distinct=True, - filter=Q( - ~Q(issue_module__module_id__isnull=True) - & Q(issue_module__module__archived_at__isnull=True) - & Q(issue_module__deleted_at__isnull=True) - ), + Subquery( + ModuleIssue.objects.filter( + issue_id=OuterRef("pk"), + module__archived_at__isnull=True, + ) + .values("issue_id") + .annotate(arr=ArrayAgg("module_id", distinct=True)) + .values("arr") ), Value([], output_field=ArrayField(UUIDField())), ), @@ -786,37 +807,42 @@ class IssuePaginatedViewSet(BaseViewSet): ) return ( - issue_queryset.select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels", "issue_module__module") + issue_queryset.select_related("state") .annotate( cycle_id=Subquery( - CycleIssue.objects.filter( - issue=OuterRef("id"), deleted_at__isnull=True - ).values("cycle_id")[:1] + CycleIssue.objects.filter(issue=OuterRef("id")).values("cycle_id")[ + :1 + ] ) ) .annotate( - 
link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=FileAsset.objects.filter( - issue_id=OuterRef("id"), - entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, + link_count=Subquery( + IssueLink.objects.filter(issue=OuterRef("id")) + .values("issue") + .annotate(count=Count("id")) + .values("count") ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") ) .annotate( - sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") + attachment_count=Subquery( + FileAsset.objects.filter( + issue_id=OuterRef("id"), + entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, + ) + .values("issue_id") + .annotate(count=Count("id")) + .values("count") + ) ) - ).distinct() + .annotate( + sub_issues_count=Subquery( + Issue.issue_objects.filter(parent=OuterRef("id")) + .values("parent") + .annotate(count=Count("id")) + .values("count") + ) + ) + ) def process_paginated_result(self, fields, results, timezone): paginated_data = results.values(*fields) @@ -896,37 +922,35 @@ class IssuePaginatedViewSet(BaseViewSet): queryset = queryset.annotate( label_ids=Coalesce( - ArrayAgg( - "labels__id", - distinct=True, - filter=Q( - ~Q(labels__id__isnull=True) - & Q(label_issue__deleted_at__isnull=True) - ), + Subquery( + IssueLabel.objects.filter(issue_id=OuterRef("pk")) + .values("issue_id") + .annotate(arr=ArrayAgg("label_id", distinct=True)) + .values("arr") ), Value([], output_field=ArrayField(UUIDField())), ), assignee_ids=Coalesce( - ArrayAgg( - "assignees__id", - distinct=True, - filter=Q( - ~Q(assignees__id__isnull=True) - & Q(assignees__member_project__is_active=True) - & Q(issue_assignee__deleted_at__isnull=True) - ), + Subquery( + IssueAssignee.objects.filter( + issue_id=OuterRef("pk"), + assignee__member_project__is_active=True, 
+ ) + .values("issue_id") + .annotate(arr=ArrayAgg("assignee_id", distinct=True)) + .values("arr") ), Value([], output_field=ArrayField(UUIDField())), ), module_ids=Coalesce( - ArrayAgg( - "issue_module__module_id", - distinct=True, - filter=Q( - ~Q(issue_module__module_id__isnull=True) - & Q(issue_module__module__archived_at__isnull=True) - & Q(issue_module__deleted_at__isnull=True) - ), + Subquery( + ModuleIssue.objects.filter( + issue_id=OuterRef("pk"), + module__archived_at__isnull=True, + ) + .values("issue_id") + .annotate(arr=ArrayAgg("module_id", distinct=True)) + .values("arr") ), Value([], output_field=ArrayField(UUIDField())), ), @@ -1200,7 +1224,7 @@ class IssueDetailIdentifierEndpoint(BaseAPIView): # Fetch the issue issue = ( - Issue.issue_objects.filter(project_id=project.id) + Issue.objects.filter(project_id=project.id) .filter(workspace__slug=slug) .select_related("workspace", "project", "state", "parent") .prefetch_related("assignees", "labels", "issue_module__module") @@ -1292,6 +1316,16 @@ class IssueDetailIdentifierEndpoint(BaseAPIView): ) ) ) + .annotate( + is_intake=Exists( + IntakeIssue.objects.filter( + issue=OuterRef("id"), + status__in=[-2, 0], + workspace__slug=slug, + project_id=project.id, + ) + ) + ) ).first() # Check if the issue exists diff --git a/apps/api/plane/app/views/notification/base.py b/apps/api/plane/app/views/notification/base.py index e84cf4d29..329599c15 100644 --- a/apps/api/plane/app/views/notification/base.py +++ b/apps/api/plane/app/views/notification/base.py @@ -232,6 +232,8 @@ class NotificationViewSet(BaseViewSet, BasePaginator): class UnreadNotificationEndpoint(BaseAPIView): + use_read_replica = True + @allow_permission( allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE" ) diff --git a/apps/api/plane/app/views/page/base.py b/apps/api/plane/app/views/page/base.py index 96de81abf..e4ee1890b 100644 --- a/apps/api/plane/app/views/page/base.py +++ b/apps/api/plane/app/views/page/base.py @@ -198,6 
+198,7 @@ class PageViewSet(BaseViewSet): def retrieve(self, request, slug, project_id, pk=None): page = self.get_queryset().filter(pk=pk).first() project = Project.objects.get(pk=project_id) + track_visit = request.query_params.get("track_visit", "true").lower() == "true" """ if the role is guest and guest_view_all_features is false and owned by is not @@ -230,13 +231,14 @@ class PageViewSet(BaseViewSet): ).values_list("entity_identifier", flat=True) data = PageDetailSerializer(page).data data["issue_ids"] = issue_ids - recent_visited_task.delay( - slug=slug, - entity_name="page", - entity_identifier=pk, - user_id=request.user.id, - project_id=project_id, - ) + if track_visit: + recent_visited_task.delay( + slug=slug, + entity_name="page", + entity_identifier=pk, + user_id=request.user.id, + project_id=project_id, + ) return Response(data, status=status.HTTP_200_OK) @allow_permission([ROLE.ADMIN], model=Page, creator=True) diff --git a/apps/api/plane/app/views/project/base.py b/apps/api/plane/app/views/project/base.py index 1da2aa84b..b4ee113c4 100644 --- a/apps/api/plane/app/views/project/base.py +++ b/apps/api/plane/app/views/project/base.py @@ -46,6 +46,7 @@ class ProjectViewSet(BaseViewSet): serializer_class = ProjectListSerializer model = Project webhook_event = "project" + use_read_replica = True def get_queryset(self): sort_order = ProjectMember.objects.filter( diff --git a/apps/api/plane/app/views/project/member.py b/apps/api/plane/app/views/project/member.py index 60d960fe5..0b09c1366 100644 --- a/apps/api/plane/app/views/project/member.py +++ b/apps/api/plane/app/views/project/member.py @@ -312,6 +312,7 @@ class ProjectMemberUserEndpoint(BaseAPIView): class UserProjectRolesEndpoint(BaseAPIView): permission_classes = [WorkspaceUserPermission] + use_read_replica = True def get(self, request, slug): project_members = ProjectMember.objects.filter( diff --git a/apps/api/plane/app/views/search/issue.py b/apps/api/plane/app/views/search/issue.py index 
ed826782a..b3bce1eda 100644 --- a/apps/api/plane/app/views/search/issue.py +++ b/apps/api/plane/app/views/search/issue.py @@ -59,9 +59,10 @@ class IssueSearchEndpoint(BaseAPIView): ) related_issue_ids = [item for sublist in related_issue_ids for item in sublist] + related_issue_ids.append(issue_id) if issue: - issues = issues.filter(~Q(pk=issue_id), ~Q(pk__in=related_issue_ids)) + issues = issues.exclude(pk__in=related_issue_ids) return issues diff --git a/apps/api/plane/app/views/user/base.py b/apps/api/plane/app/views/user/base.py index 4eca872f3..08389d50c 100644 --- a/apps/api/plane/app/views/user/base.py +++ b/apps/api/plane/app/views/user/base.py @@ -44,6 +44,7 @@ from django.views.decorators.vary import vary_on_cookie class UserEndpoint(BaseViewSet): serializer_class = UserSerializer model = User + use_read_replica = True def get_object(self): return self.request.user diff --git a/apps/api/plane/app/views/workspace/base.py b/apps/api/plane/app/views/workspace/base.py index 922b39cc9..a37624d2a 100644 --- a/apps/api/plane/app/views/workspace/base.py +++ b/apps/api/plane/app/views/workspace/base.py @@ -177,6 +177,7 @@ class WorkSpaceViewSet(BaseViewSet): class UserWorkSpacesEndpoint(BaseAPIView): search_fields = ["name"] filterset_fields = ["owner"] + use_read_replica = True def get(self, request): fields = [field for field in request.GET.get("fields", "").split(",") if field] diff --git a/apps/api/plane/app/views/workspace/cycle.py b/apps/api/plane/app/views/workspace/cycle.py index eb899553d..73deca059 100644 --- a/apps/api/plane/app/views/workspace/cycle.py +++ b/apps/api/plane/app/views/workspace/cycle.py @@ -10,7 +10,6 @@ from plane.app.views.base import BaseAPIView from plane.db.models import Cycle from plane.app.permissions import WorkspaceViewerPermission from plane.app.serializers.cycle import CycleSerializer -from plane.utils.timezone_converter import user_timezone_converter class WorkspaceCyclesEndpoint(BaseAPIView): diff --git 
a/apps/api/plane/app/views/workspace/draft.py b/apps/api/plane/app/views/workspace/draft.py index a5e61d6b4..e4b032725 100644 --- a/apps/api/plane/app/views/workspace/draft.py +++ b/apps/api/plane/app/views/workspace/draft.py @@ -172,12 +172,14 @@ class WorkspaceDraftIssueViewSet(BaseViewSet): {"error": "Issue not found"}, status=status.HTTP_404_NOT_FOUND ) + project_id = request.data.get("project_id", issue.project_id) + serializer = DraftIssueCreateSerializer( issue, data=request.data, partial=True, context={ - "project_id": request.data.get("project_id", None), + "project_id": project_id, "cycle_id": request.data.get("cycle_id", "not_provided"), }, ) diff --git a/apps/api/plane/app/views/workspace/estimate.py b/apps/api/plane/app/views/workspace/estimate.py index beef2a8ec..8b0981f9e 100644 --- a/apps/api/plane/app/views/workspace/estimate.py +++ b/apps/api/plane/app/views/workspace/estimate.py @@ -12,6 +12,7 @@ from plane.utils.cache import cache_response class WorkspaceEstimatesEndpoint(BaseAPIView): permission_classes = [WorkspaceEntityPermission] + use_read_replica = True @cache_response(60 * 60 * 2) def get(self, request, slug): diff --git a/apps/api/plane/app/views/workspace/favorite.py b/apps/api/plane/app/views/workspace/favorite.py index ad2f24883..ee126fa5b 100644 --- a/apps/api/plane/app/views/workspace/favorite.py +++ b/apps/api/plane/app/views/workspace/favorite.py @@ -14,6 +14,8 @@ from plane.app.permissions import allow_permission, ROLE class WorkspaceFavoriteEndpoint(BaseAPIView): + use_read_replica = True + @allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE") def get(self, request, slug): # the second filter is to check if the user is a member of the project diff --git a/apps/api/plane/app/views/workspace/label.py b/apps/api/plane/app/views/workspace/label.py index c93cd44c8..11ca6b913 100644 --- a/apps/api/plane/app/views/workspace/label.py +++ b/apps/api/plane/app/views/workspace/label.py @@ -12,6 +12,7 @@ from 
plane.utils.cache import cache_response class WorkspaceLabelsEndpoint(BaseAPIView): permission_classes = [WorkspaceViewerPermission] + use_read_replica = True @cache_response(60 * 60 * 2) def get(self, request, slug): diff --git a/apps/api/plane/app/views/workspace/member.py b/apps/api/plane/app/views/workspace/member.py index 7743ff4cd..84985cec3 100644 --- a/apps/api/plane/app/views/workspace/member.py +++ b/apps/api/plane/app/views/workspace/member.py @@ -28,6 +28,7 @@ class WorkSpaceMemberViewSet(BaseViewSet): model = WorkspaceMember search_fields = ["member__display_name", "member__first_name"] + use_read_replica = True def get_queryset(self): return self.filter_queryset( @@ -214,6 +215,8 @@ class WorkspaceMemberUserViewsEndpoint(BaseAPIView): class WorkspaceMemberUserEndpoint(BaseAPIView): + use_read_replica = True + def get(self, request, slug): draft_issue_count = ( DraftIssue.objects.filter( diff --git a/apps/api/plane/app/views/workspace/quick_link.py b/apps/api/plane/app/views/workspace/quick_link.py index b7decea95..104ca00d2 100644 --- a/apps/api/plane/app/views/workspace/quick_link.py +++ b/apps/api/plane/app/views/workspace/quick_link.py @@ -11,6 +11,7 @@ from plane.app.permissions import allow_permission, ROLE class QuickLinkViewSet(BaseViewSet): model = WorkspaceUserLink + use_read_replica = True def get_serializer_class(self): return WorkspaceUserLinkSerializer diff --git a/apps/api/plane/app/views/workspace/recent_visit.py b/apps/api/plane/app/views/workspace/recent_visit.py index 4fe15b513..e1c50c8b6 100644 --- a/apps/api/plane/app/views/workspace/recent_visit.py +++ b/apps/api/plane/app/views/workspace/recent_visit.py @@ -12,6 +12,7 @@ from plane.app.permissions import allow_permission, ROLE class UserRecentVisitViewSet(BaseViewSet): model = UserRecentVisit + use_read_replica = True def get_serializer_class(self): return WorkspaceRecentVisitSerializer diff --git a/apps/api/plane/app/views/workspace/state.py 
b/apps/api/plane/app/views/workspace/state.py index 08bc2be28..3bfc8d22d 100644 --- a/apps/api/plane/app/views/workspace/state.py +++ b/apps/api/plane/app/views/workspace/state.py @@ -7,14 +7,13 @@ from plane.app.serializers import StateSerializer from plane.app.views.base import BaseAPIView from plane.db.models import State from plane.app.permissions import WorkspaceEntityPermission -from plane.utils.cache import cache_response from collections import defaultdict class WorkspaceStatesEndpoint(BaseAPIView): permission_classes = [WorkspaceEntityPermission] + use_read_replica = True - @cache_response(60 * 60 * 2) def get(self, request, slug): states = State.objects.filter( workspace__slug=slug, diff --git a/apps/api/plane/app/views/workspace/sticky.py b/apps/api/plane/app/views/workspace/sticky.py index 4870a6abe..8b9654716 100644 --- a/apps/api/plane/app/views/workspace/sticky.py +++ b/apps/api/plane/app/views/workspace/sticky.py @@ -12,6 +12,7 @@ from plane.app.serializers import StickySerializer class WorkspaceStickyViewSet(BaseViewSet): serializer_class = StickySerializer model = Sticky + use_read_replica = True def get_queryset(self): return self.filter_queryset( diff --git a/apps/api/plane/app/views/workspace/user_preference.py b/apps/api/plane/app/views/workspace/user_preference.py index 7cfa740e8..8bcf6b309 100644 --- a/apps/api/plane/app/views/workspace/user_preference.py +++ b/apps/api/plane/app/views/workspace/user_preference.py @@ -13,6 +13,7 @@ from rest_framework import status class WorkspaceUserPreferenceViewSet(BaseAPIView): model = WorkspaceUserPreference + use_read_replica = True def get_serializer_class(self): return WorkspaceUserPreferenceSerializer diff --git a/apps/api/plane/authentication/provider/oauth/github.py b/apps/api/plane/authentication/provider/oauth/github.py index d8116cec3..ecf7ed183 100644 --- a/apps/api/plane/authentication/provider/oauth/github.py +++ b/apps/api/plane/authentication/provider/oauth/github.py @@ -18,7 +18,7 @@ from 
plane.authentication.adapter.error import ( class GitHubOAuthProvider(OauthAdapter): token_url = "https://github.com/login/oauth/access_token" userinfo_url = "https://api.github.com/user" - org_membership_url = f"https://api.github.com/orgs" + org_membership_url = "https://api.github.com/orgs" provider = "github" scope = "read:user user:email" diff --git a/apps/api/plane/bgtasks/api_logs_task.py b/apps/api/plane/bgtasks/api_logs_task.py deleted file mode 100644 index 038b939d5..000000000 --- a/apps/api/plane/bgtasks/api_logs_task.py +++ /dev/null @@ -1,15 +0,0 @@ -from django.utils import timezone -from datetime import timedelta -from plane.db.models import APIActivityLog -from celery import shared_task - - -@shared_task -def delete_api_logs(): - # Get the logs older than 30 days to delete - logs_to_delete = APIActivityLog.objects.filter( - created_at__lte=timezone.now() - timedelta(days=30) - ) - - # Delete the logs - logs_to_delete._raw_delete(logs_to_delete.db) diff --git a/apps/api/plane/bgtasks/cleanup_task.py b/apps/api/plane/bgtasks/cleanup_task.py new file mode 100644 index 000000000..c9d86b639 --- /dev/null +++ b/apps/api/plane/bgtasks/cleanup_task.py @@ -0,0 +1,423 @@ +# Python imports +from datetime import timedelta +import logging +from typing import List, Dict, Any, Callable, Optional +import os + +# Django imports +from django.utils import timezone +from django.db.models import F, Window, Subquery +from django.db.models.functions import RowNumber + +# Third party imports +from celery import shared_task +from pymongo.errors import BulkWriteError +from pymongo.collection import Collection +from pymongo.operations import InsertOne + +# Module imports +from plane.db.models import ( + EmailNotificationLog, + PageVersion, + APIActivityLog, + IssueDescriptionVersion, +) +from plane.settings.mongo import MongoConnection +from plane.utils.exception_logger import log_exception + + +logger = logging.getLogger("plane.worker") +BATCH_SIZE = 1000 + + +def 
get_mongo_collection(collection_name: str) -> Optional[Collection]: + """Get MongoDB collection if available, otherwise return None.""" + if not MongoConnection.is_configured(): + logger.info("MongoDB not configured") + return None + + try: + mongo_collection = MongoConnection.get_collection(collection_name) + logger.info(f"MongoDB collection '{collection_name}' connected successfully") + return mongo_collection + except Exception as e: + logger.error(f"Failed to get MongoDB collection: {str(e)}") + log_exception(e) + return None + + +def flush_to_mongo_and_delete( + mongo_collection: Optional[Collection], + buffer: List[Dict[str, Any]], + ids_to_delete: List[int], + model, + mongo_available: bool, +) -> None: + """ + Inserts a batch of records into MongoDB and deletes the corresponding rows from PostgreSQL. + """ + if not buffer: + logger.debug("No records to flush - buffer is empty") + return + + logger.info( + f"Starting batch flush: {len(buffer)} records, {len(ids_to_delete)} IDs to delete" + ) + + mongo_archival_failed = False + + # Try to insert into MongoDB if available + if mongo_collection is not None and mongo_available: + try: + mongo_collection.bulk_write([InsertOne(doc) for doc in buffer]) + except BulkWriteError as bwe: + logger.error(f"MongoDB bulk write error: {str(bwe)}") + log_exception(bwe) + mongo_archival_failed = True + + # If MongoDB is available and archival failed, log the error and return + if mongo_available and mongo_archival_failed: + logger.error(f"MongoDB archival failed for {len(buffer)} records") + return + + # Delete from PostgreSQL - delete() returns (count, {model: count}) + delete_result = model.all_objects.filter(id__in=ids_to_delete).delete() + deleted_count = ( + delete_result[0] if delete_result and isinstance(delete_result, tuple) else 0 + ) + logger.info(f"Batch flush completed: {deleted_count} records deleted") + + +def process_cleanup_task( + queryset_func: Callable, + transform_func: Callable[[Dict], Dict], + model, + 
task_name: str, + collection_name: str, +): + """ + Generic function to process cleanup tasks. + + Args: + queryset_func: Function that returns the queryset to process + transform_func: Function to transform each record for MongoDB + model: Django model class + task_name: Name of the task for logging + collection_name: MongoDB collection name + """ + logger.info(f"Starting {task_name} cleanup task") + + # Get MongoDB collection + mongo_collection = get_mongo_collection(collection_name) + mongo_available = mongo_collection is not None + + # Get queryset + queryset = queryset_func() + + # Process records in batches + buffer: List[Dict[str, Any]] = [] + ids_to_delete: List[int] = [] + total_processed = 0 + total_batches = 0 + + for record in queryset: + # Transform record for MongoDB + buffer.append(transform_func(record)) + ids_to_delete.append(record["id"]) + + # Flush batch when it reaches BATCH_SIZE + if len(buffer) >= BATCH_SIZE: + total_batches += 1 + flush_to_mongo_and_delete( + mongo_collection=mongo_collection, + buffer=buffer, + ids_to_delete=ids_to_delete, + model=model, + mongo_available=mongo_available, + ) + total_processed += len(buffer) + buffer.clear() + ids_to_delete.clear() + + # Process final batch if any records remain + if buffer: + total_batches += 1 + flush_to_mongo_and_delete( + mongo_collection=mongo_collection, + buffer=buffer, + ids_to_delete=ids_to_delete, + model=model, + mongo_available=mongo_available, + ) + total_processed += len(buffer) + + logger.info( + f"{task_name} cleanup task completed", + extra={ + "total_records_processed": total_processed, + "total_batches": total_batches, + "mongo_available": mongo_available, + "collection_name": collection_name, + }, + ) + + +# Transform functions for each model +def transform_api_log(record: Dict) -> Dict: + """Transform API activity log record.""" + return { + "id": str(record["id"]), + "created_at": str(record["created_at"]) if record.get("created_at") else None, + "token_identifier": 
str(record["token_identifier"]), + "path": record["path"], + "method": record["method"], + "query_params": record.get("query_params"), + "headers": record.get("headers"), + "body": record.get("body"), + "response_code": record["response_code"], + "response_body": record["response_body"], + "ip_address": record["ip_address"], + "user_agent": record["user_agent"], + "created_by_id": str(record["created_by_id"]), + } + + +def transform_email_log(record: Dict) -> Dict: + """Transform email notification log record.""" + return { + "id": str(record["id"]), + "created_at": str(record["created_at"]) if record.get("created_at") else None, + "receiver_id": str(record["receiver_id"]), + "triggered_by_id": str(record["triggered_by_id"]), + "entity_identifier": str(record["entity_identifier"]), + "entity_name": record["entity_name"], + "data": record["data"], + "processed_at": ( + str(record["processed_at"]) if record.get("processed_at") else None + ), + "sent_at": str(record["sent_at"]) if record.get("sent_at") else None, + "entity": record["entity"], + "old_value": str(record["old_value"]), + "new_value": str(record["new_value"]), + "created_by_id": str(record["created_by_id"]), + } + + +def transform_page_version(record: Dict) -> Dict: + """Transform page version record.""" + return { + "id": str(record["id"]), + "created_at": str(record["created_at"]) if record.get("created_at") else None, + "page_id": str(record["page_id"]), + "workspace_id": str(record["workspace_id"]), + "owned_by_id": str(record["owned_by_id"]), + "description_html": record["description_html"], + "description_binary": record["description_binary"], + "description_stripped": record["description_stripped"], + "description_json": record["description_json"], + "sub_pages_data": record["sub_pages_data"], + "created_by_id": str(record["created_by_id"]), + "updated_by_id": str(record["updated_by_id"]), + "deleted_at": str(record["deleted_at"]) if record.get("deleted_at") else None, + "last_saved_at": ( + 
str(record["last_saved_at"]) if record.get("last_saved_at") else None + ), + } + + +def transform_issue_description_version(record: Dict) -> Dict: + """Transform issue description version record.""" + return { + "id": str(record["id"]), + "created_at": str(record["created_at"]) if record.get("created_at") else None, + "issue_id": str(record["issue_id"]), + "workspace_id": str(record["workspace_id"]), + "project_id": str(record["project_id"]), + "created_by_id": str(record["created_by_id"]), + "updated_by_id": str(record["updated_by_id"]), + "owned_by_id": str(record["owned_by_id"]), + "last_saved_at": ( + str(record["last_saved_at"]) if record.get("last_saved_at") else None + ), + "description_binary": record["description_binary"], + "description_html": record["description_html"], + "description_stripped": record["description_stripped"], + "description_json": record["description_json"], + "deleted_at": str(record["deleted_at"]) if record.get("deleted_at") else None, + } + + +# Queryset functions for each cleanup task +def get_api_logs_queryset(): + """Get API logs older than cutoff days.""" + cutoff_days = int(os.environ.get("HARD_DELETE_AFTER_DAYS", 30)) + cutoff_time = timezone.now() - timedelta(days=cutoff_days) + logger.info(f"API logs cutoff time: {cutoff_time}") + + return ( + APIActivityLog.all_objects.filter(created_at__lte=cutoff_time) + .values( + "id", + "created_at", + "token_identifier", + "path", + "method", + "query_params", + "headers", + "body", + "response_code", + "response_body", + "ip_address", + "user_agent", + "created_by_id", + ) + .iterator(chunk_size=BATCH_SIZE) + ) + + +def get_email_logs_queryset(): + """Get email logs older than cutoff days.""" + cutoff_days = int(os.environ.get("HARD_DELETE_AFTER_DAYS", 30)) + cutoff_time = timezone.now() - timedelta(days=cutoff_days) + logger.info(f"Email logs cutoff time: {cutoff_time}") + + return ( + EmailNotificationLog.all_objects.filter(sent_at__lte=cutoff_time) + .values( + "id", + 
"created_at", + "receiver_id", + "triggered_by_id", + "entity_identifier", + "entity_name", + "data", + "processed_at", + "sent_at", + "entity", + "old_value", + "new_value", + "created_by_id", + ) + .iterator(chunk_size=BATCH_SIZE) + ) + + +def get_page_versions_queryset(): + """Get page versions beyond the maximum allowed (20 per page).""" + subq = ( + PageVersion.all_objects.annotate( + row_num=Window( + expression=RowNumber(), + partition_by=[F("page_id")], + order_by=F("created_at").desc(), + ) + ) + .filter(row_num__gt=20) + .values("id") + ) + + return ( + PageVersion.all_objects.filter(id__in=Subquery(subq)) + .values( + "id", + "created_at", + "page_id", + "workspace_id", + "owned_by_id", + "description_html", + "description_binary", + "description_stripped", + "description_json", + "sub_pages_data", + "created_by_id", + "updated_by_id", + "deleted_at", + "last_saved_at", + ) + .iterator(chunk_size=BATCH_SIZE) + ) + + +def get_issue_description_versions_queryset(): + """Get issue description versions beyond the maximum allowed (20 per issue).""" + subq = ( + IssueDescriptionVersion.all_objects.annotate( + row_num=Window( + expression=RowNumber(), + partition_by=[F("issue_id")], + order_by=F("created_at").desc(), + ) + ) + .filter(row_num__gt=20) + .values("id") + ) + + return ( + IssueDescriptionVersion.all_objects.filter(id__in=Subquery(subq)) + .values( + "id", + "created_at", + "issue_id", + "workspace_id", + "project_id", + "created_by_id", + "updated_by_id", + "owned_by_id", + "last_saved_at", + "description_binary", + "description_html", + "description_stripped", + "description_json", + "deleted_at", + ) + .iterator(chunk_size=BATCH_SIZE) + ) + + +# Celery tasks - now much simpler! 
+@shared_task +def delete_api_logs(): + """Delete old API activity logs.""" + process_cleanup_task( + queryset_func=get_api_logs_queryset, + transform_func=transform_api_log, + model=APIActivityLog, + task_name="API Activity Log", + collection_name="api_activity_logs", + ) + + +@shared_task +def delete_email_notification_logs(): + """Delete old email notification logs.""" + process_cleanup_task( + queryset_func=get_email_logs_queryset, + transform_func=transform_email_log, + model=EmailNotificationLog, + task_name="Email Notification Log", + collection_name="email_notification_logs", + ) + + +@shared_task +def delete_page_versions(): + """Delete excess page versions.""" + process_cleanup_task( + queryset_func=get_page_versions_queryset, + transform_func=transform_page_version, + model=PageVersion, + task_name="Page Version", + collection_name="page_versions", + ) + + +@shared_task +def delete_issue_description_versions(): + """Delete excess issue description versions.""" + process_cleanup_task( + queryset_func=get_issue_description_versions_queryset, + transform_func=transform_issue_description_version, + model=IssueDescriptionVersion, + task_name="Issue Description Version", + collection_name="issue_description_versions", + ) diff --git a/apps/api/plane/bgtasks/email_notification_task.py b/apps/api/plane/bgtasks/email_notification_task.py index 5a601dcb8..141bb2f71 100644 --- a/apps/api/plane/bgtasks/email_notification_task.py +++ b/apps/api/plane/bgtasks/email_notification_task.py @@ -282,7 +282,7 @@ def send_email_notification( "project_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/", "workspace": str(issue.project.workspace.slug), "project": str(issue.project.name), - "user_preference": f"{base_api}/profile/preferences/email", + "user_preference": f"{base_api}/{str(issue.project.workspace.slug)}/settings/account/notifications/", "comments": comments, "entity_type": "issue", } diff --git 
a/apps/api/plane/bgtasks/issue_activities_task.py b/apps/api/plane/bgtasks/issue_activities_task.py index 4def8e8ca..f768feac3 100644 --- a/apps/api/plane/bgtasks/issue_activities_task.py +++ b/apps/api/plane/bgtasks/issue_activities_task.py @@ -30,7 +30,6 @@ from plane.db.models import ( ) from plane.settings.redis import redis_instance from plane.utils.exception_logger import log_exception -from plane.bgtasks.webhook_task import webhook_activity from plane.utils.issue_relation_mapper import get_inverse_relation from plane.utils.uuid import is_valid_uuid diff --git a/apps/api/plane/bgtasks/page_version_task.py b/apps/api/plane/bgtasks/page_version_task.py index 7a5f94c9e..ec1f6c3ca 100644 --- a/apps/api/plane/bgtasks/page_version_task.py +++ b/apps/api/plane/bgtasks/page_version_task.py @@ -30,6 +30,8 @@ def page_version(page_id, existing_instance, user_id): description_binary=page.description_binary, owned_by_id=user_id, last_saved_at=page.updated_at, + description_json=page.description, + description_stripped=page.description_stripped, ) # If page versions are greater than 20 delete the oldest one diff --git a/apps/api/plane/celery.py b/apps/api/plane/celery.py index 0ffa4689b..2eeac358c 100644 --- a/apps/api/plane/celery.py +++ b/apps/api/plane/celery.py @@ -50,9 +50,21 @@ app.conf.beat_schedule = { "schedule": crontab(hour=2, minute=0), # UTC 02:00 }, "check-every-day-to-delete-api-logs": { - "task": "plane.bgtasks.api_logs_task.delete_api_logs", + "task": "plane.bgtasks.cleanup_task.delete_api_logs", "schedule": crontab(hour=2, minute=30), # UTC 02:30 }, + "check-every-day-to-delete-email-notification-logs": { + "task": "plane.bgtasks.cleanup_task.delete_email_notification_logs", + "schedule": crontab(hour=3, minute=0), # UTC 03:00 + }, + "check-every-day-to-delete-page-versions": { + "task": "plane.bgtasks.cleanup_task.delete_page_versions", + "schedule": crontab(hour=3, minute=30), # UTC 03:30 + }, + "check-every-day-to-delete-issue-description-versions": { 
+ "task": "plane.bgtasks.cleanup_task.delete_issue_description_versions", + "schedule": crontab(hour=4, minute=0), # UTC 04:00 + }, } diff --git a/apps/api/plane/db/management/commands/update_deleted_workspace_slug.py b/apps/api/plane/db/management/commands/update_deleted_workspace_slug.py index 48600e662..f4a9285ee 100644 --- a/apps/api/plane/db/management/commands/update_deleted_workspace_slug.py +++ b/apps/api/plane/db/management/commands/update_deleted_workspace_slug.py @@ -1,4 +1,3 @@ -import time from django.core.management.base import BaseCommand from django.db import transaction from plane.db.models import Workspace diff --git a/apps/api/plane/db/migrations/0099_profile_background_color_profile_goals_and_more.py b/apps/api/plane/db/migrations/0099_profile_background_color_profile_goals_and_more.py new file mode 100644 index 000000000..cc64d3a3c --- /dev/null +++ b/apps/api/plane/db/migrations/0099_profile_background_color_profile_goals_and_more.py @@ -0,0 +1,34 @@ +# Generated by Django 4.2.22 on 2025-07-27 16:01 + + +from django.db import migrations, models +import plane.utils.color + + +class Migration(migrations.Migration): + + dependencies = [ + ("db", "0098_profile_is_app_rail_docked_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="profile", + name="background_color", + field=models.CharField( + default=plane.utils.color.get_random_color, max_length=255 + ), + ), + migrations.AddField( + model_name="profile", + name="goals", + field=models.JSONField(default=dict), + ), + migrations.AddField( + model_name="workspace", + name="background_color", + field=models.CharField( + default=plane.utils.color.get_random_color, max_length=255 + ), + ), + ] diff --git a/apps/api/plane/db/migrations/0100_profile_has_marketing_email_consent_and_more.py b/apps/api/plane/db/migrations/0100_profile_has_marketing_email_consent_and_more.py new file mode 100644 index 000000000..674ca455d --- /dev/null +++ 
b/apps/api/plane/db/migrations/0100_profile_has_marketing_email_consent_and_more.py @@ -0,0 +1,1826 @@ +# Generated by Django 4.2.22 on 2025-07-30 08:13 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("db", "0099_profile_background_color_profile_goals_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="profile", + name="has_marketing_email_consent", + field=models.BooleanField(default=False), + ), + migrations.AlterField( + model_name="cycle", + name="timezone", + field=models.CharField( + choices=[ + ("Africa/Abidjan", "Africa/Abidjan"), + ("Africa/Accra", "Africa/Accra"), + ("Africa/Addis_Ababa", "Africa/Addis_Ababa"), + ("Africa/Algiers", "Africa/Algiers"), + ("Africa/Asmara", "Africa/Asmara"), + ("Africa/Bamako", "Africa/Bamako"), + ("Africa/Bangui", "Africa/Bangui"), + ("Africa/Banjul", "Africa/Banjul"), + ("Africa/Bissau", "Africa/Bissau"), + ("Africa/Blantyre", "Africa/Blantyre"), + ("Africa/Brazzaville", "Africa/Brazzaville"), + ("Africa/Bujumbura", "Africa/Bujumbura"), + ("Africa/Cairo", "Africa/Cairo"), + ("Africa/Casablanca", "Africa/Casablanca"), + ("Africa/Ceuta", "Africa/Ceuta"), + ("Africa/Conakry", "Africa/Conakry"), + ("Africa/Dakar", "Africa/Dakar"), + ("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "Africa/Djibouti"), + ("Africa/Douala", "Africa/Douala"), + ("Africa/El_Aaiun", "Africa/El_Aaiun"), + ("Africa/Freetown", "Africa/Freetown"), + ("Africa/Gaborone", "Africa/Gaborone"), + ("Africa/Harare", "Africa/Harare"), + ("Africa/Johannesburg", "Africa/Johannesburg"), + ("Africa/Juba", "Africa/Juba"), + ("Africa/Kampala", "Africa/Kampala"), + ("Africa/Khartoum", "Africa/Khartoum"), + ("Africa/Kigali", "Africa/Kigali"), + ("Africa/Kinshasa", "Africa/Kinshasa"), + ("Africa/Lagos", "Africa/Lagos"), + ("Africa/Libreville", "Africa/Libreville"), + ("Africa/Lome", "Africa/Lome"), + ("Africa/Luanda", "Africa/Luanda"), + ("Africa/Lubumbashi", 
"Africa/Lubumbashi"), + ("Africa/Lusaka", "Africa/Lusaka"), + ("Africa/Malabo", "Africa/Malabo"), + ("Africa/Maputo", "Africa/Maputo"), + ("Africa/Maseru", "Africa/Maseru"), + ("Africa/Mbabane", "Africa/Mbabane"), + ("Africa/Mogadishu", "Africa/Mogadishu"), + ("Africa/Monrovia", "Africa/Monrovia"), + ("Africa/Nairobi", "Africa/Nairobi"), + ("Africa/Ndjamena", "Africa/Ndjamena"), + ("Africa/Niamey", "Africa/Niamey"), + ("Africa/Nouakchott", "Africa/Nouakchott"), + ("Africa/Ouagadougou", "Africa/Ouagadougou"), + ("Africa/Porto-Novo", "Africa/Porto-Novo"), + ("Africa/Sao_Tome", "Africa/Sao_Tome"), + ("Africa/Tripoli", "Africa/Tripoli"), + ("Africa/Tunis", "Africa/Tunis"), + ("Africa/Windhoek", "Africa/Windhoek"), + ("America/Adak", "America/Adak"), + ("America/Anchorage", "America/Anchorage"), + ("America/Anguilla", "America/Anguilla"), + ("America/Antigua", "America/Antigua"), + ("America/Araguaina", "America/Araguaina"), + ( + "America/Argentina/Buenos_Aires", + "America/Argentina/Buenos_Aires", + ), + ("America/Argentina/Catamarca", "America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "America/Argentina/Mendoza"), + ( + "America/Argentina/Rio_Gallegos", + "America/Argentina/Rio_Gallegos", + ), + ("America/Argentina/Salta", "America/Argentina/Salta"), + ("America/Argentina/San_Juan", "America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"), + ("America/Aruba", "America/Aruba"), + ("America/Asuncion", "America/Asuncion"), + ("America/Atikokan", "America/Atikokan"), + ("America/Bahia", "America/Bahia"), + ("America/Bahia_Banderas", "America/Bahia_Banderas"), + ("America/Barbados", "America/Barbados"), + 
("America/Belem", "America/Belem"), + ("America/Belize", "America/Belize"), + ("America/Blanc-Sablon", "America/Blanc-Sablon"), + ("America/Boa_Vista", "America/Boa_Vista"), + ("America/Bogota", "America/Bogota"), + ("America/Boise", "America/Boise"), + ("America/Cambridge_Bay", "America/Cambridge_Bay"), + ("America/Campo_Grande", "America/Campo_Grande"), + ("America/Cancun", "America/Cancun"), + ("America/Caracas", "America/Caracas"), + ("America/Cayenne", "America/Cayenne"), + ("America/Cayman", "America/Cayman"), + ("America/Chicago", "America/Chicago"), + ("America/Chihuahua", "America/Chihuahua"), + ("America/Ciudad_Juarez", "America/Ciudad_Juarez"), + ("America/Costa_Rica", "America/Costa_Rica"), + ("America/Creston", "America/Creston"), + ("America/Cuiaba", "America/Cuiaba"), + ("America/Curacao", "America/Curacao"), + ("America/Danmarkshavn", "America/Danmarkshavn"), + ("America/Dawson", "America/Dawson"), + ("America/Dawson_Creek", "America/Dawson_Creek"), + ("America/Denver", "America/Denver"), + ("America/Detroit", "America/Detroit"), + ("America/Dominica", "America/Dominica"), + ("America/Edmonton", "America/Edmonton"), + ("America/Eirunepe", "America/Eirunepe"), + ("America/El_Salvador", "America/El_Salvador"), + ("America/Fort_Nelson", "America/Fort_Nelson"), + ("America/Fortaleza", "America/Fortaleza"), + ("America/Glace_Bay", "America/Glace_Bay"), + ("America/Goose_Bay", "America/Goose_Bay"), + ("America/Grand_Turk", "America/Grand_Turk"), + ("America/Grenada", "America/Grenada"), + ("America/Guadeloupe", "America/Guadeloupe"), + ("America/Guatemala", "America/Guatemala"), + ("America/Guayaquil", "America/Guayaquil"), + ("America/Guyana", "America/Guyana"), + ("America/Halifax", "America/Halifax"), + ("America/Havana", "America/Havana"), + ("America/Hermosillo", "America/Hermosillo"), + ("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "America/Indiana/Knox"), + ("America/Indiana/Marengo", 
"America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "America/Indiana/Winamac"), + ("America/Inuvik", "America/Inuvik"), + ("America/Iqaluit", "America/Iqaluit"), + ("America/Jamaica", "America/Jamaica"), + ("America/Juneau", "America/Juneau"), + ("America/Kentucky/Louisville", "America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "America/Kentucky/Monticello"), + ("America/Kralendijk", "America/Kralendijk"), + ("America/La_Paz", "America/La_Paz"), + ("America/Lima", "America/Lima"), + ("America/Los_Angeles", "America/Los_Angeles"), + ("America/Lower_Princes", "America/Lower_Princes"), + ("America/Maceio", "America/Maceio"), + ("America/Managua", "America/Managua"), + ("America/Manaus", "America/Manaus"), + ("America/Marigot", "America/Marigot"), + ("America/Martinique", "America/Martinique"), + ("America/Matamoros", "America/Matamoros"), + ("America/Mazatlan", "America/Mazatlan"), + ("America/Menominee", "America/Menominee"), + ("America/Merida", "America/Merida"), + ("America/Metlakatla", "America/Metlakatla"), + ("America/Mexico_City", "America/Mexico_City"), + ("America/Miquelon", "America/Miquelon"), + ("America/Moncton", "America/Moncton"), + ("America/Monterrey", "America/Monterrey"), + ("America/Montevideo", "America/Montevideo"), + ("America/Montserrat", "America/Montserrat"), + ("America/Nassau", "America/Nassau"), + ("America/New_York", "America/New_York"), + ("America/Nome", "America/Nome"), + ("America/Noronha", "America/Noronha"), + ("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "America/North_Dakota/Center"), + ( + "America/North_Dakota/New_Salem", + "America/North_Dakota/New_Salem", + ), + ("America/Nuuk", "America/Nuuk"), + ("America/Ojinaga", 
"America/Ojinaga"), + ("America/Panama", "America/Panama"), + ("America/Paramaribo", "America/Paramaribo"), + ("America/Phoenix", "America/Phoenix"), + ("America/Port-au-Prince", "America/Port-au-Prince"), + ("America/Port_of_Spain", "America/Port_of_Spain"), + ("America/Porto_Velho", "America/Porto_Velho"), + ("America/Puerto_Rico", "America/Puerto_Rico"), + ("America/Punta_Arenas", "America/Punta_Arenas"), + ("America/Rankin_Inlet", "America/Rankin_Inlet"), + ("America/Recife", "America/Recife"), + ("America/Regina", "America/Regina"), + ("America/Resolute", "America/Resolute"), + ("America/Rio_Branco", "America/Rio_Branco"), + ("America/Santarem", "America/Santarem"), + ("America/Santiago", "America/Santiago"), + ("America/Santo_Domingo", "America/Santo_Domingo"), + ("America/Sao_Paulo", "America/Sao_Paulo"), + ("America/Scoresbysund", "America/Scoresbysund"), + ("America/Sitka", "America/Sitka"), + ("America/St_Barthelemy", "America/St_Barthelemy"), + ("America/St_Johns", "America/St_Johns"), + ("America/St_Kitts", "America/St_Kitts"), + ("America/St_Lucia", "America/St_Lucia"), + ("America/St_Thomas", "America/St_Thomas"), + ("America/St_Vincent", "America/St_Vincent"), + ("America/Swift_Current", "America/Swift_Current"), + ("America/Tegucigalpa", "America/Tegucigalpa"), + ("America/Thule", "America/Thule"), + ("America/Tijuana", "America/Tijuana"), + ("America/Toronto", "America/Toronto"), + ("America/Tortola", "America/Tortola"), + ("America/Vancouver", "America/Vancouver"), + ("America/Whitehorse", "America/Whitehorse"), + ("America/Winnipeg", "America/Winnipeg"), + ("America/Yakutat", "America/Yakutat"), + ("Antarctica/Casey", "Antarctica/Casey"), + ("Antarctica/Davis", "Antarctica/Davis"), + ("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "Antarctica/Macquarie"), + ("Antarctica/Mawson", "Antarctica/Mawson"), + ("Antarctica/McMurdo", "Antarctica/McMurdo"), + ("Antarctica/Palmer", "Antarctica/Palmer"), + 
("Antarctica/Rothera", "Antarctica/Rothera"), + ("Antarctica/Syowa", "Antarctica/Syowa"), + ("Antarctica/Troll", "Antarctica/Troll"), + ("Antarctica/Vostok", "Antarctica/Vostok"), + ("Arctic/Longyearbyen", "Arctic/Longyearbyen"), + ("Asia/Aden", "Asia/Aden"), + ("Asia/Almaty", "Asia/Almaty"), + ("Asia/Amman", "Asia/Amman"), + ("Asia/Anadyr", "Asia/Anadyr"), + ("Asia/Aqtau", "Asia/Aqtau"), + ("Asia/Aqtobe", "Asia/Aqtobe"), + ("Asia/Ashgabat", "Asia/Ashgabat"), + ("Asia/Atyrau", "Asia/Atyrau"), + ("Asia/Baghdad", "Asia/Baghdad"), + ("Asia/Bahrain", "Asia/Bahrain"), + ("Asia/Baku", "Asia/Baku"), + ("Asia/Bangkok", "Asia/Bangkok"), + ("Asia/Barnaul", "Asia/Barnaul"), + ("Asia/Beirut", "Asia/Beirut"), + ("Asia/Bishkek", "Asia/Bishkek"), + ("Asia/Brunei", "Asia/Brunei"), + ("Asia/Chita", "Asia/Chita"), + ("Asia/Choibalsan", "Asia/Choibalsan"), + ("Asia/Colombo", "Asia/Colombo"), + ("Asia/Damascus", "Asia/Damascus"), + ("Asia/Dhaka", "Asia/Dhaka"), + ("Asia/Dili", "Asia/Dili"), + ("Asia/Dubai", "Asia/Dubai"), + ("Asia/Dushanbe", "Asia/Dushanbe"), + ("Asia/Famagusta", "Asia/Famagusta"), + ("Asia/Gaza", "Asia/Gaza"), + ("Asia/Hebron", "Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "Asia/Hong_Kong"), + ("Asia/Hovd", "Asia/Hovd"), + ("Asia/Irkutsk", "Asia/Irkutsk"), + ("Asia/Jakarta", "Asia/Jakarta"), + ("Asia/Jayapura", "Asia/Jayapura"), + ("Asia/Jerusalem", "Asia/Jerusalem"), + ("Asia/Kabul", "Asia/Kabul"), + ("Asia/Kamchatka", "Asia/Kamchatka"), + ("Asia/Karachi", "Asia/Karachi"), + ("Asia/Kathmandu", "Asia/Kathmandu"), + ("Asia/Khandyga", "Asia/Khandyga"), + ("Asia/Kolkata", "Asia/Kolkata"), + ("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"), + ("Asia/Kuching", "Asia/Kuching"), + ("Asia/Kuwait", "Asia/Kuwait"), + ("Asia/Macau", "Asia/Macau"), + ("Asia/Magadan", "Asia/Magadan"), + ("Asia/Makassar", "Asia/Makassar"), + ("Asia/Manila", "Asia/Manila"), + ("Asia/Muscat", "Asia/Muscat"), + ("Asia/Nicosia", 
"Asia/Nicosia"), + ("Asia/Novokuznetsk", "Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "Asia/Novosibirsk"), + ("Asia/Omsk", "Asia/Omsk"), + ("Asia/Oral", "Asia/Oral"), + ("Asia/Phnom_Penh", "Asia/Phnom_Penh"), + ("Asia/Pontianak", "Asia/Pontianak"), + ("Asia/Pyongyang", "Asia/Pyongyang"), + ("Asia/Qatar", "Asia/Qatar"), + ("Asia/Qostanay", "Asia/Qostanay"), + ("Asia/Qyzylorda", "Asia/Qyzylorda"), + ("Asia/Riyadh", "Asia/Riyadh"), + ("Asia/Sakhalin", "Asia/Sakhalin"), + ("Asia/Samarkand", "Asia/Samarkand"), + ("Asia/Seoul", "Asia/Seoul"), + ("Asia/Shanghai", "Asia/Shanghai"), + ("Asia/Singapore", "Asia/Singapore"), + ("Asia/Srednekolymsk", "Asia/Srednekolymsk"), + ("Asia/Taipei", "Asia/Taipei"), + ("Asia/Tashkent", "Asia/Tashkent"), + ("Asia/Tbilisi", "Asia/Tbilisi"), + ("Asia/Tehran", "Asia/Tehran"), + ("Asia/Thimphu", "Asia/Thimphu"), + ("Asia/Tokyo", "Asia/Tokyo"), + ("Asia/Tomsk", "Asia/Tomsk"), + ("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"), + ("Asia/Urumqi", "Asia/Urumqi"), + ("Asia/Ust-Nera", "Asia/Ust-Nera"), + ("Asia/Vientiane", "Asia/Vientiane"), + ("Asia/Vladivostok", "Asia/Vladivostok"), + ("Asia/Yakutsk", "Asia/Yakutsk"), + ("Asia/Yangon", "Asia/Yangon"), + ("Asia/Yekaterinburg", "Asia/Yekaterinburg"), + ("Asia/Yerevan", "Asia/Yerevan"), + ("Atlantic/Azores", "Atlantic/Azores"), + ("Atlantic/Bermuda", "Atlantic/Bermuda"), + ("Atlantic/Canary", "Atlantic/Canary"), + ("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "Atlantic/Faroe"), + ("Atlantic/Madeira", "Atlantic/Madeira"), + ("Atlantic/Reykjavik", "Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "Atlantic/St_Helena"), + ("Atlantic/Stanley", "Atlantic/Stanley"), + ("Australia/Adelaide", "Australia/Adelaide"), + ("Australia/Brisbane", "Australia/Brisbane"), + ("Australia/Broken_Hill", "Australia/Broken_Hill"), + ("Australia/Darwin", "Australia/Darwin"), + ("Australia/Eucla", "Australia/Eucla"), + ("Australia/Hobart", 
"Australia/Hobart"), + ("Australia/Lindeman", "Australia/Lindeman"), + ("Australia/Lord_Howe", "Australia/Lord_Howe"), + ("Australia/Melbourne", "Australia/Melbourne"), + ("Australia/Perth", "Australia/Perth"), + ("Australia/Sydney", "Australia/Sydney"), + ("Canada/Atlantic", "Canada/Atlantic"), + ("Canada/Central", "Canada/Central"), + ("Canada/Eastern", "Canada/Eastern"), + ("Canada/Mountain", "Canada/Mountain"), + ("Canada/Newfoundland", "Canada/Newfoundland"), + ("Canada/Pacific", "Canada/Pacific"), + ("Europe/Amsterdam", "Europe/Amsterdam"), + ("Europe/Andorra", "Europe/Andorra"), + ("Europe/Astrakhan", "Europe/Astrakhan"), + ("Europe/Athens", "Europe/Athens"), + ("Europe/Belgrade", "Europe/Belgrade"), + ("Europe/Berlin", "Europe/Berlin"), + ("Europe/Bratislava", "Europe/Bratislava"), + ("Europe/Brussels", "Europe/Brussels"), + ("Europe/Bucharest", "Europe/Bucharest"), + ("Europe/Budapest", "Europe/Budapest"), + ("Europe/Busingen", "Europe/Busingen"), + ("Europe/Chisinau", "Europe/Chisinau"), + ("Europe/Copenhagen", "Europe/Copenhagen"), + ("Europe/Dublin", "Europe/Dublin"), + ("Europe/Gibraltar", "Europe/Gibraltar"), + ("Europe/Guernsey", "Europe/Guernsey"), + ("Europe/Helsinki", "Europe/Helsinki"), + ("Europe/Isle_of_Man", "Europe/Isle_of_Man"), + ("Europe/Istanbul", "Europe/Istanbul"), + ("Europe/Jersey", "Europe/Jersey"), + ("Europe/Kaliningrad", "Europe/Kaliningrad"), + ("Europe/Kirov", "Europe/Kirov"), + ("Europe/Kyiv", "Europe/Kyiv"), + ("Europe/Lisbon", "Europe/Lisbon"), + ("Europe/Ljubljana", "Europe/Ljubljana"), + ("Europe/London", "Europe/London"), + ("Europe/Luxembourg", "Europe/Luxembourg"), + ("Europe/Madrid", "Europe/Madrid"), + ("Europe/Malta", "Europe/Malta"), + ("Europe/Mariehamn", "Europe/Mariehamn"), + ("Europe/Minsk", "Europe/Minsk"), + ("Europe/Monaco", "Europe/Monaco"), + ("Europe/Moscow", "Europe/Moscow"), + ("Europe/Oslo", "Europe/Oslo"), + ("Europe/Paris", "Europe/Paris"), + ("Europe/Podgorica", "Europe/Podgorica"), + 
("Europe/Prague", "Europe/Prague"), + ("Europe/Riga", "Europe/Riga"), + ("Europe/Rome", "Europe/Rome"), + ("Europe/Samara", "Europe/Samara"), + ("Europe/San_Marino", "Europe/San_Marino"), + ("Europe/Sarajevo", "Europe/Sarajevo"), + ("Europe/Saratov", "Europe/Saratov"), + ("Europe/Simferopol", "Europe/Simferopol"), + ("Europe/Skopje", "Europe/Skopje"), + ("Europe/Sofia", "Europe/Sofia"), + ("Europe/Stockholm", "Europe/Stockholm"), + ("Europe/Tallinn", "Europe/Tallinn"), + ("Europe/Tirane", "Europe/Tirane"), + ("Europe/Ulyanovsk", "Europe/Ulyanovsk"), + ("Europe/Vaduz", "Europe/Vaduz"), + ("Europe/Vatican", "Europe/Vatican"), + ("Europe/Vienna", "Europe/Vienna"), + ("Europe/Vilnius", "Europe/Vilnius"), + ("Europe/Volgograd", "Europe/Volgograd"), + ("Europe/Warsaw", "Europe/Warsaw"), + ("Europe/Zagreb", "Europe/Zagreb"), + ("Europe/Zurich", "Europe/Zurich"), + ("GMT", "GMT"), + ("Indian/Antananarivo", "Indian/Antananarivo"), + ("Indian/Chagos", "Indian/Chagos"), + ("Indian/Christmas", "Indian/Christmas"), + ("Indian/Cocos", "Indian/Cocos"), + ("Indian/Comoro", "Indian/Comoro"), + ("Indian/Kerguelen", "Indian/Kerguelen"), + ("Indian/Mahe", "Indian/Mahe"), + ("Indian/Maldives", "Indian/Maldives"), + ("Indian/Mauritius", "Indian/Mauritius"), + ("Indian/Mayotte", "Indian/Mayotte"), + ("Indian/Reunion", "Indian/Reunion"), + ("Pacific/Apia", "Pacific/Apia"), + ("Pacific/Auckland", "Pacific/Auckland"), + ("Pacific/Bougainville", "Pacific/Bougainville"), + ("Pacific/Chatham", "Pacific/Chatham"), + ("Pacific/Chuuk", "Pacific/Chuuk"), + ("Pacific/Easter", "Pacific/Easter"), + ("Pacific/Efate", "Pacific/Efate"), + ("Pacific/Fakaofo", "Pacific/Fakaofo"), + ("Pacific/Fiji", "Pacific/Fiji"), + ("Pacific/Funafuti", "Pacific/Funafuti"), + ("Pacific/Galapagos", "Pacific/Galapagos"), + ("Pacific/Gambier", "Pacific/Gambier"), + ("Pacific/Guadalcanal", "Pacific/Guadalcanal"), + ("Pacific/Guam", "Pacific/Guam"), + ("Pacific/Honolulu", "Pacific/Honolulu"), + ("Pacific/Kanton", 
"Pacific/Kanton"), + ("Pacific/Kiritimati", "Pacific/Kiritimati"), + ("Pacific/Kosrae", "Pacific/Kosrae"), + ("Pacific/Kwajalein", "Pacific/Kwajalein"), + ("Pacific/Majuro", "Pacific/Majuro"), + ("Pacific/Marquesas", "Pacific/Marquesas"), + ("Pacific/Midway", "Pacific/Midway"), + ("Pacific/Nauru", "Pacific/Nauru"), + ("Pacific/Niue", "Pacific/Niue"), + ("Pacific/Norfolk", "Pacific/Norfolk"), + ("Pacific/Noumea", "Pacific/Noumea"), + ("Pacific/Pago_Pago", "Pacific/Pago_Pago"), + ("Pacific/Palau", "Pacific/Palau"), + ("Pacific/Pitcairn", "Pacific/Pitcairn"), + ("Pacific/Pohnpei", "Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "Pacific/Rarotonga"), + ("Pacific/Saipan", "Pacific/Saipan"), + ("Pacific/Tahiti", "Pacific/Tahiti"), + ("Pacific/Tarawa", "Pacific/Tarawa"), + ("Pacific/Tongatapu", "Pacific/Tongatapu"), + ("Pacific/Wake", "Pacific/Wake"), + ("Pacific/Wallis", "Pacific/Wallis"), + ("US/Alaska", "US/Alaska"), + ("US/Arizona", "US/Arizona"), + ("US/Central", "US/Central"), + ("US/Eastern", "US/Eastern"), + ("US/Hawaii", "US/Hawaii"), + ("US/Mountain", "US/Mountain"), + ("US/Pacific", "US/Pacific"), + ("UTC", "UTC"), + ], + default="UTC", + max_length=255, + ), + ), + migrations.AlterField( + model_name="project", + name="timezone", + field=models.CharField( + choices=[ + ("Africa/Abidjan", "Africa/Abidjan"), + ("Africa/Accra", "Africa/Accra"), + ("Africa/Addis_Ababa", "Africa/Addis_Ababa"), + ("Africa/Algiers", "Africa/Algiers"), + ("Africa/Asmara", "Africa/Asmara"), + ("Africa/Bamako", "Africa/Bamako"), + ("Africa/Bangui", "Africa/Bangui"), + ("Africa/Banjul", "Africa/Banjul"), + ("Africa/Bissau", "Africa/Bissau"), + ("Africa/Blantyre", "Africa/Blantyre"), + ("Africa/Brazzaville", "Africa/Brazzaville"), + ("Africa/Bujumbura", "Africa/Bujumbura"), + ("Africa/Cairo", "Africa/Cairo"), + ("Africa/Casablanca", "Africa/Casablanca"), + ("Africa/Ceuta", "Africa/Ceuta"), + ("Africa/Conakry", "Africa/Conakry"), + 
("Africa/Dakar", "Africa/Dakar"), + ("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "Africa/Djibouti"), + ("Africa/Douala", "Africa/Douala"), + ("Africa/El_Aaiun", "Africa/El_Aaiun"), + ("Africa/Freetown", "Africa/Freetown"), + ("Africa/Gaborone", "Africa/Gaborone"), + ("Africa/Harare", "Africa/Harare"), + ("Africa/Johannesburg", "Africa/Johannesburg"), + ("Africa/Juba", "Africa/Juba"), + ("Africa/Kampala", "Africa/Kampala"), + ("Africa/Khartoum", "Africa/Khartoum"), + ("Africa/Kigali", "Africa/Kigali"), + ("Africa/Kinshasa", "Africa/Kinshasa"), + ("Africa/Lagos", "Africa/Lagos"), + ("Africa/Libreville", "Africa/Libreville"), + ("Africa/Lome", "Africa/Lome"), + ("Africa/Luanda", "Africa/Luanda"), + ("Africa/Lubumbashi", "Africa/Lubumbashi"), + ("Africa/Lusaka", "Africa/Lusaka"), + ("Africa/Malabo", "Africa/Malabo"), + ("Africa/Maputo", "Africa/Maputo"), + ("Africa/Maseru", "Africa/Maseru"), + ("Africa/Mbabane", "Africa/Mbabane"), + ("Africa/Mogadishu", "Africa/Mogadishu"), + ("Africa/Monrovia", "Africa/Monrovia"), + ("Africa/Nairobi", "Africa/Nairobi"), + ("Africa/Ndjamena", "Africa/Ndjamena"), + ("Africa/Niamey", "Africa/Niamey"), + ("Africa/Nouakchott", "Africa/Nouakchott"), + ("Africa/Ouagadougou", "Africa/Ouagadougou"), + ("Africa/Porto-Novo", "Africa/Porto-Novo"), + ("Africa/Sao_Tome", "Africa/Sao_Tome"), + ("Africa/Tripoli", "Africa/Tripoli"), + ("Africa/Tunis", "Africa/Tunis"), + ("Africa/Windhoek", "Africa/Windhoek"), + ("America/Adak", "America/Adak"), + ("America/Anchorage", "America/Anchorage"), + ("America/Anguilla", "America/Anguilla"), + ("America/Antigua", "America/Antigua"), + ("America/Araguaina", "America/Araguaina"), + ( + "America/Argentina/Buenos_Aires", + "America/Argentina/Buenos_Aires", + ), + ("America/Argentina/Catamarca", "America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", 
"America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "America/Argentina/Mendoza"), + ( + "America/Argentina/Rio_Gallegos", + "America/Argentina/Rio_Gallegos", + ), + ("America/Argentina/Salta", "America/Argentina/Salta"), + ("America/Argentina/San_Juan", "America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"), + ("America/Aruba", "America/Aruba"), + ("America/Asuncion", "America/Asuncion"), + ("America/Atikokan", "America/Atikokan"), + ("America/Bahia", "America/Bahia"), + ("America/Bahia_Banderas", "America/Bahia_Banderas"), + ("America/Barbados", "America/Barbados"), + ("America/Belem", "America/Belem"), + ("America/Belize", "America/Belize"), + ("America/Blanc-Sablon", "America/Blanc-Sablon"), + ("America/Boa_Vista", "America/Boa_Vista"), + ("America/Bogota", "America/Bogota"), + ("America/Boise", "America/Boise"), + ("America/Cambridge_Bay", "America/Cambridge_Bay"), + ("America/Campo_Grande", "America/Campo_Grande"), + ("America/Cancun", "America/Cancun"), + ("America/Caracas", "America/Caracas"), + ("America/Cayenne", "America/Cayenne"), + ("America/Cayman", "America/Cayman"), + ("America/Chicago", "America/Chicago"), + ("America/Chihuahua", "America/Chihuahua"), + ("America/Ciudad_Juarez", "America/Ciudad_Juarez"), + ("America/Costa_Rica", "America/Costa_Rica"), + ("America/Creston", "America/Creston"), + ("America/Cuiaba", "America/Cuiaba"), + ("America/Curacao", "America/Curacao"), + ("America/Danmarkshavn", "America/Danmarkshavn"), + ("America/Dawson", "America/Dawson"), + ("America/Dawson_Creek", "America/Dawson_Creek"), + ("America/Denver", "America/Denver"), + ("America/Detroit", "America/Detroit"), + ("America/Dominica", "America/Dominica"), + ("America/Edmonton", "America/Edmonton"), + ("America/Eirunepe", "America/Eirunepe"), + ("America/El_Salvador", "America/El_Salvador"), + 
("America/Fort_Nelson", "America/Fort_Nelson"), + ("America/Fortaleza", "America/Fortaleza"), + ("America/Glace_Bay", "America/Glace_Bay"), + ("America/Goose_Bay", "America/Goose_Bay"), + ("America/Grand_Turk", "America/Grand_Turk"), + ("America/Grenada", "America/Grenada"), + ("America/Guadeloupe", "America/Guadeloupe"), + ("America/Guatemala", "America/Guatemala"), + ("America/Guayaquil", "America/Guayaquil"), + ("America/Guyana", "America/Guyana"), + ("America/Halifax", "America/Halifax"), + ("America/Havana", "America/Havana"), + ("America/Hermosillo", "America/Hermosillo"), + ("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "America/Indiana/Knox"), + ("America/Indiana/Marengo", "America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "America/Indiana/Winamac"), + ("America/Inuvik", "America/Inuvik"), + ("America/Iqaluit", "America/Iqaluit"), + ("America/Jamaica", "America/Jamaica"), + ("America/Juneau", "America/Juneau"), + ("America/Kentucky/Louisville", "America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "America/Kentucky/Monticello"), + ("America/Kralendijk", "America/Kralendijk"), + ("America/La_Paz", "America/La_Paz"), + ("America/Lima", "America/Lima"), + ("America/Los_Angeles", "America/Los_Angeles"), + ("America/Lower_Princes", "America/Lower_Princes"), + ("America/Maceio", "America/Maceio"), + ("America/Managua", "America/Managua"), + ("America/Manaus", "America/Manaus"), + ("America/Marigot", "America/Marigot"), + ("America/Martinique", "America/Martinique"), + ("America/Matamoros", "America/Matamoros"), + ("America/Mazatlan", "America/Mazatlan"), + ("America/Menominee", "America/Menominee"), + ("America/Merida", "America/Merida"), + 
("America/Metlakatla", "America/Metlakatla"), + ("America/Mexico_City", "America/Mexico_City"), + ("America/Miquelon", "America/Miquelon"), + ("America/Moncton", "America/Moncton"), + ("America/Monterrey", "America/Monterrey"), + ("America/Montevideo", "America/Montevideo"), + ("America/Montserrat", "America/Montserrat"), + ("America/Nassau", "America/Nassau"), + ("America/New_York", "America/New_York"), + ("America/Nome", "America/Nome"), + ("America/Noronha", "America/Noronha"), + ("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "America/North_Dakota/Center"), + ( + "America/North_Dakota/New_Salem", + "America/North_Dakota/New_Salem", + ), + ("America/Nuuk", "America/Nuuk"), + ("America/Ojinaga", "America/Ojinaga"), + ("America/Panama", "America/Panama"), + ("America/Paramaribo", "America/Paramaribo"), + ("America/Phoenix", "America/Phoenix"), + ("America/Port-au-Prince", "America/Port-au-Prince"), + ("America/Port_of_Spain", "America/Port_of_Spain"), + ("America/Porto_Velho", "America/Porto_Velho"), + ("America/Puerto_Rico", "America/Puerto_Rico"), + ("America/Punta_Arenas", "America/Punta_Arenas"), + ("America/Rankin_Inlet", "America/Rankin_Inlet"), + ("America/Recife", "America/Recife"), + ("America/Regina", "America/Regina"), + ("America/Resolute", "America/Resolute"), + ("America/Rio_Branco", "America/Rio_Branco"), + ("America/Santarem", "America/Santarem"), + ("America/Santiago", "America/Santiago"), + ("America/Santo_Domingo", "America/Santo_Domingo"), + ("America/Sao_Paulo", "America/Sao_Paulo"), + ("America/Scoresbysund", "America/Scoresbysund"), + ("America/Sitka", "America/Sitka"), + ("America/St_Barthelemy", "America/St_Barthelemy"), + ("America/St_Johns", "America/St_Johns"), + ("America/St_Kitts", "America/St_Kitts"), + ("America/St_Lucia", "America/St_Lucia"), + ("America/St_Thomas", "America/St_Thomas"), + ("America/St_Vincent", "America/St_Vincent"), + ("America/Swift_Current", 
"America/Swift_Current"), + ("America/Tegucigalpa", "America/Tegucigalpa"), + ("America/Thule", "America/Thule"), + ("America/Tijuana", "America/Tijuana"), + ("America/Toronto", "America/Toronto"), + ("America/Tortola", "America/Tortola"), + ("America/Vancouver", "America/Vancouver"), + ("America/Whitehorse", "America/Whitehorse"), + ("America/Winnipeg", "America/Winnipeg"), + ("America/Yakutat", "America/Yakutat"), + ("Antarctica/Casey", "Antarctica/Casey"), + ("Antarctica/Davis", "Antarctica/Davis"), + ("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "Antarctica/Macquarie"), + ("Antarctica/Mawson", "Antarctica/Mawson"), + ("Antarctica/McMurdo", "Antarctica/McMurdo"), + ("Antarctica/Palmer", "Antarctica/Palmer"), + ("Antarctica/Rothera", "Antarctica/Rothera"), + ("Antarctica/Syowa", "Antarctica/Syowa"), + ("Antarctica/Troll", "Antarctica/Troll"), + ("Antarctica/Vostok", "Antarctica/Vostok"), + ("Arctic/Longyearbyen", "Arctic/Longyearbyen"), + ("Asia/Aden", "Asia/Aden"), + ("Asia/Almaty", "Asia/Almaty"), + ("Asia/Amman", "Asia/Amman"), + ("Asia/Anadyr", "Asia/Anadyr"), + ("Asia/Aqtau", "Asia/Aqtau"), + ("Asia/Aqtobe", "Asia/Aqtobe"), + ("Asia/Ashgabat", "Asia/Ashgabat"), + ("Asia/Atyrau", "Asia/Atyrau"), + ("Asia/Baghdad", "Asia/Baghdad"), + ("Asia/Bahrain", "Asia/Bahrain"), + ("Asia/Baku", "Asia/Baku"), + ("Asia/Bangkok", "Asia/Bangkok"), + ("Asia/Barnaul", "Asia/Barnaul"), + ("Asia/Beirut", "Asia/Beirut"), + ("Asia/Bishkek", "Asia/Bishkek"), + ("Asia/Brunei", "Asia/Brunei"), + ("Asia/Chita", "Asia/Chita"), + ("Asia/Choibalsan", "Asia/Choibalsan"), + ("Asia/Colombo", "Asia/Colombo"), + ("Asia/Damascus", "Asia/Damascus"), + ("Asia/Dhaka", "Asia/Dhaka"), + ("Asia/Dili", "Asia/Dili"), + ("Asia/Dubai", "Asia/Dubai"), + ("Asia/Dushanbe", "Asia/Dushanbe"), + ("Asia/Famagusta", "Asia/Famagusta"), + ("Asia/Gaza", "Asia/Gaza"), + ("Asia/Hebron", "Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", 
"Asia/Hong_Kong"), + ("Asia/Hovd", "Asia/Hovd"), + ("Asia/Irkutsk", "Asia/Irkutsk"), + ("Asia/Jakarta", "Asia/Jakarta"), + ("Asia/Jayapura", "Asia/Jayapura"), + ("Asia/Jerusalem", "Asia/Jerusalem"), + ("Asia/Kabul", "Asia/Kabul"), + ("Asia/Kamchatka", "Asia/Kamchatka"), + ("Asia/Karachi", "Asia/Karachi"), + ("Asia/Kathmandu", "Asia/Kathmandu"), + ("Asia/Khandyga", "Asia/Khandyga"), + ("Asia/Kolkata", "Asia/Kolkata"), + ("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"), + ("Asia/Kuching", "Asia/Kuching"), + ("Asia/Kuwait", "Asia/Kuwait"), + ("Asia/Macau", "Asia/Macau"), + ("Asia/Magadan", "Asia/Magadan"), + ("Asia/Makassar", "Asia/Makassar"), + ("Asia/Manila", "Asia/Manila"), + ("Asia/Muscat", "Asia/Muscat"), + ("Asia/Nicosia", "Asia/Nicosia"), + ("Asia/Novokuznetsk", "Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "Asia/Novosibirsk"), + ("Asia/Omsk", "Asia/Omsk"), + ("Asia/Oral", "Asia/Oral"), + ("Asia/Phnom_Penh", "Asia/Phnom_Penh"), + ("Asia/Pontianak", "Asia/Pontianak"), + ("Asia/Pyongyang", "Asia/Pyongyang"), + ("Asia/Qatar", "Asia/Qatar"), + ("Asia/Qostanay", "Asia/Qostanay"), + ("Asia/Qyzylorda", "Asia/Qyzylorda"), + ("Asia/Riyadh", "Asia/Riyadh"), + ("Asia/Sakhalin", "Asia/Sakhalin"), + ("Asia/Samarkand", "Asia/Samarkand"), + ("Asia/Seoul", "Asia/Seoul"), + ("Asia/Shanghai", "Asia/Shanghai"), + ("Asia/Singapore", "Asia/Singapore"), + ("Asia/Srednekolymsk", "Asia/Srednekolymsk"), + ("Asia/Taipei", "Asia/Taipei"), + ("Asia/Tashkent", "Asia/Tashkent"), + ("Asia/Tbilisi", "Asia/Tbilisi"), + ("Asia/Tehran", "Asia/Tehran"), + ("Asia/Thimphu", "Asia/Thimphu"), + ("Asia/Tokyo", "Asia/Tokyo"), + ("Asia/Tomsk", "Asia/Tomsk"), + ("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"), + ("Asia/Urumqi", "Asia/Urumqi"), + ("Asia/Ust-Nera", "Asia/Ust-Nera"), + ("Asia/Vientiane", "Asia/Vientiane"), + ("Asia/Vladivostok", "Asia/Vladivostok"), + ("Asia/Yakutsk", "Asia/Yakutsk"), + ("Asia/Yangon", "Asia/Yangon"), + ("Asia/Yekaterinburg", 
"Asia/Yekaterinburg"), + ("Asia/Yerevan", "Asia/Yerevan"), + ("Atlantic/Azores", "Atlantic/Azores"), + ("Atlantic/Bermuda", "Atlantic/Bermuda"), + ("Atlantic/Canary", "Atlantic/Canary"), + ("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "Atlantic/Faroe"), + ("Atlantic/Madeira", "Atlantic/Madeira"), + ("Atlantic/Reykjavik", "Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "Atlantic/St_Helena"), + ("Atlantic/Stanley", "Atlantic/Stanley"), + ("Australia/Adelaide", "Australia/Adelaide"), + ("Australia/Brisbane", "Australia/Brisbane"), + ("Australia/Broken_Hill", "Australia/Broken_Hill"), + ("Australia/Darwin", "Australia/Darwin"), + ("Australia/Eucla", "Australia/Eucla"), + ("Australia/Hobart", "Australia/Hobart"), + ("Australia/Lindeman", "Australia/Lindeman"), + ("Australia/Lord_Howe", "Australia/Lord_Howe"), + ("Australia/Melbourne", "Australia/Melbourne"), + ("Australia/Perth", "Australia/Perth"), + ("Australia/Sydney", "Australia/Sydney"), + ("Canada/Atlantic", "Canada/Atlantic"), + ("Canada/Central", "Canada/Central"), + ("Canada/Eastern", "Canada/Eastern"), + ("Canada/Mountain", "Canada/Mountain"), + ("Canada/Newfoundland", "Canada/Newfoundland"), + ("Canada/Pacific", "Canada/Pacific"), + ("Europe/Amsterdam", "Europe/Amsterdam"), + ("Europe/Andorra", "Europe/Andorra"), + ("Europe/Astrakhan", "Europe/Astrakhan"), + ("Europe/Athens", "Europe/Athens"), + ("Europe/Belgrade", "Europe/Belgrade"), + ("Europe/Berlin", "Europe/Berlin"), + ("Europe/Bratislava", "Europe/Bratislava"), + ("Europe/Brussels", "Europe/Brussels"), + ("Europe/Bucharest", "Europe/Bucharest"), + ("Europe/Budapest", "Europe/Budapest"), + ("Europe/Busingen", "Europe/Busingen"), + ("Europe/Chisinau", "Europe/Chisinau"), + ("Europe/Copenhagen", "Europe/Copenhagen"), + ("Europe/Dublin", "Europe/Dublin"), + ("Europe/Gibraltar", "Europe/Gibraltar"), + ("Europe/Guernsey", "Europe/Guernsey"), + ("Europe/Helsinki", 
"Europe/Helsinki"), + ("Europe/Isle_of_Man", "Europe/Isle_of_Man"), + ("Europe/Istanbul", "Europe/Istanbul"), + ("Europe/Jersey", "Europe/Jersey"), + ("Europe/Kaliningrad", "Europe/Kaliningrad"), + ("Europe/Kirov", "Europe/Kirov"), + ("Europe/Kyiv", "Europe/Kyiv"), + ("Europe/Lisbon", "Europe/Lisbon"), + ("Europe/Ljubljana", "Europe/Ljubljana"), + ("Europe/London", "Europe/London"), + ("Europe/Luxembourg", "Europe/Luxembourg"), + ("Europe/Madrid", "Europe/Madrid"), + ("Europe/Malta", "Europe/Malta"), + ("Europe/Mariehamn", "Europe/Mariehamn"), + ("Europe/Minsk", "Europe/Minsk"), + ("Europe/Monaco", "Europe/Monaco"), + ("Europe/Moscow", "Europe/Moscow"), + ("Europe/Oslo", "Europe/Oslo"), + ("Europe/Paris", "Europe/Paris"), + ("Europe/Podgorica", "Europe/Podgorica"), + ("Europe/Prague", "Europe/Prague"), + ("Europe/Riga", "Europe/Riga"), + ("Europe/Rome", "Europe/Rome"), + ("Europe/Samara", "Europe/Samara"), + ("Europe/San_Marino", "Europe/San_Marino"), + ("Europe/Sarajevo", "Europe/Sarajevo"), + ("Europe/Saratov", "Europe/Saratov"), + ("Europe/Simferopol", "Europe/Simferopol"), + ("Europe/Skopje", "Europe/Skopje"), + ("Europe/Sofia", "Europe/Sofia"), + ("Europe/Stockholm", "Europe/Stockholm"), + ("Europe/Tallinn", "Europe/Tallinn"), + ("Europe/Tirane", "Europe/Tirane"), + ("Europe/Ulyanovsk", "Europe/Ulyanovsk"), + ("Europe/Vaduz", "Europe/Vaduz"), + ("Europe/Vatican", "Europe/Vatican"), + ("Europe/Vienna", "Europe/Vienna"), + ("Europe/Vilnius", "Europe/Vilnius"), + ("Europe/Volgograd", "Europe/Volgograd"), + ("Europe/Warsaw", "Europe/Warsaw"), + ("Europe/Zagreb", "Europe/Zagreb"), + ("Europe/Zurich", "Europe/Zurich"), + ("GMT", "GMT"), + ("Indian/Antananarivo", "Indian/Antananarivo"), + ("Indian/Chagos", "Indian/Chagos"), + ("Indian/Christmas", "Indian/Christmas"), + ("Indian/Cocos", "Indian/Cocos"), + ("Indian/Comoro", "Indian/Comoro"), + ("Indian/Kerguelen", "Indian/Kerguelen"), + ("Indian/Mahe", "Indian/Mahe"), + ("Indian/Maldives", "Indian/Maldives"), + 
("Indian/Mauritius", "Indian/Mauritius"), + ("Indian/Mayotte", "Indian/Mayotte"), + ("Indian/Reunion", "Indian/Reunion"), + ("Pacific/Apia", "Pacific/Apia"), + ("Pacific/Auckland", "Pacific/Auckland"), + ("Pacific/Bougainville", "Pacific/Bougainville"), + ("Pacific/Chatham", "Pacific/Chatham"), + ("Pacific/Chuuk", "Pacific/Chuuk"), + ("Pacific/Easter", "Pacific/Easter"), + ("Pacific/Efate", "Pacific/Efate"), + ("Pacific/Fakaofo", "Pacific/Fakaofo"), + ("Pacific/Fiji", "Pacific/Fiji"), + ("Pacific/Funafuti", "Pacific/Funafuti"), + ("Pacific/Galapagos", "Pacific/Galapagos"), + ("Pacific/Gambier", "Pacific/Gambier"), + ("Pacific/Guadalcanal", "Pacific/Guadalcanal"), + ("Pacific/Guam", "Pacific/Guam"), + ("Pacific/Honolulu", "Pacific/Honolulu"), + ("Pacific/Kanton", "Pacific/Kanton"), + ("Pacific/Kiritimati", "Pacific/Kiritimati"), + ("Pacific/Kosrae", "Pacific/Kosrae"), + ("Pacific/Kwajalein", "Pacific/Kwajalein"), + ("Pacific/Majuro", "Pacific/Majuro"), + ("Pacific/Marquesas", "Pacific/Marquesas"), + ("Pacific/Midway", "Pacific/Midway"), + ("Pacific/Nauru", "Pacific/Nauru"), + ("Pacific/Niue", "Pacific/Niue"), + ("Pacific/Norfolk", "Pacific/Norfolk"), + ("Pacific/Noumea", "Pacific/Noumea"), + ("Pacific/Pago_Pago", "Pacific/Pago_Pago"), + ("Pacific/Palau", "Pacific/Palau"), + ("Pacific/Pitcairn", "Pacific/Pitcairn"), + ("Pacific/Pohnpei", "Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "Pacific/Rarotonga"), + ("Pacific/Saipan", "Pacific/Saipan"), + ("Pacific/Tahiti", "Pacific/Tahiti"), + ("Pacific/Tarawa", "Pacific/Tarawa"), + ("Pacific/Tongatapu", "Pacific/Tongatapu"), + ("Pacific/Wake", "Pacific/Wake"), + ("Pacific/Wallis", "Pacific/Wallis"), + ("US/Alaska", "US/Alaska"), + ("US/Arizona", "US/Arizona"), + ("US/Central", "US/Central"), + ("US/Eastern", "US/Eastern"), + ("US/Hawaii", "US/Hawaii"), + ("US/Mountain", "US/Mountain"), + ("US/Pacific", "US/Pacific"), + ("UTC", "UTC"), + ], + default="UTC", + max_length=255, + 
), + ), + migrations.AlterField( + model_name="user", + name="user_timezone", + field=models.CharField( + choices=[ + ("Africa/Abidjan", "Africa/Abidjan"), + ("Africa/Accra", "Africa/Accra"), + ("Africa/Addis_Ababa", "Africa/Addis_Ababa"), + ("Africa/Algiers", "Africa/Algiers"), + ("Africa/Asmara", "Africa/Asmara"), + ("Africa/Bamako", "Africa/Bamako"), + ("Africa/Bangui", "Africa/Bangui"), + ("Africa/Banjul", "Africa/Banjul"), + ("Africa/Bissau", "Africa/Bissau"), + ("Africa/Blantyre", "Africa/Blantyre"), + ("Africa/Brazzaville", "Africa/Brazzaville"), + ("Africa/Bujumbura", "Africa/Bujumbura"), + ("Africa/Cairo", "Africa/Cairo"), + ("Africa/Casablanca", "Africa/Casablanca"), + ("Africa/Ceuta", "Africa/Ceuta"), + ("Africa/Conakry", "Africa/Conakry"), + ("Africa/Dakar", "Africa/Dakar"), + ("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "Africa/Djibouti"), + ("Africa/Douala", "Africa/Douala"), + ("Africa/El_Aaiun", "Africa/El_Aaiun"), + ("Africa/Freetown", "Africa/Freetown"), + ("Africa/Gaborone", "Africa/Gaborone"), + ("Africa/Harare", "Africa/Harare"), + ("Africa/Johannesburg", "Africa/Johannesburg"), + ("Africa/Juba", "Africa/Juba"), + ("Africa/Kampala", "Africa/Kampala"), + ("Africa/Khartoum", "Africa/Khartoum"), + ("Africa/Kigali", "Africa/Kigali"), + ("Africa/Kinshasa", "Africa/Kinshasa"), + ("Africa/Lagos", "Africa/Lagos"), + ("Africa/Libreville", "Africa/Libreville"), + ("Africa/Lome", "Africa/Lome"), + ("Africa/Luanda", "Africa/Luanda"), + ("Africa/Lubumbashi", "Africa/Lubumbashi"), + ("Africa/Lusaka", "Africa/Lusaka"), + ("Africa/Malabo", "Africa/Malabo"), + ("Africa/Maputo", "Africa/Maputo"), + ("Africa/Maseru", "Africa/Maseru"), + ("Africa/Mbabane", "Africa/Mbabane"), + ("Africa/Mogadishu", "Africa/Mogadishu"), + ("Africa/Monrovia", "Africa/Monrovia"), + ("Africa/Nairobi", "Africa/Nairobi"), + ("Africa/Ndjamena", "Africa/Ndjamena"), + ("Africa/Niamey", "Africa/Niamey"), + ("Africa/Nouakchott", "Africa/Nouakchott"), + 
("Africa/Ouagadougou", "Africa/Ouagadougou"), + ("Africa/Porto-Novo", "Africa/Porto-Novo"), + ("Africa/Sao_Tome", "Africa/Sao_Tome"), + ("Africa/Tripoli", "Africa/Tripoli"), + ("Africa/Tunis", "Africa/Tunis"), + ("Africa/Windhoek", "Africa/Windhoek"), + ("America/Adak", "America/Adak"), + ("America/Anchorage", "America/Anchorage"), + ("America/Anguilla", "America/Anguilla"), + ("America/Antigua", "America/Antigua"), + ("America/Araguaina", "America/Araguaina"), + ( + "America/Argentina/Buenos_Aires", + "America/Argentina/Buenos_Aires", + ), + ("America/Argentina/Catamarca", "America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "America/Argentina/Mendoza"), + ( + "America/Argentina/Rio_Gallegos", + "America/Argentina/Rio_Gallegos", + ), + ("America/Argentina/Salta", "America/Argentina/Salta"), + ("America/Argentina/San_Juan", "America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"), + ("America/Aruba", "America/Aruba"), + ("America/Asuncion", "America/Asuncion"), + ("America/Atikokan", "America/Atikokan"), + ("America/Bahia", "America/Bahia"), + ("America/Bahia_Banderas", "America/Bahia_Banderas"), + ("America/Barbados", "America/Barbados"), + ("America/Belem", "America/Belem"), + ("America/Belize", "America/Belize"), + ("America/Blanc-Sablon", "America/Blanc-Sablon"), + ("America/Boa_Vista", "America/Boa_Vista"), + ("America/Bogota", "America/Bogota"), + ("America/Boise", "America/Boise"), + ("America/Cambridge_Bay", "America/Cambridge_Bay"), + ("America/Campo_Grande", "America/Campo_Grande"), + ("America/Cancun", "America/Cancun"), + ("America/Caracas", "America/Caracas"), + ("America/Cayenne", "America/Cayenne"), + 
("America/Cayman", "America/Cayman"), + ("America/Chicago", "America/Chicago"), + ("America/Chihuahua", "America/Chihuahua"), + ("America/Ciudad_Juarez", "America/Ciudad_Juarez"), + ("America/Costa_Rica", "America/Costa_Rica"), + ("America/Creston", "America/Creston"), + ("America/Cuiaba", "America/Cuiaba"), + ("America/Curacao", "America/Curacao"), + ("America/Danmarkshavn", "America/Danmarkshavn"), + ("America/Dawson", "America/Dawson"), + ("America/Dawson_Creek", "America/Dawson_Creek"), + ("America/Denver", "America/Denver"), + ("America/Detroit", "America/Detroit"), + ("America/Dominica", "America/Dominica"), + ("America/Edmonton", "America/Edmonton"), + ("America/Eirunepe", "America/Eirunepe"), + ("America/El_Salvador", "America/El_Salvador"), + ("America/Fort_Nelson", "America/Fort_Nelson"), + ("America/Fortaleza", "America/Fortaleza"), + ("America/Glace_Bay", "America/Glace_Bay"), + ("America/Goose_Bay", "America/Goose_Bay"), + ("America/Grand_Turk", "America/Grand_Turk"), + ("America/Grenada", "America/Grenada"), + ("America/Guadeloupe", "America/Guadeloupe"), + ("America/Guatemala", "America/Guatemala"), + ("America/Guayaquil", "America/Guayaquil"), + ("America/Guyana", "America/Guyana"), + ("America/Halifax", "America/Halifax"), + ("America/Havana", "America/Havana"), + ("America/Hermosillo", "America/Hermosillo"), + ("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "America/Indiana/Knox"), + ("America/Indiana/Marengo", "America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "America/Indiana/Winamac"), + ("America/Inuvik", "America/Inuvik"), + ("America/Iqaluit", "America/Iqaluit"), + ("America/Jamaica", "America/Jamaica"), + ("America/Juneau", "America/Juneau"), + 
("America/Kentucky/Louisville", "America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "America/Kentucky/Monticello"), + ("America/Kralendijk", "America/Kralendijk"), + ("America/La_Paz", "America/La_Paz"), + ("America/Lima", "America/Lima"), + ("America/Los_Angeles", "America/Los_Angeles"), + ("America/Lower_Princes", "America/Lower_Princes"), + ("America/Maceio", "America/Maceio"), + ("America/Managua", "America/Managua"), + ("America/Manaus", "America/Manaus"), + ("America/Marigot", "America/Marigot"), + ("America/Martinique", "America/Martinique"), + ("America/Matamoros", "America/Matamoros"), + ("America/Mazatlan", "America/Mazatlan"), + ("America/Menominee", "America/Menominee"), + ("America/Merida", "America/Merida"), + ("America/Metlakatla", "America/Metlakatla"), + ("America/Mexico_City", "America/Mexico_City"), + ("America/Miquelon", "America/Miquelon"), + ("America/Moncton", "America/Moncton"), + ("America/Monterrey", "America/Monterrey"), + ("America/Montevideo", "America/Montevideo"), + ("America/Montserrat", "America/Montserrat"), + ("America/Nassau", "America/Nassau"), + ("America/New_York", "America/New_York"), + ("America/Nome", "America/Nome"), + ("America/Noronha", "America/Noronha"), + ("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "America/North_Dakota/Center"), + ( + "America/North_Dakota/New_Salem", + "America/North_Dakota/New_Salem", + ), + ("America/Nuuk", "America/Nuuk"), + ("America/Ojinaga", "America/Ojinaga"), + ("America/Panama", "America/Panama"), + ("America/Paramaribo", "America/Paramaribo"), + ("America/Phoenix", "America/Phoenix"), + ("America/Port-au-Prince", "America/Port-au-Prince"), + ("America/Port_of_Spain", "America/Port_of_Spain"), + ("America/Porto_Velho", "America/Porto_Velho"), + ("America/Puerto_Rico", "America/Puerto_Rico"), + ("America/Punta_Arenas", "America/Punta_Arenas"), + ("America/Rankin_Inlet", "America/Rankin_Inlet"), + ("America/Recife", 
"America/Recife"), + ("America/Regina", "America/Regina"), + ("America/Resolute", "America/Resolute"), + ("America/Rio_Branco", "America/Rio_Branco"), + ("America/Santarem", "America/Santarem"), + ("America/Santiago", "America/Santiago"), + ("America/Santo_Domingo", "America/Santo_Domingo"), + ("America/Sao_Paulo", "America/Sao_Paulo"), + ("America/Scoresbysund", "America/Scoresbysund"), + ("America/Sitka", "America/Sitka"), + ("America/St_Barthelemy", "America/St_Barthelemy"), + ("America/St_Johns", "America/St_Johns"), + ("America/St_Kitts", "America/St_Kitts"), + ("America/St_Lucia", "America/St_Lucia"), + ("America/St_Thomas", "America/St_Thomas"), + ("America/St_Vincent", "America/St_Vincent"), + ("America/Swift_Current", "America/Swift_Current"), + ("America/Tegucigalpa", "America/Tegucigalpa"), + ("America/Thule", "America/Thule"), + ("America/Tijuana", "America/Tijuana"), + ("America/Toronto", "America/Toronto"), + ("America/Tortola", "America/Tortola"), + ("America/Vancouver", "America/Vancouver"), + ("America/Whitehorse", "America/Whitehorse"), + ("America/Winnipeg", "America/Winnipeg"), + ("America/Yakutat", "America/Yakutat"), + ("Antarctica/Casey", "Antarctica/Casey"), + ("Antarctica/Davis", "Antarctica/Davis"), + ("Antarctica/DumontDUrville", "Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "Antarctica/Macquarie"), + ("Antarctica/Mawson", "Antarctica/Mawson"), + ("Antarctica/McMurdo", "Antarctica/McMurdo"), + ("Antarctica/Palmer", "Antarctica/Palmer"), + ("Antarctica/Rothera", "Antarctica/Rothera"), + ("Antarctica/Syowa", "Antarctica/Syowa"), + ("Antarctica/Troll", "Antarctica/Troll"), + ("Antarctica/Vostok", "Antarctica/Vostok"), + ("Arctic/Longyearbyen", "Arctic/Longyearbyen"), + ("Asia/Aden", "Asia/Aden"), + ("Asia/Almaty", "Asia/Almaty"), + ("Asia/Amman", "Asia/Amman"), + ("Asia/Anadyr", "Asia/Anadyr"), + ("Asia/Aqtau", "Asia/Aqtau"), + ("Asia/Aqtobe", "Asia/Aqtobe"), + ("Asia/Ashgabat", "Asia/Ashgabat"), + ("Asia/Atyrau", "Asia/Atyrau"), 
+ ("Asia/Baghdad", "Asia/Baghdad"), + ("Asia/Bahrain", "Asia/Bahrain"), + ("Asia/Baku", "Asia/Baku"), + ("Asia/Bangkok", "Asia/Bangkok"), + ("Asia/Barnaul", "Asia/Barnaul"), + ("Asia/Beirut", "Asia/Beirut"), + ("Asia/Bishkek", "Asia/Bishkek"), + ("Asia/Brunei", "Asia/Brunei"), + ("Asia/Chita", "Asia/Chita"), + ("Asia/Choibalsan", "Asia/Choibalsan"), + ("Asia/Colombo", "Asia/Colombo"), + ("Asia/Damascus", "Asia/Damascus"), + ("Asia/Dhaka", "Asia/Dhaka"), + ("Asia/Dili", "Asia/Dili"), + ("Asia/Dubai", "Asia/Dubai"), + ("Asia/Dushanbe", "Asia/Dushanbe"), + ("Asia/Famagusta", "Asia/Famagusta"), + ("Asia/Gaza", "Asia/Gaza"), + ("Asia/Hebron", "Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "Asia/Hong_Kong"), + ("Asia/Hovd", "Asia/Hovd"), + ("Asia/Irkutsk", "Asia/Irkutsk"), + ("Asia/Jakarta", "Asia/Jakarta"), + ("Asia/Jayapura", "Asia/Jayapura"), + ("Asia/Jerusalem", "Asia/Jerusalem"), + ("Asia/Kabul", "Asia/Kabul"), + ("Asia/Kamchatka", "Asia/Kamchatka"), + ("Asia/Karachi", "Asia/Karachi"), + ("Asia/Kathmandu", "Asia/Kathmandu"), + ("Asia/Khandyga", "Asia/Khandyga"), + ("Asia/Kolkata", "Asia/Kolkata"), + ("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"), + ("Asia/Kuching", "Asia/Kuching"), + ("Asia/Kuwait", "Asia/Kuwait"), + ("Asia/Macau", "Asia/Macau"), + ("Asia/Magadan", "Asia/Magadan"), + ("Asia/Makassar", "Asia/Makassar"), + ("Asia/Manila", "Asia/Manila"), + ("Asia/Muscat", "Asia/Muscat"), + ("Asia/Nicosia", "Asia/Nicosia"), + ("Asia/Novokuznetsk", "Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "Asia/Novosibirsk"), + ("Asia/Omsk", "Asia/Omsk"), + ("Asia/Oral", "Asia/Oral"), + ("Asia/Phnom_Penh", "Asia/Phnom_Penh"), + ("Asia/Pontianak", "Asia/Pontianak"), + ("Asia/Pyongyang", "Asia/Pyongyang"), + ("Asia/Qatar", "Asia/Qatar"), + ("Asia/Qostanay", "Asia/Qostanay"), + ("Asia/Qyzylorda", "Asia/Qyzylorda"), + ("Asia/Riyadh", "Asia/Riyadh"), + ("Asia/Sakhalin", "Asia/Sakhalin"), + ("Asia/Samarkand", 
"Asia/Samarkand"), + ("Asia/Seoul", "Asia/Seoul"), + ("Asia/Shanghai", "Asia/Shanghai"), + ("Asia/Singapore", "Asia/Singapore"), + ("Asia/Srednekolymsk", "Asia/Srednekolymsk"), + ("Asia/Taipei", "Asia/Taipei"), + ("Asia/Tashkent", "Asia/Tashkent"), + ("Asia/Tbilisi", "Asia/Tbilisi"), + ("Asia/Tehran", "Asia/Tehran"), + ("Asia/Thimphu", "Asia/Thimphu"), + ("Asia/Tokyo", "Asia/Tokyo"), + ("Asia/Tomsk", "Asia/Tomsk"), + ("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"), + ("Asia/Urumqi", "Asia/Urumqi"), + ("Asia/Ust-Nera", "Asia/Ust-Nera"), + ("Asia/Vientiane", "Asia/Vientiane"), + ("Asia/Vladivostok", "Asia/Vladivostok"), + ("Asia/Yakutsk", "Asia/Yakutsk"), + ("Asia/Yangon", "Asia/Yangon"), + ("Asia/Yekaterinburg", "Asia/Yekaterinburg"), + ("Asia/Yerevan", "Asia/Yerevan"), + ("Atlantic/Azores", "Atlantic/Azores"), + ("Atlantic/Bermuda", "Atlantic/Bermuda"), + ("Atlantic/Canary", "Atlantic/Canary"), + ("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "Atlantic/Faroe"), + ("Atlantic/Madeira", "Atlantic/Madeira"), + ("Atlantic/Reykjavik", "Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "Atlantic/St_Helena"), + ("Atlantic/Stanley", "Atlantic/Stanley"), + ("Australia/Adelaide", "Australia/Adelaide"), + ("Australia/Brisbane", "Australia/Brisbane"), + ("Australia/Broken_Hill", "Australia/Broken_Hill"), + ("Australia/Darwin", "Australia/Darwin"), + ("Australia/Eucla", "Australia/Eucla"), + ("Australia/Hobart", "Australia/Hobart"), + ("Australia/Lindeman", "Australia/Lindeman"), + ("Australia/Lord_Howe", "Australia/Lord_Howe"), + ("Australia/Melbourne", "Australia/Melbourne"), + ("Australia/Perth", "Australia/Perth"), + ("Australia/Sydney", "Australia/Sydney"), + ("Canada/Atlantic", "Canada/Atlantic"), + ("Canada/Central", "Canada/Central"), + ("Canada/Eastern", "Canada/Eastern"), + ("Canada/Mountain", "Canada/Mountain"), + ("Canada/Newfoundland", "Canada/Newfoundland"), + ("Canada/Pacific", "Canada/Pacific"), 
+ ("Europe/Amsterdam", "Europe/Amsterdam"), + ("Europe/Andorra", "Europe/Andorra"), + ("Europe/Astrakhan", "Europe/Astrakhan"), + ("Europe/Athens", "Europe/Athens"), + ("Europe/Belgrade", "Europe/Belgrade"), + ("Europe/Berlin", "Europe/Berlin"), + ("Europe/Bratislava", "Europe/Bratislava"), + ("Europe/Brussels", "Europe/Brussels"), + ("Europe/Bucharest", "Europe/Bucharest"), + ("Europe/Budapest", "Europe/Budapest"), + ("Europe/Busingen", "Europe/Busingen"), + ("Europe/Chisinau", "Europe/Chisinau"), + ("Europe/Copenhagen", "Europe/Copenhagen"), + ("Europe/Dublin", "Europe/Dublin"), + ("Europe/Gibraltar", "Europe/Gibraltar"), + ("Europe/Guernsey", "Europe/Guernsey"), + ("Europe/Helsinki", "Europe/Helsinki"), + ("Europe/Isle_of_Man", "Europe/Isle_of_Man"), + ("Europe/Istanbul", "Europe/Istanbul"), + ("Europe/Jersey", "Europe/Jersey"), + ("Europe/Kaliningrad", "Europe/Kaliningrad"), + ("Europe/Kirov", "Europe/Kirov"), + ("Europe/Kyiv", "Europe/Kyiv"), + ("Europe/Lisbon", "Europe/Lisbon"), + ("Europe/Ljubljana", "Europe/Ljubljana"), + ("Europe/London", "Europe/London"), + ("Europe/Luxembourg", "Europe/Luxembourg"), + ("Europe/Madrid", "Europe/Madrid"), + ("Europe/Malta", "Europe/Malta"), + ("Europe/Mariehamn", "Europe/Mariehamn"), + ("Europe/Minsk", "Europe/Minsk"), + ("Europe/Monaco", "Europe/Monaco"), + ("Europe/Moscow", "Europe/Moscow"), + ("Europe/Oslo", "Europe/Oslo"), + ("Europe/Paris", "Europe/Paris"), + ("Europe/Podgorica", "Europe/Podgorica"), + ("Europe/Prague", "Europe/Prague"), + ("Europe/Riga", "Europe/Riga"), + ("Europe/Rome", "Europe/Rome"), + ("Europe/Samara", "Europe/Samara"), + ("Europe/San_Marino", "Europe/San_Marino"), + ("Europe/Sarajevo", "Europe/Sarajevo"), + ("Europe/Saratov", "Europe/Saratov"), + ("Europe/Simferopol", "Europe/Simferopol"), + ("Europe/Skopje", "Europe/Skopje"), + ("Europe/Sofia", "Europe/Sofia"), + ("Europe/Stockholm", "Europe/Stockholm"), + ("Europe/Tallinn", "Europe/Tallinn"), + ("Europe/Tirane", "Europe/Tirane"), + 
("Europe/Ulyanovsk", "Europe/Ulyanovsk"), + ("Europe/Vaduz", "Europe/Vaduz"), + ("Europe/Vatican", "Europe/Vatican"), + ("Europe/Vienna", "Europe/Vienna"), + ("Europe/Vilnius", "Europe/Vilnius"), + ("Europe/Volgograd", "Europe/Volgograd"), + ("Europe/Warsaw", "Europe/Warsaw"), + ("Europe/Zagreb", "Europe/Zagreb"), + ("Europe/Zurich", "Europe/Zurich"), + ("GMT", "GMT"), + ("Indian/Antananarivo", "Indian/Antananarivo"), + ("Indian/Chagos", "Indian/Chagos"), + ("Indian/Christmas", "Indian/Christmas"), + ("Indian/Cocos", "Indian/Cocos"), + ("Indian/Comoro", "Indian/Comoro"), + ("Indian/Kerguelen", "Indian/Kerguelen"), + ("Indian/Mahe", "Indian/Mahe"), + ("Indian/Maldives", "Indian/Maldives"), + ("Indian/Mauritius", "Indian/Mauritius"), + ("Indian/Mayotte", "Indian/Mayotte"), + ("Indian/Reunion", "Indian/Reunion"), + ("Pacific/Apia", "Pacific/Apia"), + ("Pacific/Auckland", "Pacific/Auckland"), + ("Pacific/Bougainville", "Pacific/Bougainville"), + ("Pacific/Chatham", "Pacific/Chatham"), + ("Pacific/Chuuk", "Pacific/Chuuk"), + ("Pacific/Easter", "Pacific/Easter"), + ("Pacific/Efate", "Pacific/Efate"), + ("Pacific/Fakaofo", "Pacific/Fakaofo"), + ("Pacific/Fiji", "Pacific/Fiji"), + ("Pacific/Funafuti", "Pacific/Funafuti"), + ("Pacific/Galapagos", "Pacific/Galapagos"), + ("Pacific/Gambier", "Pacific/Gambier"), + ("Pacific/Guadalcanal", "Pacific/Guadalcanal"), + ("Pacific/Guam", "Pacific/Guam"), + ("Pacific/Honolulu", "Pacific/Honolulu"), + ("Pacific/Kanton", "Pacific/Kanton"), + ("Pacific/Kiritimati", "Pacific/Kiritimati"), + ("Pacific/Kosrae", "Pacific/Kosrae"), + ("Pacific/Kwajalein", "Pacific/Kwajalein"), + ("Pacific/Majuro", "Pacific/Majuro"), + ("Pacific/Marquesas", "Pacific/Marquesas"), + ("Pacific/Midway", "Pacific/Midway"), + ("Pacific/Nauru", "Pacific/Nauru"), + ("Pacific/Niue", "Pacific/Niue"), + ("Pacific/Norfolk", "Pacific/Norfolk"), + ("Pacific/Noumea", "Pacific/Noumea"), + ("Pacific/Pago_Pago", "Pacific/Pago_Pago"), + ("Pacific/Palau", "Pacific/Palau"), + 
("Pacific/Pitcairn", "Pacific/Pitcairn"), + ("Pacific/Pohnpei", "Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "Pacific/Rarotonga"), + ("Pacific/Saipan", "Pacific/Saipan"), + ("Pacific/Tahiti", "Pacific/Tahiti"), + ("Pacific/Tarawa", "Pacific/Tarawa"), + ("Pacific/Tongatapu", "Pacific/Tongatapu"), + ("Pacific/Wake", "Pacific/Wake"), + ("Pacific/Wallis", "Pacific/Wallis"), + ("US/Alaska", "US/Alaska"), + ("US/Arizona", "US/Arizona"), + ("US/Central", "US/Central"), + ("US/Eastern", "US/Eastern"), + ("US/Hawaii", "US/Hawaii"), + ("US/Mountain", "US/Mountain"), + ("US/Pacific", "US/Pacific"), + ("UTC", "UTC"), + ], + default="UTC", + max_length=255, + ), + ), + migrations.AlterField( + model_name="workspace", + name="timezone", + field=models.CharField( + choices=[ + ("Africa/Abidjan", "Africa/Abidjan"), + ("Africa/Accra", "Africa/Accra"), + ("Africa/Addis_Ababa", "Africa/Addis_Ababa"), + ("Africa/Algiers", "Africa/Algiers"), + ("Africa/Asmara", "Africa/Asmara"), + ("Africa/Bamako", "Africa/Bamako"), + ("Africa/Bangui", "Africa/Bangui"), + ("Africa/Banjul", "Africa/Banjul"), + ("Africa/Bissau", "Africa/Bissau"), + ("Africa/Blantyre", "Africa/Blantyre"), + ("Africa/Brazzaville", "Africa/Brazzaville"), + ("Africa/Bujumbura", "Africa/Bujumbura"), + ("Africa/Cairo", "Africa/Cairo"), + ("Africa/Casablanca", "Africa/Casablanca"), + ("Africa/Ceuta", "Africa/Ceuta"), + ("Africa/Conakry", "Africa/Conakry"), + ("Africa/Dakar", "Africa/Dakar"), + ("Africa/Dar_es_Salaam", "Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "Africa/Djibouti"), + ("Africa/Douala", "Africa/Douala"), + ("Africa/El_Aaiun", "Africa/El_Aaiun"), + ("Africa/Freetown", "Africa/Freetown"), + ("Africa/Gaborone", "Africa/Gaborone"), + ("Africa/Harare", "Africa/Harare"), + ("Africa/Johannesburg", "Africa/Johannesburg"), + ("Africa/Juba", "Africa/Juba"), + ("Africa/Kampala", "Africa/Kampala"), + ("Africa/Khartoum", "Africa/Khartoum"), + ("Africa/Kigali", 
"Africa/Kigali"), + ("Africa/Kinshasa", "Africa/Kinshasa"), + ("Africa/Lagos", "Africa/Lagos"), + ("Africa/Libreville", "Africa/Libreville"), + ("Africa/Lome", "Africa/Lome"), + ("Africa/Luanda", "Africa/Luanda"), + ("Africa/Lubumbashi", "Africa/Lubumbashi"), + ("Africa/Lusaka", "Africa/Lusaka"), + ("Africa/Malabo", "Africa/Malabo"), + ("Africa/Maputo", "Africa/Maputo"), + ("Africa/Maseru", "Africa/Maseru"), + ("Africa/Mbabane", "Africa/Mbabane"), + ("Africa/Mogadishu", "Africa/Mogadishu"), + ("Africa/Monrovia", "Africa/Monrovia"), + ("Africa/Nairobi", "Africa/Nairobi"), + ("Africa/Ndjamena", "Africa/Ndjamena"), + ("Africa/Niamey", "Africa/Niamey"), + ("Africa/Nouakchott", "Africa/Nouakchott"), + ("Africa/Ouagadougou", "Africa/Ouagadougou"), + ("Africa/Porto-Novo", "Africa/Porto-Novo"), + ("Africa/Sao_Tome", "Africa/Sao_Tome"), + ("Africa/Tripoli", "Africa/Tripoli"), + ("Africa/Tunis", "Africa/Tunis"), + ("Africa/Windhoek", "Africa/Windhoek"), + ("America/Adak", "America/Adak"), + ("America/Anchorage", "America/Anchorage"), + ("America/Anguilla", "America/Anguilla"), + ("America/Antigua", "America/Antigua"), + ("America/Araguaina", "America/Araguaina"), + ( + "America/Argentina/Buenos_Aires", + "America/Argentina/Buenos_Aires", + ), + ("America/Argentina/Catamarca", "America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "America/Argentina/Mendoza"), + ( + "America/Argentina/Rio_Gallegos", + "America/Argentina/Rio_Gallegos", + ), + ("America/Argentina/Salta", "America/Argentina/Salta"), + ("America/Argentina/San_Juan", "America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"), + ("America/Aruba", "America/Aruba"), + 
("America/Asuncion", "America/Asuncion"), + ("America/Atikokan", "America/Atikokan"), + ("America/Bahia", "America/Bahia"), + ("America/Bahia_Banderas", "America/Bahia_Banderas"), + ("America/Barbados", "America/Barbados"), + ("America/Belem", "America/Belem"), + ("America/Belize", "America/Belize"), + ("America/Blanc-Sablon", "America/Blanc-Sablon"), + ("America/Boa_Vista", "America/Boa_Vista"), + ("America/Bogota", "America/Bogota"), + ("America/Boise", "America/Boise"), + ("America/Cambridge_Bay", "America/Cambridge_Bay"), + ("America/Campo_Grande", "America/Campo_Grande"), + ("America/Cancun", "America/Cancun"), + ("America/Caracas", "America/Caracas"), + ("America/Cayenne", "America/Cayenne"), + ("America/Cayman", "America/Cayman"), + ("America/Chicago", "America/Chicago"), + ("America/Chihuahua", "America/Chihuahua"), + ("America/Ciudad_Juarez", "America/Ciudad_Juarez"), + ("America/Costa_Rica", "America/Costa_Rica"), + ("America/Creston", "America/Creston"), + ("America/Cuiaba", "America/Cuiaba"), + ("America/Curacao", "America/Curacao"), + ("America/Danmarkshavn", "America/Danmarkshavn"), + ("America/Dawson", "America/Dawson"), + ("America/Dawson_Creek", "America/Dawson_Creek"), + ("America/Denver", "America/Denver"), + ("America/Detroit", "America/Detroit"), + ("America/Dominica", "America/Dominica"), + ("America/Edmonton", "America/Edmonton"), + ("America/Eirunepe", "America/Eirunepe"), + ("America/El_Salvador", "America/El_Salvador"), + ("America/Fort_Nelson", "America/Fort_Nelson"), + ("America/Fortaleza", "America/Fortaleza"), + ("America/Glace_Bay", "America/Glace_Bay"), + ("America/Goose_Bay", "America/Goose_Bay"), + ("America/Grand_Turk", "America/Grand_Turk"), + ("America/Grenada", "America/Grenada"), + ("America/Guadeloupe", "America/Guadeloupe"), + ("America/Guatemala", "America/Guatemala"), + ("America/Guayaquil", "America/Guayaquil"), + ("America/Guyana", "America/Guyana"), + ("America/Halifax", "America/Halifax"), + ("America/Havana", 
"America/Havana"), + ("America/Hermosillo", "America/Hermosillo"), + ("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "America/Indiana/Knox"), + ("America/Indiana/Marengo", "America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "America/Indiana/Winamac"), + ("America/Inuvik", "America/Inuvik"), + ("America/Iqaluit", "America/Iqaluit"), + ("America/Jamaica", "America/Jamaica"), + ("America/Juneau", "America/Juneau"), + ("America/Kentucky/Louisville", "America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "America/Kentucky/Monticello"), + ("America/Kralendijk", "America/Kralendijk"), + ("America/La_Paz", "America/La_Paz"), + ("America/Lima", "America/Lima"), + ("America/Los_Angeles", "America/Los_Angeles"), + ("America/Lower_Princes", "America/Lower_Princes"), + ("America/Maceio", "America/Maceio"), + ("America/Managua", "America/Managua"), + ("America/Manaus", "America/Manaus"), + ("America/Marigot", "America/Marigot"), + ("America/Martinique", "America/Martinique"), + ("America/Matamoros", "America/Matamoros"), + ("America/Mazatlan", "America/Mazatlan"), + ("America/Menominee", "America/Menominee"), + ("America/Merida", "America/Merida"), + ("America/Metlakatla", "America/Metlakatla"), + ("America/Mexico_City", "America/Mexico_City"), + ("America/Miquelon", "America/Miquelon"), + ("America/Moncton", "America/Moncton"), + ("America/Monterrey", "America/Monterrey"), + ("America/Montevideo", "America/Montevideo"), + ("America/Montserrat", "America/Montserrat"), + ("America/Nassau", "America/Nassau"), + ("America/New_York", "America/New_York"), + ("America/Nome", "America/Nome"), + ("America/Noronha", "America/Noronha"), + ("America/North_Dakota/Beulah", 
"America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "America/North_Dakota/Center"), + ( + "America/North_Dakota/New_Salem", + "America/North_Dakota/New_Salem", + ), + ("America/Nuuk", "America/Nuuk"), + ("America/Ojinaga", "America/Ojinaga"), + ("America/Panama", "America/Panama"), + ("America/Paramaribo", "America/Paramaribo"), + ("America/Phoenix", "America/Phoenix"), + ("America/Port-au-Prince", "America/Port-au-Prince"), + ("America/Port_of_Spain", "America/Port_of_Spain"), + ("America/Porto_Velho", "America/Porto_Velho"), + ("America/Puerto_Rico", "America/Puerto_Rico"), + ("America/Punta_Arenas", "America/Punta_Arenas"), + ("America/Rankin_Inlet", "America/Rankin_Inlet"), + ("America/Recife", "America/Recife"), + ("America/Regina", "America/Regina"), + ("America/Resolute", "America/Resolute"), + ("America/Rio_Branco", "America/Rio_Branco"), + ("America/Santarem", "America/Santarem"), + ("America/Santiago", "America/Santiago"), + ("America/Santo_Domingo", "America/Santo_Domingo"), + ("America/Sao_Paulo", "America/Sao_Paulo"), + ("America/Scoresbysund", "America/Scoresbysund"), + ("America/Sitka", "America/Sitka"), + ("America/St_Barthelemy", "America/St_Barthelemy"), + ("America/St_Johns", "America/St_Johns"), + ("America/St_Kitts", "America/St_Kitts"), + ("America/St_Lucia", "America/St_Lucia"), + ("America/St_Thomas", "America/St_Thomas"), + ("America/St_Vincent", "America/St_Vincent"), + ("America/Swift_Current", "America/Swift_Current"), + ("America/Tegucigalpa", "America/Tegucigalpa"), + ("America/Thule", "America/Thule"), + ("America/Tijuana", "America/Tijuana"), + ("America/Toronto", "America/Toronto"), + ("America/Tortola", "America/Tortola"), + ("America/Vancouver", "America/Vancouver"), + ("America/Whitehorse", "America/Whitehorse"), + ("America/Winnipeg", "America/Winnipeg"), + ("America/Yakutat", "America/Yakutat"), + ("Antarctica/Casey", "Antarctica/Casey"), + ("Antarctica/Davis", "Antarctica/Davis"), + ("Antarctica/DumontDUrville", 
"Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "Antarctica/Macquarie"), + ("Antarctica/Mawson", "Antarctica/Mawson"), + ("Antarctica/McMurdo", "Antarctica/McMurdo"), + ("Antarctica/Palmer", "Antarctica/Palmer"), + ("Antarctica/Rothera", "Antarctica/Rothera"), + ("Antarctica/Syowa", "Antarctica/Syowa"), + ("Antarctica/Troll", "Antarctica/Troll"), + ("Antarctica/Vostok", "Antarctica/Vostok"), + ("Arctic/Longyearbyen", "Arctic/Longyearbyen"), + ("Asia/Aden", "Asia/Aden"), + ("Asia/Almaty", "Asia/Almaty"), + ("Asia/Amman", "Asia/Amman"), + ("Asia/Anadyr", "Asia/Anadyr"), + ("Asia/Aqtau", "Asia/Aqtau"), + ("Asia/Aqtobe", "Asia/Aqtobe"), + ("Asia/Ashgabat", "Asia/Ashgabat"), + ("Asia/Atyrau", "Asia/Atyrau"), + ("Asia/Baghdad", "Asia/Baghdad"), + ("Asia/Bahrain", "Asia/Bahrain"), + ("Asia/Baku", "Asia/Baku"), + ("Asia/Bangkok", "Asia/Bangkok"), + ("Asia/Barnaul", "Asia/Barnaul"), + ("Asia/Beirut", "Asia/Beirut"), + ("Asia/Bishkek", "Asia/Bishkek"), + ("Asia/Brunei", "Asia/Brunei"), + ("Asia/Chita", "Asia/Chita"), + ("Asia/Choibalsan", "Asia/Choibalsan"), + ("Asia/Colombo", "Asia/Colombo"), + ("Asia/Damascus", "Asia/Damascus"), + ("Asia/Dhaka", "Asia/Dhaka"), + ("Asia/Dili", "Asia/Dili"), + ("Asia/Dubai", "Asia/Dubai"), + ("Asia/Dushanbe", "Asia/Dushanbe"), + ("Asia/Famagusta", "Asia/Famagusta"), + ("Asia/Gaza", "Asia/Gaza"), + ("Asia/Hebron", "Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "Asia/Hong_Kong"), + ("Asia/Hovd", "Asia/Hovd"), + ("Asia/Irkutsk", "Asia/Irkutsk"), + ("Asia/Jakarta", "Asia/Jakarta"), + ("Asia/Jayapura", "Asia/Jayapura"), + ("Asia/Jerusalem", "Asia/Jerusalem"), + ("Asia/Kabul", "Asia/Kabul"), + ("Asia/Kamchatka", "Asia/Kamchatka"), + ("Asia/Karachi", "Asia/Karachi"), + ("Asia/Kathmandu", "Asia/Kathmandu"), + ("Asia/Khandyga", "Asia/Khandyga"), + ("Asia/Kolkata", "Asia/Kolkata"), + ("Asia/Krasnoyarsk", "Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "Asia/Kuala_Lumpur"), + ("Asia/Kuching", "Asia/Kuching"), + 
("Asia/Kuwait", "Asia/Kuwait"), + ("Asia/Macau", "Asia/Macau"), + ("Asia/Magadan", "Asia/Magadan"), + ("Asia/Makassar", "Asia/Makassar"), + ("Asia/Manila", "Asia/Manila"), + ("Asia/Muscat", "Asia/Muscat"), + ("Asia/Nicosia", "Asia/Nicosia"), + ("Asia/Novokuznetsk", "Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "Asia/Novosibirsk"), + ("Asia/Omsk", "Asia/Omsk"), + ("Asia/Oral", "Asia/Oral"), + ("Asia/Phnom_Penh", "Asia/Phnom_Penh"), + ("Asia/Pontianak", "Asia/Pontianak"), + ("Asia/Pyongyang", "Asia/Pyongyang"), + ("Asia/Qatar", "Asia/Qatar"), + ("Asia/Qostanay", "Asia/Qostanay"), + ("Asia/Qyzylorda", "Asia/Qyzylorda"), + ("Asia/Riyadh", "Asia/Riyadh"), + ("Asia/Sakhalin", "Asia/Sakhalin"), + ("Asia/Samarkand", "Asia/Samarkand"), + ("Asia/Seoul", "Asia/Seoul"), + ("Asia/Shanghai", "Asia/Shanghai"), + ("Asia/Singapore", "Asia/Singapore"), + ("Asia/Srednekolymsk", "Asia/Srednekolymsk"), + ("Asia/Taipei", "Asia/Taipei"), + ("Asia/Tashkent", "Asia/Tashkent"), + ("Asia/Tbilisi", "Asia/Tbilisi"), + ("Asia/Tehran", "Asia/Tehran"), + ("Asia/Thimphu", "Asia/Thimphu"), + ("Asia/Tokyo", "Asia/Tokyo"), + ("Asia/Tomsk", "Asia/Tomsk"), + ("Asia/Ulaanbaatar", "Asia/Ulaanbaatar"), + ("Asia/Urumqi", "Asia/Urumqi"), + ("Asia/Ust-Nera", "Asia/Ust-Nera"), + ("Asia/Vientiane", "Asia/Vientiane"), + ("Asia/Vladivostok", "Asia/Vladivostok"), + ("Asia/Yakutsk", "Asia/Yakutsk"), + ("Asia/Yangon", "Asia/Yangon"), + ("Asia/Yekaterinburg", "Asia/Yekaterinburg"), + ("Asia/Yerevan", "Asia/Yerevan"), + ("Atlantic/Azores", "Atlantic/Azores"), + ("Atlantic/Bermuda", "Atlantic/Bermuda"), + ("Atlantic/Canary", "Atlantic/Canary"), + ("Atlantic/Cape_Verde", "Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "Atlantic/Faroe"), + ("Atlantic/Madeira", "Atlantic/Madeira"), + ("Atlantic/Reykjavik", "Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "Atlantic/St_Helena"), + ("Atlantic/Stanley", "Atlantic/Stanley"), + ("Australia/Adelaide", "Australia/Adelaide"), + 
("Australia/Brisbane", "Australia/Brisbane"), + ("Australia/Broken_Hill", "Australia/Broken_Hill"), + ("Australia/Darwin", "Australia/Darwin"), + ("Australia/Eucla", "Australia/Eucla"), + ("Australia/Hobart", "Australia/Hobart"), + ("Australia/Lindeman", "Australia/Lindeman"), + ("Australia/Lord_Howe", "Australia/Lord_Howe"), + ("Australia/Melbourne", "Australia/Melbourne"), + ("Australia/Perth", "Australia/Perth"), + ("Australia/Sydney", "Australia/Sydney"), + ("Canada/Atlantic", "Canada/Atlantic"), + ("Canada/Central", "Canada/Central"), + ("Canada/Eastern", "Canada/Eastern"), + ("Canada/Mountain", "Canada/Mountain"), + ("Canada/Newfoundland", "Canada/Newfoundland"), + ("Canada/Pacific", "Canada/Pacific"), + ("Europe/Amsterdam", "Europe/Amsterdam"), + ("Europe/Andorra", "Europe/Andorra"), + ("Europe/Astrakhan", "Europe/Astrakhan"), + ("Europe/Athens", "Europe/Athens"), + ("Europe/Belgrade", "Europe/Belgrade"), + ("Europe/Berlin", "Europe/Berlin"), + ("Europe/Bratislava", "Europe/Bratislava"), + ("Europe/Brussels", "Europe/Brussels"), + ("Europe/Bucharest", "Europe/Bucharest"), + ("Europe/Budapest", "Europe/Budapest"), + ("Europe/Busingen", "Europe/Busingen"), + ("Europe/Chisinau", "Europe/Chisinau"), + ("Europe/Copenhagen", "Europe/Copenhagen"), + ("Europe/Dublin", "Europe/Dublin"), + ("Europe/Gibraltar", "Europe/Gibraltar"), + ("Europe/Guernsey", "Europe/Guernsey"), + ("Europe/Helsinki", "Europe/Helsinki"), + ("Europe/Isle_of_Man", "Europe/Isle_of_Man"), + ("Europe/Istanbul", "Europe/Istanbul"), + ("Europe/Jersey", "Europe/Jersey"), + ("Europe/Kaliningrad", "Europe/Kaliningrad"), + ("Europe/Kirov", "Europe/Kirov"), + ("Europe/Kyiv", "Europe/Kyiv"), + ("Europe/Lisbon", "Europe/Lisbon"), + ("Europe/Ljubljana", "Europe/Ljubljana"), + ("Europe/London", "Europe/London"), + ("Europe/Luxembourg", "Europe/Luxembourg"), + ("Europe/Madrid", "Europe/Madrid"), + ("Europe/Malta", "Europe/Malta"), + ("Europe/Mariehamn", "Europe/Mariehamn"), + ("Europe/Minsk", "Europe/Minsk"), 
+ ("Europe/Monaco", "Europe/Monaco"), + ("Europe/Moscow", "Europe/Moscow"), + ("Europe/Oslo", "Europe/Oslo"), + ("Europe/Paris", "Europe/Paris"), + ("Europe/Podgorica", "Europe/Podgorica"), + ("Europe/Prague", "Europe/Prague"), + ("Europe/Riga", "Europe/Riga"), + ("Europe/Rome", "Europe/Rome"), + ("Europe/Samara", "Europe/Samara"), + ("Europe/San_Marino", "Europe/San_Marino"), + ("Europe/Sarajevo", "Europe/Sarajevo"), + ("Europe/Saratov", "Europe/Saratov"), + ("Europe/Simferopol", "Europe/Simferopol"), + ("Europe/Skopje", "Europe/Skopje"), + ("Europe/Sofia", "Europe/Sofia"), + ("Europe/Stockholm", "Europe/Stockholm"), + ("Europe/Tallinn", "Europe/Tallinn"), + ("Europe/Tirane", "Europe/Tirane"), + ("Europe/Ulyanovsk", "Europe/Ulyanovsk"), + ("Europe/Vaduz", "Europe/Vaduz"), + ("Europe/Vatican", "Europe/Vatican"), + ("Europe/Vienna", "Europe/Vienna"), + ("Europe/Vilnius", "Europe/Vilnius"), + ("Europe/Volgograd", "Europe/Volgograd"), + ("Europe/Warsaw", "Europe/Warsaw"), + ("Europe/Zagreb", "Europe/Zagreb"), + ("Europe/Zurich", "Europe/Zurich"), + ("GMT", "GMT"), + ("Indian/Antananarivo", "Indian/Antananarivo"), + ("Indian/Chagos", "Indian/Chagos"), + ("Indian/Christmas", "Indian/Christmas"), + ("Indian/Cocos", "Indian/Cocos"), + ("Indian/Comoro", "Indian/Comoro"), + ("Indian/Kerguelen", "Indian/Kerguelen"), + ("Indian/Mahe", "Indian/Mahe"), + ("Indian/Maldives", "Indian/Maldives"), + ("Indian/Mauritius", "Indian/Mauritius"), + ("Indian/Mayotte", "Indian/Mayotte"), + ("Indian/Reunion", "Indian/Reunion"), + ("Pacific/Apia", "Pacific/Apia"), + ("Pacific/Auckland", "Pacific/Auckland"), + ("Pacific/Bougainville", "Pacific/Bougainville"), + ("Pacific/Chatham", "Pacific/Chatham"), + ("Pacific/Chuuk", "Pacific/Chuuk"), + ("Pacific/Easter", "Pacific/Easter"), + ("Pacific/Efate", "Pacific/Efate"), + ("Pacific/Fakaofo", "Pacific/Fakaofo"), + ("Pacific/Fiji", "Pacific/Fiji"), + ("Pacific/Funafuti", "Pacific/Funafuti"), + ("Pacific/Galapagos", "Pacific/Galapagos"), + 
("Pacific/Gambier", "Pacific/Gambier"), + ("Pacific/Guadalcanal", "Pacific/Guadalcanal"), + ("Pacific/Guam", "Pacific/Guam"), + ("Pacific/Honolulu", "Pacific/Honolulu"), + ("Pacific/Kanton", "Pacific/Kanton"), + ("Pacific/Kiritimati", "Pacific/Kiritimati"), + ("Pacific/Kosrae", "Pacific/Kosrae"), + ("Pacific/Kwajalein", "Pacific/Kwajalein"), + ("Pacific/Majuro", "Pacific/Majuro"), + ("Pacific/Marquesas", "Pacific/Marquesas"), + ("Pacific/Midway", "Pacific/Midway"), + ("Pacific/Nauru", "Pacific/Nauru"), + ("Pacific/Niue", "Pacific/Niue"), + ("Pacific/Norfolk", "Pacific/Norfolk"), + ("Pacific/Noumea", "Pacific/Noumea"), + ("Pacific/Pago_Pago", "Pacific/Pago_Pago"), + ("Pacific/Palau", "Pacific/Palau"), + ("Pacific/Pitcairn", "Pacific/Pitcairn"), + ("Pacific/Pohnpei", "Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "Pacific/Rarotonga"), + ("Pacific/Saipan", "Pacific/Saipan"), + ("Pacific/Tahiti", "Pacific/Tahiti"), + ("Pacific/Tarawa", "Pacific/Tarawa"), + ("Pacific/Tongatapu", "Pacific/Tongatapu"), + ("Pacific/Wake", "Pacific/Wake"), + ("Pacific/Wallis", "Pacific/Wallis"), + ("US/Alaska", "US/Alaska"), + ("US/Arizona", "US/Arizona"), + ("US/Central", "US/Central"), + ("US/Eastern", "US/Eastern"), + ("US/Hawaii", "US/Hawaii"), + ("US/Mountain", "US/Mountain"), + ("US/Pacific", "US/Pacific"), + ("UTC", "UTC"), + ], + default="UTC", + max_length=255, + ), + ), + ] diff --git a/apps/api/plane/db/migrations/0101_description_descriptionversion.py b/apps/api/plane/db/migrations/0101_description_descriptionversion.py new file mode 100644 index 000000000..fca305c39 --- /dev/null +++ b/apps/api/plane/db/migrations/0101_description_descriptionversion.py @@ -0,0 +1,182 @@ +# Generated by Django 4.2.21 on 2025-08-19 11:52 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ("db", 
"0100_profile_has_marketing_email_consent_and_more"), + ] + + operations = [ + migrations.CreateModel( + name="Description", + fields=[ + ( + "created_at", + models.DateTimeField(auto_now_add=True, verbose_name="Created At"), + ), + ( + "updated_at", + models.DateTimeField( + auto_now=True, verbose_name="Last Modified At" + ), + ), + ( + "deleted_at", + models.DateTimeField( + blank=True, null=True, verbose_name="Deleted At" + ), + ), + ( + "id", + models.UUIDField( + db_index=True, + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + unique=True, + ), + ), + ("description_json", models.JSONField(blank=True, default=dict)), + ("description_html", models.TextField(blank=True, default="

")), + ("description_binary", models.BinaryField(null=True)), + ("description_stripped", models.TextField(blank=True, null=True)), + ( + "created_by", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="%(class)s_created_by", + to=settings.AUTH_USER_MODEL, + verbose_name="Created By", + ), + ), + ( + "project", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="project_%(class)s", + to="db.project", + ), + ), + ( + "updated_by", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="%(class)s_updated_by", + to=settings.AUTH_USER_MODEL, + verbose_name="Last Modified By", + ), + ), + ( + "workspace", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="workspace_%(class)s", + to="db.workspace", + ), + ), + ], + options={ + "verbose_name": "Description", + "verbose_name_plural": "Descriptions", + "db_table": "descriptions", + "ordering": ("-created_at",), + }, + ), + migrations.CreateModel( + name="DescriptionVersion", + fields=[ + ( + "created_at", + models.DateTimeField(auto_now_add=True, verbose_name="Created At"), + ), + ( + "updated_at", + models.DateTimeField( + auto_now=True, verbose_name="Last Modified At" + ), + ), + ( + "deleted_at", + models.DateTimeField( + blank=True, null=True, verbose_name="Deleted At" + ), + ), + ( + "id", + models.UUIDField( + db_index=True, + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + unique=True, + ), + ), + ("description_json", models.JSONField(blank=True, default=dict)), + ("description_html", models.TextField(blank=True, default="

")), + ("description_binary", models.BinaryField(null=True)), + ("description_stripped", models.TextField(blank=True, null=True)), + ( + "created_by", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="%(class)s_created_by", + to=settings.AUTH_USER_MODEL, + verbose_name="Created By", + ), + ), + ( + "description", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="versions", + to="db.description", + ), + ), + ( + "project", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="project_%(class)s", + to="db.project", + ), + ), + ( + "updated_by", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="%(class)s_updated_by", + to=settings.AUTH_USER_MODEL, + verbose_name="Last Modified By", + ), + ), + ( + "workspace", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="workspace_%(class)s", + to="db.workspace", + ), + ), + ], + options={ + "verbose_name": "Description Version", + "verbose_name_plural": "Description Versions", + "db_table": "description_versions", + "ordering": ("-created_at",), + }, + ), + ] diff --git a/apps/api/plane/db/migrations/0102_page_sort_order_pagelog_entity_type_and_more.py b/apps/api/plane/db/migrations/0102_page_sort_order_pagelog_entity_type_and_more.py new file mode 100644 index 000000000..59908a96b --- /dev/null +++ b/apps/api/plane/db/migrations/0102_page_sort_order_pagelog_entity_type_and_more.py @@ -0,0 +1,30 @@ +# Generated by Django 4.2.22 on 2025-08-29 11:31 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("db", "0101_description_descriptionversion"), + ] + + operations = [ + migrations.AddField( + model_name="page", + name="sort_order", + field=models.FloatField(default=65535), + ), + migrations.AddField( + model_name="pagelog", + name="entity_type", + 
field=models.CharField( + blank=True, max_length=30, null=True, verbose_name="Entity Type" + ), + ), + migrations.AlterField( + model_name="pagelog", + name="entity_identifier", + field=models.UUIDField(blank=True, null=True), + ), + ] diff --git a/apps/api/plane/db/migrations/0103_fileasset_asset_entity_type_idx_and_more.py b/apps/api/plane/db/migrations/0103_fileasset_asset_entity_type_idx_and_more.py new file mode 100644 index 000000000..82deba462 --- /dev/null +++ b/apps/api/plane/db/migrations/0103_fileasset_asset_entity_type_idx_and_more.py @@ -0,0 +1,75 @@ +# Generated by Django 4.2.22 on 2025-09-01 14:33 + +from django.db import migrations, models +from django.contrib.postgres.operations import AddIndexConcurrently + + +class Migration(migrations.Migration): + atomic = False + + dependencies = [ + ('db', '0102_page_sort_order_pagelog_entity_type_and_more'), + ] + + operations = [ + AddIndexConcurrently( + model_name='fileasset', + index=models.Index(fields=['entity_type'], name='asset_entity_type_idx'), + ), + AddIndexConcurrently( + model_name='fileasset', + index=models.Index(fields=['entity_identifier'], name='asset_entity_identifier_idx'), + ), + AddIndexConcurrently( + model_name='fileasset', + index=models.Index(fields=['entity_type', 'entity_identifier'], name='asset_entity_idx'), + ), + AddIndexConcurrently( + model_name='notification', + index=models.Index(fields=['entity_identifier'], name='notif_entity_identifier_idx'), + ), + AddIndexConcurrently( + model_name='notification', + index=models.Index(fields=['entity_name'], name='notif_entity_name_idx'), + ), + AddIndexConcurrently( + model_name='notification', + index=models.Index(fields=['read_at'], name='notif_read_at_idx'), + ), + AddIndexConcurrently( + model_name='notification', + index=models.Index(fields=['receiver', 'read_at'], name='notif_entity_idx'), + ), + AddIndexConcurrently( + model_name='pagelog', + index=models.Index(fields=['entity_type'], name='pagelog_entity_type_idx'), + ), + 
AddIndexConcurrently( + model_name='pagelog', + index=models.Index(fields=['entity_identifier'], name='pagelog_entity_id_idx'), + ), + AddIndexConcurrently( + model_name='pagelog', + index=models.Index(fields=['entity_name'], name='pagelog_entity_name_idx'), + ), + AddIndexConcurrently( + model_name='pagelog', + index=models.Index(fields=['entity_type', 'entity_identifier'], name='pagelog_type_id_idx'), + ), + AddIndexConcurrently( + model_name='pagelog', + index=models.Index(fields=['entity_name', 'entity_identifier'], name='pagelog_name_id_idx'), + ), + AddIndexConcurrently( + model_name='userfavorite', + index=models.Index(fields=['entity_type'], name='fav_entity_type_idx'), + ), + AddIndexConcurrently( + model_name='userfavorite', + index=models.Index(fields=['entity_identifier'], name='fav_entity_identifier_idx'), + ), + AddIndexConcurrently( + model_name='userfavorite', + index=models.Index(fields=['entity_type', 'entity_identifier'], name='fav_entity_idx'), + ), + ] diff --git a/apps/api/plane/db/migrations/0104_cycleuserproperties_rich_filters_and_more.py b/apps/api/plane/db/migrations/0104_cycleuserproperties_rich_filters_and_more.py new file mode 100644 index 000000000..6344e3165 --- /dev/null +++ b/apps/api/plane/db/migrations/0104_cycleuserproperties_rich_filters_and_more.py @@ -0,0 +1,43 @@ +# Generated by Django 4.2.22 on 2025-09-03 05:18 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0103_fileasset_asset_entity_type_idx_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='cycleuserproperties', + name='rich_filters', + field=models.JSONField(default=dict), + ), + migrations.AddField( + model_name='exporterhistory', + name='rich_filters', + field=models.JSONField(blank=True, default=dict, null=True), + ), + migrations.AddField( + model_name='issueuserproperty', + name='rich_filters', + field=models.JSONField(default=dict), + ), + migrations.AddField( + 
model_name='issueview', + name='rich_filters', + field=models.JSONField(default=dict), + ), + migrations.AddField( + model_name='moduleuserproperties', + name='rich_filters', + field=models.JSONField(default=dict), + ), + migrations.AddField( + model_name='workspaceuserproperties', + name='rich_filters', + field=models.JSONField(default=dict), + ), + ] diff --git a/apps/api/plane/db/models/__init__.py b/apps/api/plane/db/models/__init__.py index 3cf46c919..de8af54e4 100644 --- a/apps/api/plane/db/models/__init__.py +++ b/apps/api/plane/db/models/__init__.py @@ -83,3 +83,5 @@ from .label import Label from .device import Device, DeviceSession from .sticky import Sticky + +from .description import Description, DescriptionVersion \ No newline at end of file diff --git a/apps/api/plane/db/models/asset.py b/apps/api/plane/db/models/asset.py index 9973d122f..965262482 100644 --- a/apps/api/plane/db/models/asset.py +++ b/apps/api/plane/db/models/asset.py @@ -76,6 +76,15 @@ class FileAsset(BaseModel): verbose_name_plural = "File Assets" db_table = "file_assets" ordering = ("-created_at",) + indexes = [ + models.Index(fields=["entity_type"], name="asset_entity_type_idx"), + models.Index( + fields=["entity_identifier"], name="asset_entity_identifier_idx" + ), + models.Index( + fields=["entity_type", "entity_identifier"], name="asset_entity_idx" + ), + ] def __str__(self): return str(self.asset) diff --git a/apps/api/plane/db/models/cycle.py b/apps/api/plane/db/models/cycle.py index 6449fd145..9e45028c5 100644 --- a/apps/api/plane/db/models/cycle.py +++ b/apps/api/plane/db/models/cycle.py @@ -71,7 +71,7 @@ class Cycle(ProjectBaseModel): archived_at = models.DateTimeField(null=True) logo_props = models.JSONField(default=dict) # timezone - TIMEZONE_CHOICES = tuple(zip(pytz.all_timezones, pytz.all_timezones)) + TIMEZONE_CHOICES = tuple(zip(pytz.common_timezones, pytz.common_timezones)) timezone = models.CharField(max_length=255, default="UTC", choices=TIMEZONE_CHOICES) version = 
models.IntegerField(default=1) @@ -139,6 +139,7 @@ class CycleUserProperties(ProjectBaseModel): filters = models.JSONField(default=get_default_filters) display_filters = models.JSONField(default=get_default_display_filters) display_properties = models.JSONField(default=get_default_display_properties) + rich_filters = models.JSONField(default=dict) class Meta: unique_together = ["cycle", "user", "deleted_at"] diff --git a/apps/api/plane/db/models/description.py b/apps/api/plane/db/models/description.py new file mode 100644 index 000000000..24c15d395 --- /dev/null +++ b/apps/api/plane/db/models/description.py @@ -0,0 +1,56 @@ +from django.db import models +from django.utils.html import strip_tags +from .workspace import WorkspaceBaseModel + + +class Description(WorkspaceBaseModel): + + + description_json = models.JSONField(default=dict, blank=True) + description_html = models.TextField(blank=True, default="

") + description_binary = models.BinaryField(null=True) + description_stripped = models.TextField(blank=True, null=True) + + class Meta: + verbose_name = "Description" + verbose_name_plural = "Descriptions" + db_table = "descriptions" + ordering = ("-created_at",) + + def save(self, *args, **kwargs): + # Strip the html tags using html parser + self.description_stripped = ( + None + if (self.description_html == "" or self.description_html is None) + else strip_tags(self.description_html) + ) + super(Description, self).save(*args, **kwargs) + + +class DescriptionVersion(WorkspaceBaseModel): + """ + DescriptionVersion is a model used to store historical versions of a Description. + """ + + description = models.ForeignKey( + "db.Description", on_delete=models.CASCADE, related_name="versions" + ) + description_json = models.JSONField(default=dict, blank=True) + description_html = models.TextField(blank=True, default="

") + description_binary = models.BinaryField(null=True) + description_stripped = models.TextField(blank=True, null=True) + + class Meta: + verbose_name = "Description Version" + verbose_name_plural = "Description Versions" + db_table = "description_versions" + ordering = ("-created_at",) + + def save(self, *args, **kwargs): + # Strip the html tags using html parser + self.description_stripped = ( + None + if (self.description_html == "" or self.description_html is None) + else strip_tags(self.description_html) + ) + super(DescriptionVersion, self).save(*args, **kwargs) diff --git a/apps/api/plane/db/models/exporter.py b/apps/api/plane/db/models/exporter.py index 48d40a1aa..40c13576d 100644 --- a/apps/api/plane/db/models/exporter.py +++ b/apps/api/plane/db/models/exporter.py @@ -56,6 +56,7 @@ class ExporterHistory(BaseModel): related_name="workspace_exporters", ) filters = models.JSONField(blank=True, null=True) + rich_filters = models.JSONField(default=dict, blank=True, null=True) class Meta: verbose_name = "Exporter" diff --git a/apps/api/plane/db/models/favorite.py b/apps/api/plane/db/models/favorite.py index 680bf7e37..165072088 100644 --- a/apps/api/plane/db/models/favorite.py +++ b/apps/api/plane/db/models/favorite.py @@ -41,6 +41,15 @@ class UserFavorite(WorkspaceBaseModel): verbose_name_plural = "User Favorites" db_table = "user_favorites" ordering = ("-created_at",) + indexes = [ + models.Index(fields=["entity_type"], name="fav_entity_type_idx"), + models.Index( + fields=["entity_identifier"], name="fav_entity_identifier_idx" + ), + models.Index( + fields=["entity_type", "entity_identifier"], name="fav_entity_idx" + ), + ] def save(self, *args, **kwargs): if self._state.adding: diff --git a/apps/api/plane/db/models/intake.py b/apps/api/plane/db/models/intake.py index 2f698ae1b..c6c366c9e 100644 --- a/apps/api/plane/db/models/intake.py +++ b/apps/api/plane/db/models/intake.py @@ -35,6 +35,14 @@ class SourceType(models.TextChoices): IN_APP = "IN_APP" +class 
IntakeIssueStatus(models.IntegerChoices): + PENDING = -2 + REJECTED = -1 + SNOOZED = 0 + ACCEPTED = 1 + DUPLICATE = 2 + + class IntakeIssue(ProjectBaseModel): intake = models.ForeignKey( "db.Intake", related_name="issue_intake", on_delete=models.CASCADE diff --git a/apps/api/plane/db/models/issue.py b/apps/api/plane/db/models/issue.py index a3994d79e..b8efd6ae7 100644 --- a/apps/api/plane/db/models/issue.py +++ b/apps/api/plane/db/models/issue.py @@ -509,6 +509,7 @@ class IssueUserProperty(ProjectBaseModel): filters = models.JSONField(default=get_default_filters) display_filters = models.JSONField(default=get_default_display_filters) display_properties = models.JSONField(default=get_default_display_properties) + rich_filters = models.JSONField(default=dict) class Meta: verbose_name = "Issue User Property" diff --git a/apps/api/plane/db/models/module.py b/apps/api/plane/db/models/module.py index 6fba4d03c..897cf26b1 100644 --- a/apps/api/plane/db/models/module.py +++ b/apps/api/plane/db/models/module.py @@ -51,6 +51,15 @@ def get_default_display_properties(): } +class ModuleStatus(models.TextChoices): + BACKLOG = "backlog" + PLANNED = "planned" + IN_PROGRESS = "in-progress" + PAUSED = "paused" + COMPLETED = "completed" + CANCELLED = "cancelled" + + class Module(ProjectBaseModel): name = models.CharField(max_length=255, verbose_name="Module Name") description = models.TextField(verbose_name="Module Description", blank=True) @@ -198,6 +207,7 @@ class ModuleUserProperties(ProjectBaseModel): filters = models.JSONField(default=get_default_filters) display_filters = models.JSONField(default=get_default_display_filters) display_properties = models.JSONField(default=get_default_display_properties) + rich_filters = models.JSONField(default=dict) class Meta: unique_together = ["module", "user", "deleted_at"] diff --git a/apps/api/plane/db/models/notification.py b/apps/api/plane/db/models/notification.py index 2847c07cf..a57e288ab 100644 --- 
a/apps/api/plane/db/models/notification.py +++ b/apps/api/plane/db/models/notification.py @@ -39,6 +39,14 @@ class Notification(BaseModel): verbose_name_plural = "Notifications" db_table = "notifications" ordering = ("-created_at",) + indexes = [ + models.Index( + fields=["entity_identifier"], name="notif_entity_identifier_idx" + ), + models.Index(fields=["entity_name"], name="notif_entity_name_idx"), + models.Index(fields=["read_at"], name="notif_read_at_idx"), + models.Index(fields=["receiver", "read_at"], name="notif_entity_idx"), + ] def __str__(self): """Return name of the notifications""" diff --git a/apps/api/plane/db/models/page.py b/apps/api/plane/db/models/page.py index 30a641ef8..71fc49c45 100644 --- a/apps/api/plane/db/models/page.py +++ b/apps/api/plane/db/models/page.py @@ -57,6 +57,7 @@ class Page(BaseModel): ) moved_to_page = models.UUIDField(null=True, blank=True) moved_to_project = models.UUIDField(null=True, blank=True) + sort_order = models.FloatField(default=65535) external_id = models.CharField(max_length=255, null=True, blank=True) external_source = models.CharField(max_length=255, null=True, blank=True) @@ -98,8 +99,11 @@ class PageLog(BaseModel): ) transaction = models.UUIDField(default=uuid.uuid4) page = models.ForeignKey(Page, related_name="page_log", on_delete=models.CASCADE) - entity_identifier = models.UUIDField(null=True) + entity_identifier = models.UUIDField(null=True, blank=True) entity_name = models.CharField(max_length=30, verbose_name="Transaction Type") + entity_type = models.CharField( + max_length=30, verbose_name="Entity Type", null=True, blank=True + ) workspace = models.ForeignKey( "db.Workspace", on_delete=models.CASCADE, related_name="workspace_page_log" ) @@ -110,6 +114,17 @@ class PageLog(BaseModel): verbose_name_plural = "Page Logs" db_table = "page_logs" ordering = ("-created_at",) + indexes = [ + models.Index(fields=["entity_type"], name="pagelog_entity_type_idx"), + models.Index(fields=["entity_identifier"], 
name="pagelog_entity_id_idx"), + models.Index(fields=["entity_name"], name="pagelog_entity_name_idx"), + models.Index( + fields=["entity_type", "entity_identifier"], name="pagelog_type_id_idx" + ), + models.Index( + fields=["entity_name", "entity_identifier"], name="pagelog_name_id_idx" + ), + ] def __str__(self): return f"{self.page.name} {self.entity_name}" diff --git a/apps/api/plane/db/models/project.py b/apps/api/plane/db/models/project.py index 79a0707d3..e58f60e80 100644 --- a/apps/api/plane/db/models/project.py +++ b/apps/api/plane/db/models/project.py @@ -120,7 +120,7 @@ class Project(BaseModel): ) archived_at = models.DateTimeField(null=True) # timezone - TIMEZONE_CHOICES = tuple(zip(pytz.all_timezones, pytz.all_timezones)) + TIMEZONE_CHOICES = tuple(zip(pytz.common_timezones, pytz.common_timezones)) timezone = models.CharField(max_length=255, default="UTC", choices=TIMEZONE_CHOICES) # external_id for imports external_source = models.CharField(max_length=255, null=True, blank=True) diff --git a/apps/api/plane/db/models/user.py b/apps/api/plane/db/models/user.py index b2613a427..2e7d2a7b8 100644 --- a/apps/api/plane/db/models/user.py +++ b/apps/api/plane/db/models/user.py @@ -15,6 +15,7 @@ from django.utils import timezone # Module imports from plane.db.models import FileAsset from ..mixins import TimeAuditModel +from plane.utils.color import get_random_color def get_default_onboarding(): @@ -101,7 +102,7 @@ class User(AbstractBaseUser, PermissionsMixin): ) # timezone - USER_TIMEZONE_CHOICES = tuple(zip(pytz.all_timezones, pytz.all_timezones)) + USER_TIMEZONE_CHOICES = tuple(zip(pytz.common_timezones, pytz.common_timezones)) user_timezone = models.CharField( max_length=255, default="UTC", choices=USER_TIMEZONE_CHOICES ) @@ -222,6 +223,11 @@ class Profile(TimeAuditModel): start_of_the_week = models.PositiveSmallIntegerField( choices=START_OF_THE_WEEK_CHOICES, default=SUNDAY ) + goals = models.JSONField(default=dict) + background_color = 
models.CharField(max_length=255, default=get_random_color) + + # marketing + has_marketing_email_consent = models.BooleanField(default=False) class Meta: verbose_name = "Profile" @@ -270,9 +276,9 @@ def create_user_notification(sender, instance, created, **kwargs): UserNotificationPreference.objects.create( user=instance, - property_change=False, - state_change=False, - comment=False, - mention=False, - issue_completed=False, + property_change=True, + state_change=True, + comment=True, + mention=True, + issue_completed=True, ) diff --git a/apps/api/plane/db/models/view.py b/apps/api/plane/db/models/view.py index c9182acce..87d22e44f 100644 --- a/apps/api/plane/db/models/view.py +++ b/apps/api/plane/db/models/view.py @@ -58,6 +58,7 @@ class IssueView(WorkspaceBaseModel): filters = models.JSONField(default=dict) display_filters = models.JSONField(default=get_default_display_filters) display_properties = models.JSONField(default=get_default_display_properties) + rich_filters = models.JSONField(default=dict) access = models.PositiveSmallIntegerField( default=1, choices=((0, "Private"), (1, "Public")) ) diff --git a/apps/api/plane/db/models/workspace.py b/apps/api/plane/db/models/workspace.py index 7e5103a70..75a45f72c 100644 --- a/apps/api/plane/db/models/workspace.py +++ b/apps/api/plane/db/models/workspace.py @@ -1,9 +1,6 @@ # Python imports -from django.db.models.functions import Ln import pytz -import time -from django.utils import timezone -from typing import Optional, Any, Tuple, Dict +from typing import Optional, Any # Django imports from django.conf import settings @@ -13,6 +10,7 @@ from django.db import models # Module imports from .base import BaseModel from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS +from plane.utils.color import get_random_color ROLE_CHOICES = ((20, "Admin"), (15, "Member"), (5, "Guest")) @@ -115,7 +113,7 @@ def slug_validator(value): class Workspace(BaseModel): - TIMEZONE_CHOICES = tuple(zip(pytz.all_timezones, 
pytz.all_timezones)) + TIMEZONE_CHOICES = tuple(zip(pytz.common_timezones, pytz.common_timezones)) name = models.CharField(max_length=80, verbose_name="Workspace Name") logo = models.TextField(verbose_name="Logo", blank=True, null=True) @@ -136,6 +134,7 @@ class Workspace(BaseModel): ) organization_size = models.CharField(max_length=20, blank=True, null=True) timezone = models.CharField(max_length=255, default="UTC", choices=TIMEZONE_CHOICES) + background_color = models.CharField(max_length=255, default=get_random_color) def __str__(self): """Return name of the Workspace""" @@ -333,6 +332,7 @@ class WorkspaceUserProperties(BaseModel): filters = models.JSONField(default=get_default_filters) display_filters = models.JSONField(default=get_default_display_filters) display_properties = models.JSONField(default=get_default_display_properties) + rich_filters = models.JSONField(default=dict) class Meta: unique_together = ["workspace", "user", "deleted_at"] diff --git a/apps/api/plane/license/management/commands/register_instance.py b/apps/api/plane/license/management/commands/register_instance.py index 692ca350f..6717cafd1 100644 --- a/apps/api/plane/license/management/commands/register_instance.py +++ b/apps/api/plane/license/management/commands/register_instance.py @@ -2,11 +2,12 @@ import json import secrets import os +import requests # Django imports from django.core.management.base import BaseCommand, CommandError from django.utils import timezone -from django.conf import settings + # Module imports from plane.license.models import Instance, InstanceEdition @@ -20,21 +21,38 @@ class Command(BaseCommand): # Positional argument parser.add_argument("machine_signature", type=str, help="Machine signature") - def read_package_json(self): - with open("package.json", "r") as file: - # Load JSON content from the file - data = json.load(file) + def check_for_current_version(self): + if os.environ.get("APP_VERSION", False): + return os.environ.get("APP_VERSION") - payload = { - 
"instance_key": settings.INSTANCE_KEY, - "version": data.get("version", 0.1), - } - return payload + try: + with open("package.json", "r") as file: + data = json.load(file) + return data.get("version", "v0.1.0") + except Exception: + self.stdout.write("Error checking for current version") + return "v0.1.0" + + def check_for_latest_version(self, fallback_version): + try: + response = requests.get( + "https://api.github.com/repos/makeplane/plane/releases/latest", + timeout=10, + ) + response.raise_for_status() + data = response.json() + return data.get("tag_name", fallback_version) + except Exception: + self.stdout.write("Error checking for latest version") + return fallback_version def handle(self, *args, **options): # Check if the instance is registered instance = Instance.objects.first() + current_version = self.check_for_current_version() + latest_version = self.check_for_latest_version(current_version) + # If instance is None then register this instance if instance is None: machine_signature = options.get("machine_signature", "machine-signature") @@ -42,13 +60,11 @@ class Command(BaseCommand): if not machine_signature: raise CommandError("Machine signature is required") - payload = self.read_package_json() - instance = Instance.objects.create( instance_name="Plane Community Edition", instance_id=secrets.token_hex(12), - current_version=payload.get("version"), - latest_version=payload.get("version"), + current_version=current_version, + latest_version=latest_version, last_checked_at=timezone.now(), is_test=os.environ.get("IS_TEST", "0") == "1", edition=InstanceEdition.PLANE_COMMUNITY.value, @@ -57,11 +73,11 @@ class Command(BaseCommand): self.stdout.write(self.style.SUCCESS("Instance registered")) else: self.stdout.write(self.style.SUCCESS("Instance already registered")) - payload = self.read_package_json() + # Update the instance details instance.last_checked_at = timezone.now() - instance.current_version = payload.get("version") - instance.latest_version = 
payload.get("version") + instance.current_version = current_version + instance.latest_version = latest_version instance.is_test = os.environ.get("IS_TEST", "0") == "1" instance.edition = InstanceEdition.PLANE_COMMUNITY.value instance.save() diff --git a/apps/api/plane/middleware/db_routing.py b/apps/api/plane/middleware/db_routing.py new file mode 100644 index 000000000..dc7ff3fa3 --- /dev/null +++ b/apps/api/plane/middleware/db_routing.py @@ -0,0 +1,162 @@ +""" +Database routing middleware for read replica selection. +This middleware determines whether database queries should be routed to +read replicas or the primary database based on HTTP method and view configuration. +""" + +import logging +from typing import Callable, Optional + +from django.http import HttpRequest, HttpResponse + +from plane.utils.core import ( + set_use_read_replica, + clear_read_replica_context, +) + +logger = logging.getLogger("plane.api") + + +class ReadReplicaRoutingMiddleware: + """ + Middleware for intelligent database routing to read replicas. + Routing Logic: + • Non-GET requests (POST, PUT, DELETE, PATCH) ➜ Primary database + • GET requests: + - View has use_read_replica=False ➜ Primary database + - View has use_read_replica=True ➜ Read replica + - View has no use_read_replica attribute ➜ Primary database (safe default) + The middleware supports both Django CBVs and DRF APIViews/ViewSets. + Context is properly isolated per request to prevent data leakage. + """ + + # HTTP methods that are considered read-only by default + READ_ONLY_METHODS = {"GET", "HEAD", "OPTIONS"} + + def __init__(self, get_response): + """ + Initialize the middleware with the next middleware/view in the chain. + Args: + get_response: The next middleware or view function + """ + self.get_response = get_response + + def __call__(self, request: HttpRequest) -> HttpResponse: + """ + Process the request and determine database routing. 
+ Args: + request: The HTTP request object + Returns: + HttpResponse: The HTTP response from the view + """ + # For non-read operations, set primary database immediately + if request.method not in self.READ_ONLY_METHODS: + set_use_read_replica(False) + logger.debug(f"Routing {request.method} {request.path} to primary database") + + try: + # Process the request through the middleware chain + response = self.get_response(request) + return response + finally: + # Always clean up context, even if an exception occurs + # This prevents context leakage between requests + clear_read_replica_context() + + def process_view( + self, + request: HttpRequest, + view_func: Callable, + view_args: tuple, + view_kwargs: dict, + ) -> None: + """ + Hook called just before Django calls the view. + This is more efficient than resolving URLs in __call__ since Django + provides the view function directly. + Args: + request: The HTTP request object + view_func: The view function to be called + view_args: Positional arguments for the view + view_kwargs: Keyword arguments for the view + """ + # Only process read operations (write operations already handled in __call__) + if request.method in self.READ_ONLY_METHODS: + use_replica = self._should_use_read_replica(view_func) + set_use_read_replica(use_replica) + + db_type = "read replica" if use_replica else "primary database" + logger.debug(f"Routing {request.method} {request.path} to {db_type}") + + # Return None to continue normal request processing + return None + + def _should_use_read_replica(self, view_func: Callable) -> bool: + """ + Determine if the view should use read replica based on its configuration. 
+ Args: + view_func: The view function to inspect + Returns: + bool: True if should use read replica, False for primary database + """ + use_replica_attr = self._get_use_replica_attribute(view_func) + + # Default to primary database for GET requests if no explicit setting + # This ensures only views that explicitly opt-in use read replicas + if use_replica_attr is None: + return False + + return bool(use_replica_attr) + + def _get_use_replica_attribute(self, view_func: Callable) -> Optional[bool]: + """ + Extract the use_read_replica attribute from various view types. + Args: + view_func: The view function to inspect + Returns: + Optional[bool]: The use_read_replica setting, or None if not found + """ + # Return None if view_func is None to prevent AttributeError + if view_func is None: + return None + + # Check function-based view attribute + use_replica = getattr(view_func, "use_read_replica", None) + if use_replica is not None: + return use_replica + + # Check Django CBV wrapper + if hasattr(view_func, "view_class"): + use_replica = getattr(view_func.view_class, "use_read_replica", None) + if use_replica is not None: + return use_replica + + # Check DRF wrapper (APIView / ViewSet) + if hasattr(view_func, "cls"): + use_replica = getattr(view_func.cls, "use_read_replica", None) + if use_replica is not None: + return use_replica + + return None + + def process_exception(self, request: HttpRequest, exception: Exception) -> None: + """ + Handle exceptions that occur during view processing. + This provides an additional safety net for context cleanup when views + raise exceptions, complementing the try/finally in __call__. 
+ Args: + request: The HTTP request object + exception: The exception that was raised + Returns: + None: Don't handle the exception, just clean up context + """ + # Clean up context on exception as a safety measure + # The try/finally in __call__ should handle most cases, but this + # provides extra protection specifically for view exceptions + clear_read_replica_context() + logger.debug( + f"Cleaned up read replica context due to exception: {type(exception).__name__}" + ) + + # Return None to let the exception continue propagating + return None diff --git a/apps/api/plane/settings/common.py b/apps/api/plane/settings/common.py index 38d2ac6e0..3c3410107 100644 --- a/apps/api/plane/settings/common.py +++ b/apps/api/plane/settings/common.py @@ -75,6 +75,8 @@ REST_FRAMEWORK = { "DEFAULT_RENDERER_CLASSES": ("rest_framework.renderers.JSONRenderer",), "DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",), "EXCEPTION_HANDLER": "plane.authentication.adapter.exception.auth_exception_handler", + # Preserve original Django URL parameter names (pk) instead of converting to 'id' + "SCHEMA_COERCE_PATH_PK": False, } # Django Auth Backend @@ -147,6 +149,29 @@ else: } } + +if os.environ.get("ENABLE_READ_REPLICA", "0") == "1": + if bool(os.environ.get("DATABASE_READ_REPLICA_URL")): + # Parse database configuration from $DATABASE_URL + DATABASES["replica"] = dj_database_url.parse( + os.environ.get("DATABASE_READ_REPLICA_URL") + ) + else: + DATABASES["replica"] = { + "ENGINE": "django.db.backends.postgresql", + "NAME": os.environ.get("POSTGRES_READ_REPLICA_DB"), + "USER": os.environ.get("POSTGRES_READ_REPLICA_USER"), + "PASSWORD": os.environ.get("POSTGRES_READ_REPLICA_PASSWORD"), + "HOST": os.environ.get("POSTGRES_READ_REPLICA_HOST"), + "PORT": os.environ.get("POSTGRES_READ_REPLICA_PORT", "5432"), + } + + # Database Routers + DATABASE_ROUTERS = ["plane.utils.core.dbrouters.ReadReplicaRouter"] + # Add middleware at the end for read replica routing + 
MIDDLEWARE.append("plane.middleware.db_routing.ReadReplicaRoutingMiddleware") + + # Redis Config REDIS_URL = os.environ.get("REDIS_URL") REDIS_SSL = REDIS_URL and "rediss" in REDIS_URL @@ -259,7 +284,7 @@ CELERY_IMPORTS = ( "plane.bgtasks.exporter_expired_task", "plane.bgtasks.file_asset_task", "plane.bgtasks.email_notification_task", - "plane.bgtasks.api_logs_task", + "plane.bgtasks.cleanup_task", "plane.license.bgtasks.tracer", # management tasks "plane.bgtasks.dummy_data_task", @@ -279,16 +304,10 @@ GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", False) ANALYTICS_SECRET_KEY = os.environ.get("ANALYTICS_SECRET_KEY", False) ANALYTICS_BASE_API = os.environ.get("ANALYTICS_BASE_API", False) - # Posthog settings POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY", False) POSTHOG_HOST = os.environ.get("POSTHOG_HOST", False) -# instance key -INSTANCE_KEY = os.environ.get( - "INSTANCE_KEY", "ae6517d563dfc13d8270bd45cf17b08f70b37d989128a9dab46ff687603333c3" -) - # Skip environment variable configuration SKIP_ENV_VAR = os.environ.get("SKIP_ENV_VAR", "1") == "1" @@ -439,3 +458,14 @@ ATTACHMENT_MIME_TYPES = [ # Seed directory path SEED_DIR = os.path.join(BASE_DIR, "seeds") + +ENABLE_DRF_SPECTACULAR = os.environ.get("ENABLE_DRF_SPECTACULAR", "0") == "1" + +if ENABLE_DRF_SPECTACULAR: + REST_FRAMEWORK["DEFAULT_SCHEMA_CLASS"] = "drf_spectacular.openapi.AutoSchema" + INSTALLED_APPS.append("drf_spectacular") + from .openapi import SPECTACULAR_SETTINGS # noqa: F401 + +# MongoDB Settings +MONGO_DB_URL = os.environ.get("MONGO_DB_URL", False) +MONGO_DB_DATABASE = os.environ.get("MONGO_DB_DATABASE", False) diff --git a/apps/api/plane/settings/local.py b/apps/api/plane/settings/local.py index db60501f7..15af36a2d 100644 --- a/apps/api/plane/settings/local.py +++ b/apps/api/plane/settings/local.py @@ -73,5 +73,10 @@ LOGGING = { "handlers": ["console"], "propagate": False, }, + "plane.mongo": { + "level": "INFO", + "handlers": ["console"], + "propagate": False, + }, }, } diff 
--git a/apps/api/plane/settings/mongo.py b/apps/api/plane/settings/mongo.py new file mode 100644 index 000000000..57d25b477 --- /dev/null +++ b/apps/api/plane/settings/mongo.py @@ -0,0 +1,124 @@ +# Django imports +from django.conf import settings +import logging + +# Third party imports +from pymongo import MongoClient +from pymongo.database import Database +from pymongo.collection import Collection +from typing import Optional, TypeVar, Type + + +T = TypeVar("T", bound="MongoConnection") + +# Set up logger +logger = logging.getLogger("plane.mongo") + + +class MongoConnection: + """ + A singleton class that manages MongoDB connections. + + This class ensures only one MongoDB connection is maintained throughout the application. + It provides methods to access the MongoDB client, database, and collections. + + Attributes: + _instance (Optional[MongoConnection]): The singleton instance of this class + _client (Optional[MongoClient]): The MongoDB client instance + _db (Optional[Database]): The MongoDB database instance + """ + + _instance: Optional["MongoConnection"] = None + _client: Optional[MongoClient] = None + _db: Optional[Database] = None + + def __new__(cls: Type[T]) -> T: + """ + Creates a new instance of MongoConnection if one doesn't exist. + + Returns: + MongoConnection: The singleton instance + """ + if cls._instance is None: + cls._instance = super(MongoConnection, cls).__new__(cls) + try: + mongo_url = getattr(settings, "MONGO_DB_URL", None) + mongo_db_database = getattr(settings, "MONGO_DB_DATABASE", None) + + if not mongo_url or not mongo_db_database: + logger.warning( + "MongoDB connection parameters not configured. MongoDB functionality will be disabled." 
+ ) + return cls._instance + + cls._client = MongoClient(mongo_url) + cls._db = cls._client[mongo_db_database] + + # Test the connection + cls._client.server_info() + logger.info("MongoDB connection established successfully") + except Exception as e: + logger.warning( + f"Failed to initialize MongoDB connection: {str(e)}. MongoDB functionality will be disabled." + ) + return cls._instance + + @classmethod + def get_client(cls) -> Optional[MongoClient]: + """ + Returns the MongoDB client instance. + + Returns: + Optional[MongoClient]: The MongoDB client instance or None if not configured + """ + if cls._client is None: + cls._instance = cls() + return cls._client + + @classmethod + def get_db(cls) -> Optional[Database]: + """ + Returns the MongoDB database instance. + + Returns: + Optional[Database]: The MongoDB database instance or None if not configured + """ + if cls._db is None: + cls._instance = cls() + return cls._db + + @classmethod + def get_collection(cls, collection_name: str) -> Optional[Collection]: + """ + Returns a MongoDB collection by name. + + Args: + collection_name (str): The name of the collection to retrieve + + Returns: + Optional[Collection]: The MongoDB collection instance or None if not configured + """ + try: + db = cls.get_db() + if db is None: + logger.warning( + f"Cannot access collection '{collection_name}': MongoDB not configured" + ) + return None + return db[collection_name] + except Exception as e: + logger.warning(f"Failed to access collection '{collection_name}': {str(e)}") + return None + + @classmethod + def is_configured(cls) -> bool: + """ + Check if MongoDB is properly configured and connected. 
+ + Returns: + bool: True if MongoDB is configured and connected, False otherwise + """ + + if cls._client is None: + cls._instance = cls() + return cls._client is not None and cls._db is not None diff --git a/apps/api/plane/settings/openapi.py b/apps/api/plane/settings/openapi.py new file mode 100644 index 000000000..b79daeecf --- /dev/null +++ b/apps/api/plane/settings/openapi.py @@ -0,0 +1,272 @@ +""" +OpenAPI/Swagger configuration for drf-spectacular. + +This file contains the complete configuration for API documentation generation. +""" + +SPECTACULAR_SETTINGS = { + # ======================================================================== + # Basic API Information + # ======================================================================== + "TITLE": "The Plane REST API", + "DESCRIPTION": ( + "The Plane REST API\n\n" + "Visit our quick start guide and full API documentation at " + "[developers.plane.so](https://developers.plane.so/api-reference/introduction)." + ), + "CONTACT": { + "name": "Plane", + "url": "https://plane.so", + "email": "support@plane.so", + }, + "VERSION": "0.0.1", + "LICENSE": { + "name": "GNU AGPLv3", + "url": "https://github.com/makeplane/plane/blob/preview/LICENSE.txt", + }, + # ======================================================================== + # Schema Generation Settings + # ======================================================================== + "SERVE_INCLUDE_SCHEMA": False, + "SCHEMA_PATH_PREFIX": "/api/v1/", + "SCHEMA_CACHE_TIMEOUT": 0, # disables caching + # ======================================================================== + # Processing Hooks + # ======================================================================== + "PREPROCESSING_HOOKS": [ + "plane.utils.openapi.hooks.preprocess_filter_api_v1_paths", + ], + # ======================================================================== + # Server Configuration + # ======================================================================== + "SERVERS": [ + {"url": 
"http://localhost:8000", "description": "Local"}, + {"url": "https://api.plane.so", "description": "Production"}, + ], + # ======================================================================== + # API Tag Definitions + # ======================================================================== + "TAGS": [ + # System Features + { + "name": "Assets", + "description": ( + "**File Upload & Presigned URLs**\n\n" + "Generate presigned URLs for direct file uploads to cloud storage. Handle user avatars, " + "cover images, and generic project assets with secure upload workflows.\n\n" + "*Key Features:*\n" + "- Generate presigned URLs for S3 uploads\n" + "- Support for user avatars and cover images\n" + "- Generic asset upload for projects\n" + "- File validation and size limits\n\n" + "*Use Cases:* User profile images, project file uploads, secure direct-to-cloud uploads." + ), + }, + # Project Organization + { + "name": "Cycles", + "description": ( + "**Sprint & Development Cycles**\n\n" + "Create and manage development cycles (sprints) to organize work into time-boxed iterations. " + "Track progress, assign work items, and monitor team velocity.\n\n" + "*Key Features:*\n" + "- Create and configure development cycles\n" + "- Assign work items to cycles\n" + "- Track cycle progress and completion\n" + "- Generate cycle analytics and reports\n\n" + "*Use Cases:* Sprint planning, iterative development, progress tracking, team velocity." + ), + }, + # System Features + { + "name": "Intake", + "description": ( + "**Work Item Intake Queue**\n\n" + "Manage incoming work items through a dedicated intake queue for triage and review. 
" + "Submit, update, and process work items before they enter the main project workflow.\n\n" + "*Key Features:*\n" + "- Submit work items to intake queue\n" + "- Review and triage incoming work items\n" + "- Update intake work item status and properties\n" + "- Accept, reject, or modify work items before approval\n\n" + "*Use Cases:* Work item triage, external submissions, quality review, approval workflows." + ), + }, + # Project Organization + { + "name": "Labels", + "description": ( + "**Labels & Tags**\n\n" + "Create and manage labels to categorize and organize work items. Use color-coded labels " + "for easy identification, filtering, and project organization.\n\n" + "*Key Features:*\n" + "- Create custom labels with colors and descriptions\n" + "- Apply labels to work items for categorization\n" + "- Filter and search by labels\n" + "- Organize labels across projects\n\n" + "*Use Cases:* Priority marking, feature categorization, bug classification, team organization." + ), + }, + # Team & User Management + { + "name": "Members", + "description": ( + "**Team Member Management**\n\n" + "Manage team members, roles, and permissions within projects and workspaces. " + "Control access levels and track member participation.\n\n" + "*Key Features:*\n" + "- Invite and manage team members\n" + "- Assign roles and permissions\n" + "- Control project and workspace access\n" + "- Track member activity and participation\n\n" + "*Use Cases:* Team setup, access control, role management, collaboration." + ), + }, + # Project Organization + { + "name": "Modules", + "description": ( + "**Feature Modules**\n\n" + "Group related work items into modules for better organization and tracking. 
" + "Plan features, track progress, and manage deliverables at a higher level.\n\n" + "*Key Features:*\n" + "- Create and organize feature modules\n" + "- Group work items by module\n" + "- Track module progress and completion\n" + "- Manage module leads and assignments\n\n" + "*Use Cases:* Feature planning, release organization, progress tracking, team coordination." + ), + }, + # Core Project Management + { + "name": "Projects", + "description": ( + "**Project Management**\n\n" + "Create and manage projects to organize your development work. Configure project settings, " + "manage team access, and control project visibility.\n\n" + "*Key Features:*\n" + "- Create, update, and delete projects\n" + "- Configure project settings and preferences\n" + "- Manage team access and permissions\n" + "- Control project visibility and sharing\n\n" + "*Use Cases:* Project setup, team collaboration, access control, project configuration." + ), + }, + # Project Organization + { + "name": "States", + "description": ( + "**Workflow States**\n\n" + "Define custom workflow states for work items to match your team's process. " + "Configure state transitions and track work item progress through different stages.\n\n" + "*Key Features:*\n" + "- Create custom workflow states\n" + "- Configure state transitions and rules\n" + "- Track work item progress through states\n" + "- Set state-based permissions and automation\n\n" + "*Use Cases:* Custom workflows, status tracking, process automation, progress monitoring." 
+ ), + }, + # Team & User Management + { + "name": "Users", + "description": ( + "**Current User Information**\n\n" + "Get information about the currently authenticated user including profile details " + "and account settings.\n\n" + "*Key Features:*\n" + "- Retrieve current user profile\n" + "- Access user account information\n" + "- View user preferences and settings\n" + "- Get authentication context\n\n" + "*Use Cases:* Profile display, user context, account information, authentication status." + ), + }, + # Work Item Management + { + "name": "Work Item Activity", + "description": ( + "**Activity History & Search**\n\n" + "View activity history and search for work items across the workspace. " + "Get detailed activity logs and find work items using text search.\n\n" + "*Key Features:*\n" + "- View work item activity history\n" + "- Search work items across workspace\n" + "- Track changes and modifications\n" + "- Filter search results by project\n\n" + "*Use Cases:* Activity tracking, work item discovery, change history, workspace search." + ), + }, + { + "name": "Work Item Attachments", + "description": ( + "**Work Item File Attachments**\n\n" + "Generate presigned URLs for uploading files directly to specific work items. " + "Upload and manage attachments associated with work items.\n\n" + "*Key Features:*\n" + "- Generate presigned URLs for work item attachments\n" + "- Upload files directly to work items\n" + "- Retrieve and manage attachment metadata\n" + "- Delete attachments from work items\n\n" + "*Use Cases:* Screenshots, error logs, design files, supporting documents." + ), + }, + { + "name": "Work Item Comments", + "description": ( + "**Comments & Discussions**\n\n" + "Add comments and discussions to work items for team collaboration. 
" + "Support threaded conversations, mentions, and rich text formatting.\n\n" + "*Key Features:*\n" + "- Add comments to work items\n" + "- Thread conversations and replies\n" + "- Mention users and trigger notifications\n" + "- Rich text and markdown support\n\n" + "*Use Cases:* Team discussions, progress updates, code reviews, decision tracking." + ), + }, + { + "name": "Work Item Links", + "description": ( + "**External Links & References**\n\n" + "Link work items to external resources like documentation, repositories, or design files. " + "Maintain connections between work items and external systems.\n\n" + "*Key Features:*\n" + "- Add external URL links to work items\n" + "- Validate and preview linked resources\n" + "- Organize links by type and category\n" + "- Track link usage and access\n\n" + "*Use Cases:* Documentation links, repository connections, design references, external tools." + ), + }, + { + "name": "Work Items", + "description": ( + "**Work Items & Tasks**\n\n" + "Create and manage work items like tasks, bugs, features, and user stories. " + "The core entities for tracking work in your projects.\n\n" + "*Key Features:*\n" + "- Create, update, and manage work items\n" + "- Assign to team members and set priorities\n" + "- Track progress through workflow states\n" + "- Set due dates, estimates, and relationships\n\n" + "*Use Cases:* Bug tracking, task management, feature development, sprint planning." 
+ ), + }, + ], + # ======================================================================== + # Security & Authentication + # ======================================================================== + "AUTHENTICATION_WHITELIST": [ + "plane.api.middleware.api_authentication.APIKeyAuthentication", + ], + # ======================================================================== + # Schema Generation Options + # ======================================================================== + "COMPONENT_NO_READ_ONLY_REQUIRED": True, + "COMPONENT_SPLIT_REQUEST": True, + "ENUM_NAME_OVERRIDES": { + "ModuleStatusEnum": "plane.db.models.module.ModuleStatus", + "IntakeWorkItemStatusEnum": "plane.db.models.intake.IntakeIssueStatus", + }, +} diff --git a/apps/api/plane/settings/production.py b/apps/api/plane/settings/production.py index abd95d006..4f4e99bdb 100644 --- a/apps/api/plane/settings/production.py +++ b/apps/api/plane/settings/production.py @@ -83,5 +83,10 @@ LOGGING = { "handlers": ["console"], "propagate": False, }, + "plane.mongo": { + "level": "INFO", + "handlers": ["console"], + "propagate": False, + }, }, } diff --git a/apps/api/plane/space/serializer/__init__.py b/apps/api/plane/space/serializer/__init__.py index ad4e9897d..a3fe1029f 100644 --- a/apps/api/plane/space/serializer/__init__.py +++ b/apps/api/plane/space/serializer/__init__.py @@ -1,5 +1,5 @@ from .user import UserLiteSerializer -from .issue import LabelLiteSerializer, StateLiteSerializer, IssuePublicSerializer +from .issue import LabelLiteSerializer, IssuePublicSerializer -from .state import StateSerializer, StateLiteSerializer +from .state import StateSerializer diff --git a/apps/api/plane/space/serializer/issue.py b/apps/api/plane/space/serializer/issue.py index 3549e7626..64f151a2d 100644 --- a/apps/api/plane/space/serializer/issue.py +++ b/apps/api/plane/space/serializer/issue.py @@ -30,7 +30,6 @@ from plane.db.models import ( ) from plane.utils.content_validator import ( validate_html_content, - 
validate_json_content, validate_binary_data, ) @@ -290,20 +289,22 @@ class IssueCreateSerializer(BaseSerializer): raise serializers.ValidationError("Start date cannot exceed target date") # Validate description content for security - if "description" in data and data["description"]: - is_valid, error_msg = validate_json_content(data["description"]) - if not is_valid: - raise serializers.ValidationError({"description": error_msg}) - if "description_html" in data and data["description_html"]: - is_valid, error_msg = validate_html_content(data["description_html"]) + is_valid, error_msg, sanitized_html = validate_html_content( + data["description_html"] + ) if not is_valid: - raise serializers.ValidationError({"description_html": error_msg}) + raise serializers.ValidationError( + {"error": "html content is not valid"} + ) + # Update the data with sanitized HTML if available + if sanitized_html is not None: + data["description_html"] = sanitized_html if "description_binary" in data and data["description_binary"]: is_valid, error_msg = validate_binary_data(data["description_binary"]) if not is_valid: - raise serializers.ValidationError({"description_binary": error_msg}) + raise serializers.ValidationError({"description_binary": "Invalid binary data"}) return data diff --git a/apps/api/plane/tests/conftest.py b/apps/api/plane/tests/conftest.py index b70c9352a..15f3a8a28 100644 --- a/apps/api/plane/tests/conftest.py +++ b/apps/api/plane/tests/conftest.py @@ -1,15 +1,13 @@ import pytest -from django.conf import settings from rest_framework.test import APIClient from pytest_django.fixtures import django_db_setup -from unittest.mock import patch, MagicMock from plane.db.models import User, Workspace, WorkspaceMember from plane.db.models.api import APIToken @pytest.fixture(scope="session") -def django_db_setup(django_db_setup): +def django_db_setup(django_db_setup): # noqa: F811 """Set up the Django database for the test session""" pass diff --git 
a/apps/api/plane/tests/conftest_external.py b/apps/api/plane/tests/conftest_external.py index 50022b490..b4853e531 100644 --- a/apps/api/plane/tests/conftest_external.py +++ b/apps/api/plane/tests/conftest_external.py @@ -1,6 +1,5 @@ import pytest from unittest.mock import MagicMock, patch -from django.conf import settings @pytest.fixture diff --git a/apps/api/plane/tests/contract/api/test_labels.py b/apps/api/plane/tests/contract/api/test_labels.py new file mode 100644 index 000000000..a27bc31dc --- /dev/null +++ b/apps/api/plane/tests/contract/api/test_labels.py @@ -0,0 +1,228 @@ +import pytest +from rest_framework import status +from django.db import IntegrityError +from uuid import uuid4 + +from plane.db.models import Label, Project, ProjectMember + + +@pytest.fixture +def project(db, workspace, create_user): + """Create a test project with the user as a member""" + project = Project.objects.create( + name="Test Project", + identifier="TP", + workspace=workspace, + created_by=create_user, + ) + ProjectMember.objects.create( + project=project, + member=create_user, + role=20, # Admin role + is_active=True, + ) + return project + + +@pytest.fixture +def label_data(): + """Sample label data for tests""" + return { + "name": "Test Label", + "color": "#FF5733", + "description": "A test label for unit tests", + } + + +@pytest.fixture +def create_label(db, project, create_user): + """Create a test label""" + return Label.objects.create( + name="Existing Label", + color="#00FF00", + description="An existing label", + project=project, + workspace=project.workspace, + created_by=create_user, + ) + + +@pytest.mark.contract +class TestLabelListCreateAPIEndpoint: + """Test Label List and Create API Endpoint""" + + def get_label_url(self, workspace_slug, project_id): + """Helper to get label endpoint URL""" + return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/labels/" + + @pytest.mark.django_db + def test_create_label_success(self, api_key_client, workspace, 
project, label_data): + """Test successful label creation""" + url = self.get_label_url(workspace.slug, project.id) + + response = api_key_client.post(url, label_data, format="json") + + assert response.status_code == status.HTTP_201_CREATED + assert Label.objects.count() == 1 + + created_label = Label.objects.first() + assert created_label.name == label_data["name"] + assert created_label.color == label_data["color"] + assert created_label.description == label_data["description"] + assert created_label.project == project + + @pytest.mark.django_db + def test_create_label_invalid_data(self, api_key_client, workspace, project): + """Test label creation with invalid data""" + url = self.get_label_url(workspace.slug, project.id) + + # Test with empty data + response = api_key_client.post(url, {}, format="json") + assert response.status_code == status.HTTP_400_BAD_REQUEST + + # Test with missing name + response = api_key_client.post(url, {"color": "#FF5733"}, format="json") + assert response.status_code == status.HTTP_400_BAD_REQUEST + + @pytest.mark.django_db + def test_create_label_with_external_id(self, api_key_client, workspace, project): + """Test creating label with external ID""" + url = self.get_label_url(workspace.slug, project.id) + + label_data = { + "name": "External Label", + "color": "#FF5733", + "external_id": "ext-123", + "external_source": "github", + } + + response = api_key_client.post(url, label_data, format="json") + + assert response.status_code == status.HTTP_201_CREATED + created_label = Label.objects.first() + assert created_label.external_id == "ext-123" + assert created_label.external_source == "github" + + @pytest.mark.django_db + def test_create_label_duplicate_external_id( + self, api_key_client, workspace, project + ): + """Test creating label with duplicate external ID""" + url = self.get_label_url(workspace.slug, project.id) + + # Create first label + Label.objects.create( + name="First Label", + project=project, + workspace=workspace, 
+ external_id="ext-123", + external_source="github", + ) + + # Try to create second label with same external ID + label_data = { + "name": "Second Label", + "external_id": "ext-123", + "external_source": "github", + } + + response = api_key_client.post(url, label_data, format="json") + + assert response.status_code == status.HTTP_409_CONFLICT + assert "same external id" in response.data["error"] + + @pytest.mark.django_db + def test_list_labels_success( + self, api_key_client, workspace, project, create_label + ): + """Test successful label listing""" + url = self.get_label_url(workspace.slug, project.id) + + # Create additional labels + Label.objects.create( + name="Label 2", project=project, workspace=workspace, color="#00FF00" + ) + Label.objects.create( + name="Label 3", project=project, workspace=workspace, color="#0000FF" + ) + + response = api_key_client.get(url) + + assert response.status_code == status.HTTP_200_OK + assert "results" in response.data + assert len(response.data["results"]) == 3 # Including create_label fixture + + +@pytest.mark.contract +class TestLabelDetailAPIEndpoint: + """Test Label Detail API Endpoint""" + + def get_label_detail_url(self, workspace_slug, project_id, label_id): + """Helper to get label detail endpoint URL""" + return f"/api/v1/workspaces/{workspace_slug}/projects/{project_id}/labels/{label_id}/" + + @pytest.mark.django_db + def test_get_label_success(self, api_key_client, workspace, project, create_label): + """Test successful label retrieval""" + url = self.get_label_detail_url(workspace.slug, project.id, create_label.id) + + response = api_key_client.get(url) + + assert response.status_code == status.HTTP_200_OK + assert response.data["id"] == create_label.id + assert response.data["name"] == create_label.name + assert response.data["color"] == create_label.color + + @pytest.mark.django_db + def test_get_label_not_found(self, api_key_client, workspace, project): + """Test getting non-existent label""" + from uuid 
import uuid4 + + fake_id = uuid4() + url = self.get_label_detail_url(workspace.slug, project.id, fake_id) + + response = api_key_client.get(url) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @pytest.mark.django_db + def test_update_label_success( + self, api_key_client, workspace, project, create_label + ): + """Test successful label update""" + url = self.get_label_detail_url(workspace.slug, project.id, create_label.id) + + update_data = { + "name": f"Updated Label {uuid4()}", + } + + response = api_key_client.patch(url, update_data, format="json") + + assert response.status_code == status.HTTP_200_OK + + create_label.refresh_from_db() + assert create_label.name == update_data["name"] + + @pytest.mark.django_db + def test_update_label_invalid_data( + self, api_key_client, workspace, project, create_label + ): + """Test label update with invalid data""" + url = self.get_label_detail_url(workspace.slug, project.id, create_label.id) + + update_data = {"name": ""} + response = api_key_client.patch(url, update_data, format="json") + + # This might be 400 if name is required, or 200 if empty names are allowed + assert response.status_code in [status.HTTP_400_BAD_REQUEST, status.HTTP_200_OK] + + @pytest.mark.django_db + def test_delete_label_success( + self, api_key_client, workspace, project, create_label + ): + """Test successful label deletion""" + url = self.get_label_detail_url(workspace.slug, project.id, create_label.id) + + response = api_key_client.delete(url) + + assert response.status_code == status.HTTP_204_NO_CONTENT + assert not Label.objects.filter(id=create_label.id).exists() diff --git a/apps/api/plane/tests/contract/app/test_authentication.py b/apps/api/plane/tests/contract/app/test_authentication.py index a52882b9d..b44f5f3fc 100644 --- a/apps/api/plane/tests/contract/app/test_authentication.py +++ b/apps/api/plane/tests/contract/app/test_authentication.py @@ -6,7 +6,7 @@ from django.utils import timezone from rest_framework import 
status from django.test import Client from django.core.exceptions import ValidationError -from unittest.mock import patch, MagicMock +from unittest.mock import patch from plane.db.models import User from plane.settings.redis import redis_instance diff --git a/packages/propel/src/index.ts b/apps/api/plane/tests/unit/middleware/__init__.py similarity index 100% rename from packages/propel/src/index.ts rename to apps/api/plane/tests/unit/middleware/__init__.py diff --git a/apps/api/plane/tests/unit/middleware/test_db_routing.py b/apps/api/plane/tests/unit/middleware/test_db_routing.py new file mode 100644 index 000000000..73f222140 --- /dev/null +++ b/apps/api/plane/tests/unit/middleware/test_db_routing.py @@ -0,0 +1,433 @@ +""" +Unit tests for ReadReplicaRoutingMiddleware. +This module contains comprehensive tests for the ReadReplicaRoutingMiddleware +that handles intelligent database routing to read replicas based on HTTP methods +and view configuration. +Test Organization: +- TestReadReplicaRoutingMiddleware: Core middleware functionality +- TestProcessView: process_view method behavior +- TestReplicaDecisionLogic: Decision logic for replica usage +- TestAttributeDetection: View attribute detection methods +- TestExceptionHandling: Exception handling and cleanup +- TestRealViewIntegration: Real Django/DRF view integration +- TestEdgeCases: Edge cases and error conditions +""" + +import pytest +from unittest.mock import Mock, patch + +from django.http import HttpResponse +from django.test import RequestFactory +from django.views import View +from rest_framework.views import APIView +from rest_framework.viewsets import ViewSet + +from plane.middleware.db_routing import ReadReplicaRoutingMiddleware + + +# Pytest fixtures +@pytest.fixture +def mock_get_response(): + """Fixture for mocked get_response callable.""" + return Mock(return_value=HttpResponse()) + + +@pytest.fixture +def middleware(mock_get_response): + """Fixture for ReadReplicaRoutingMiddleware instance.""" 
+ return ReadReplicaRoutingMiddleware(mock_get_response) + + +@pytest.fixture +def request_factory(): + """Fixture for Django RequestFactory.""" + return RequestFactory() + + +@pytest.fixture +def mock_view_func(): + """Fixture for a basic mocked view function.""" + view = Mock() + view.use_read_replica = True + return view + + +@pytest.fixture +def get_request(request_factory): + """Fixture for a GET request.""" + return request_factory.get("/api/test/") + + +@pytest.fixture +def post_request(request_factory): + """Fixture for a POST request.""" + return request_factory.post("/api/test/") + + +@pytest.mark.unit +class TestReadReplicaRoutingMiddleware: + """Test cases for ReadReplicaRoutingMiddleware core functionality.""" + + def test_middleware_initialization(self, middleware, mock_get_response): + """Test middleware initializes correctly with expected attributes.""" + assert middleware.get_response == mock_get_response + assert hasattr(middleware, "READ_ONLY_METHODS") + assert "GET" in middleware.READ_ONLY_METHODS + assert "HEAD" in middleware.READ_ONLY_METHODS + assert "OPTIONS" in middleware.READ_ONLY_METHODS + + def test_read_only_methods_constant(self, middleware): + """Test READ_ONLY_METHODS contains expected HTTP methods.""" + expected_methods = {"GET", "HEAD", "OPTIONS"} + assert middleware.READ_ONLY_METHODS == expected_methods + + @patch("plane.middleware.db_routing.set_use_read_replica") + @patch("plane.middleware.db_routing.clear_read_replica_context") + def test_call_routes_write_methods_to_primary( + self, mock_clear, mock_set, middleware, post_request, mock_get_response + ): + """Test __call__ routes write methods to primary database.""" + response = middleware(post_request) + + mock_set.assert_called_once_with(False) # Primary database + mock_clear.assert_called_once() + assert response == mock_get_response.return_value + + @patch("plane.middleware.db_routing.clear_read_replica_context") + def test_call_with_read_methods_waits_for_process_view( + 
self, mock_clear, middleware, get_request, mock_get_response + ): + """Test __call__ with read methods waits for process_view.""" + response = middleware(get_request) + + mock_clear.assert_called_once() + assert response == mock_get_response.return_value + + @patch("plane.middleware.db_routing.clear_read_replica_context") + def test_call_always_cleans_up_context(self, mock_clear, middleware, get_request): + """Test __call__ always cleans up context.""" + middleware(get_request) + + mock_clear.assert_called_once() + + @patch("plane.middleware.db_routing.clear_read_replica_context") + def test_call_cleans_up_context_on_exception( + self, mock_clear, middleware, get_request, mock_get_response + ): + """Test __call__ cleans up context even if get_response raises.""" + mock_get_response.side_effect = Exception("Test exception") + + with pytest.raises(Exception, match="Test exception"): + middleware(get_request) + + mock_clear.assert_called_once() + + +@pytest.mark.unit +class TestProcessView: + """Test cases for process_view method functionality.""" + + @patch("plane.middleware.db_routing.set_use_read_replica") + def test_with_read_method_and_replica_true(self, mock_set, middleware, get_request): + """Test process_view with GET request and use_read_replica=True.""" + view_func = Mock() + view_func.use_read_replica = True + + result = middleware.process_view(get_request, view_func, (), {}) + + mock_set.assert_called_once_with(True) + assert result is None + + @patch("plane.middleware.db_routing.set_use_read_replica") + def test_with_read_method_and_replica_false( + self, mock_set, middleware, get_request + ): + """Test process_view with GET request and use_read_replica=False.""" + view_func = Mock() + view_func.use_read_replica = False + + result = middleware.process_view(get_request, view_func, (), {}) + + mock_set.assert_called_once_with(False) + assert result is None + + @patch("plane.middleware.db_routing.set_use_read_replica") + def 
test_with_read_method_and_no_replica_attribute( + self, mock_set, middleware, get_request + ): + """Test process_view with GET request and no use_read_replica attr.""" + view_func = Mock(spec=[]) # No use_read_replica attribute + + result = middleware.process_view(get_request, view_func, (), {}) + + mock_set.assert_called_once_with(False) # Default to primary + assert result is None + + def test_with_write_method_ignores_view_attributes(self, middleware, post_request): + """Test process_view with write methods ignores view attributes.""" + view_func = Mock() + view_func.use_read_replica = True # This should be ignored for POST + + result = middleware.process_view(post_request, view_func, (), {}) + + assert result is None # Should not process for write methods + + +@pytest.mark.unit +class TestReplicaDecisionLogic: + """Test cases for replica decision logic methods.""" + + def test_should_use_read_replica_with_true_attribute(self, middleware): + """Test _should_use_read_replica returns True for True attribute.""" + view_func = Mock() + view_func.use_read_replica = True + + result = middleware._should_use_read_replica(view_func) + + assert result is True + + def test_should_use_read_replica_with_false_attribute(self, middleware): + """Test _should_use_read_replica returns False for False attribute.""" + view_func = Mock() + view_func.use_read_replica = False + + result = middleware._should_use_read_replica(view_func) + + assert result is False + + def test_should_use_read_replica_with_no_attribute_defaults_false(self, middleware): + """Test _should_use_read_replica defaults to False for missing attr.""" + view_func = Mock(spec=[]) # No use_read_replica attribute + + result = middleware._should_use_read_replica(view_func) + + assert result is False + + +@pytest.mark.unit +class TestAttributeDetection: + """Test cases for view attribute detection methods.""" + + def test_get_use_replica_attribute_function_based_view(self, middleware): + """Test 
_get_use_replica_attribute with function-based view.""" + # Test with True + view_func = Mock() + view_func.use_read_replica = True + result = middleware._get_use_replica_attribute(view_func) + assert result is True + + # Test with False + view_func.use_read_replica = False + result = middleware._get_use_replica_attribute(view_func) + assert result is False + + # Test with no attribute + view_func = Mock(spec=[]) + result = middleware._get_use_replica_attribute(view_func) + assert result is None + + def test_get_use_replica_attribute_django_cbv(self, middleware): + """Test _get_use_replica_attribute with Django CBV wrapper.""" + view_class = Mock() + view_class.use_read_replica = True + view_func = Mock() + view_func.view_class = view_class + # Remove use_read_replica from view_func to ensure it checks view_class + del view_func.use_read_replica + + result = middleware._get_use_replica_attribute(view_func) + + assert result is True + + def test_get_use_replica_attribute_drf_wrapper(self, middleware): + """Test _get_use_replica_attribute with DRF wrapper.""" + + # Create a real object to avoid Mock issues + class ViewClass: + use_read_replica = True + + class ViewFunc: + cls = ViewClass() + + view_func = ViewFunc() + + result = middleware._get_use_replica_attribute(view_func) + + assert result is True + + def test_get_use_replica_attribute_priority_order(self, middleware): + """Test attribute priority: direct > view_class > cls.""" + view_func = Mock() + view_func.use_read_replica = True # Direct attribute (highest priority) + + # Add conflicting attributes with lower priority + view_class = Mock() + view_class.use_read_replica = False + view_func.view_class = view_class + + cls = Mock() + cls.use_read_replica = False + view_func.cls = cls + + result = middleware._get_use_replica_attribute(view_func) + + assert result is True # Should use direct attribute + + @pytest.mark.parametrize( + "value,expected", + [ + (True, True), + (False, False), + (1, True), + (0, 
False), + ("yes", True), + ("", False), + ([], False), + ([1], True), + (None, False), + ], + ) + def test_should_use_read_replica_truthy_falsy_values( + self, middleware, value, expected + ): + """Test _should_use_read_replica with various truthy/falsy values.""" + + # Create a real object to test the attribute handling + class TestView: + pass + + view_func = TestView() + view_func.use_read_replica = value + + result = middleware._should_use_read_replica(view_func) + + assert result == expected + + +@pytest.mark.unit +class TestExceptionHandling: + """Test cases for exception handling and cleanup.""" + + @patch("plane.middleware.db_routing.clear_read_replica_context") + def test_process_exception_cleans_up_context( + self, mock_clear, middleware, request_factory + ): + """Test process_exception cleans up context.""" + request = request_factory.get("/api/test/") + exception = Exception("Test exception") + + result = middleware.process_exception(request, exception) + + mock_clear.assert_called_once() + assert result is None # Don't handle the exception + + @patch("plane.middleware.db_routing.set_use_read_replica") + @patch("plane.middleware.db_routing.clear_read_replica_context") + def test_integration_full_request_cycle( + self, mock_clear, mock_set, middleware, request_factory, mock_get_response + ): + """Test complete request cycle from __call__ through process_view.""" + request = request_factory.get("/api/test/") + view_func = Mock() + view_func.use_read_replica = True + + # Call middleware and process_view manually + response = middleware(request) + middleware.process_view(request, view_func, (), {}) + + mock_set.assert_called_once_with(True) + mock_clear.assert_called_once() + assert response == mock_get_response.return_value + + +@pytest.mark.unit +class TestRealViewIntegration: + """Test middleware with real Django/DRF view classes.""" + + @patch("plane.middleware.db_routing.set_use_read_replica") + def test_with_django_class_based_view(self, mock_set, 
middleware, request_factory): + """Test middleware with actual Django CBV.""" + + class TestView(View): + use_read_replica = True + + # Simulate Django's URL resolver creating a view wrapper + view_func = TestView.as_view() + request = request_factory.get("/api/test/") + + middleware.process_view(request, view_func, (), {}) + + mock_set.assert_called_once_with(True) + + @patch("plane.middleware.db_routing.set_use_read_replica") + def test_with_drf_api_view(self, mock_set, middleware, request_factory): + """Test middleware with DRF APIView.""" + + class TestAPIView(APIView): + use_read_replica = True + + # Simulate DRF's URL pattern creating a view wrapper + view_func = TestAPIView.as_view() + request = request_factory.get("/api/test/") + + middleware.process_view(request, view_func, (), {}) + + mock_set.assert_called_once_with(True) + + @patch("plane.middleware.db_routing.set_use_read_replica") + def test_with_drf_viewset(self, mock_set, middleware, request_factory): + """Test middleware with DRF ViewSet.""" + + class TestViewSet(ViewSet): + use_read_replica = True + + # Simulate DRF router creating viewset action + view_func = TestViewSet.as_view({"get": "list"}) + request = request_factory.get("/api/test/") + + middleware.process_view(request, view_func, (), {}) + + mock_set.assert_called_once_with(True) + + +@pytest.mark.unit +class TestEdgeCases: + """Test edge cases and error conditions.""" + + def test_process_view_with_none_view_func(self, middleware, request_factory): + """Test process_view handles None view_func gracefully.""" + request = request_factory.get("/api/test/") + + result = middleware.process_view(request, None, (), {}) + + assert result is None # Should not crash + + def test_get_use_replica_attribute_with_attribute_error(self, middleware): + """Test _get_use_replica_attribute with view that raises AttributeError.""" + + # Create a view class that raises AttributeError on access + class ProblematicView: + def __getattr__(self, name): + if name 
== "use_read_replica": + raise AttributeError("Simulated attribute error") + raise AttributeError( + f"'{type(self).__name__}' object has no attribute '{name}'" + ) + + view_func = ProblematicView() + + result = middleware._get_use_replica_attribute(view_func) + + assert result is None # Should handle gracefully + + def test_multiple_exception_calls_are_safe(self, middleware, request_factory): + """Test that multiple calls to process_exception don't cause issues.""" + request = request_factory.get("/api/test/") + exception = Exception("Test exception") + + # Call multiple times + result1 = middleware.process_exception(request, exception) + result2 = middleware.process_exception(request, exception) + + assert result1 is None # Both should return None safely + assert result2 is None diff --git a/apps/api/plane/tests/unit/models/test_workspace_model.py b/apps/api/plane/tests/unit/models/test_workspace_model.py index aa3c15645..26a797512 100644 --- a/apps/api/plane/tests/unit/models/test_workspace_model.py +++ b/apps/api/plane/tests/unit/models/test_workspace_model.py @@ -1,7 +1,7 @@ import pytest from uuid import uuid4 -from plane.db.models import Workspace, WorkspaceMember, User +from plane.db.models import Workspace, WorkspaceMember @pytest.mark.unit diff --git a/apps/api/plane/urls.py b/apps/api/plane/urls.py index b692306a7..c06e67158 100644 --- a/apps/api/plane/urls.py +++ b/apps/api/plane/urls.py @@ -2,6 +2,11 @@ from django.conf import settings from django.urls import include, path, re_path +from drf_spectacular.views import ( + SpectacularAPIView, + SpectacularRedocView, + SpectacularSwaggerView, +) handler404 = "plane.app.views.error_404.custom_404_view" @@ -14,6 +19,20 @@ urlpatterns = [ path("", include("plane.web.urls")), ] +if settings.ENABLE_DRF_SPECTACULAR: + urlpatterns += [ + path("api/schema/", SpectacularAPIView.as_view(), name="schema"), + path( + "api/schema/swagger-ui/", + SpectacularSwaggerView.as_view(url_name="schema"), + name="swagger-ui", + 
), + path( + "api/schema/redoc/", + SpectacularRedocView.as_view(url_name="schema"), + name="redoc", + ), + ] if settings.DEBUG: try: diff --git a/apps/api/plane/utils/color.py b/apps/api/plane/utils/color.py new file mode 100644 index 000000000..8c45389bd --- /dev/null +++ b/apps/api/plane/utils/color.py @@ -0,0 +1,9 @@ +import random +import string + + +def get_random_color(): + """ + Get a random color in hex format + """ + return "#" + "".join(random.choices(string.hexdigits, k=6)) diff --git a/apps/api/plane/utils/content_validator.py b/apps/api/plane/utils/content_validator.py index 7b9932a35..cf7c235ee 100644 --- a/apps/api/plane/utils/content_validator.py +++ b/apps/api/plane/utils/content_validator.py @@ -1,36 +1,14 @@ # Python imports import base64 -import json -import re +import nh3 +from plane.utils.exception_logger import log_exception +from bs4 import BeautifulSoup +from collections import defaultdict # Maximum allowed size for binary data (10MB) MAX_SIZE = 10 * 1024 * 1024 -# Maximum recursion depth to prevent stack overflow -MAX_RECURSION_DEPTH = 20 - -# Dangerous text patterns that could indicate XSS or script injection -DANGEROUS_TEXT_PATTERNS = [ - r"]*>.*?", - r"javascript\s*:", - r"data\s*:\s*text/html", - r"eval\s*\(", - r"document\s*\.", - r"window\s*\.", - r"location\s*\.", -] - -# Dangerous attribute patterns for HTML attributes -DANGEROUS_ATTR_PATTERNS = [ - r"javascript\s*:", - r"data\s*:\s*text/html", - r"eval\s*\(", - r"alert\s*\(", - r"document\s*\.", - r"window\s*\.", -] - # Suspicious patterns for binary data content SUSPICIOUS_BINARY_PATTERNS = [ "]*>", - r"", - # JavaScript URLs in various attributes - r'(?:href|src|action)\s*=\s*["\']?\s*javascript:', - # Data URLs with text/html (potential XSS) - r'(?:href|src|action)\s*=\s*["\']?\s*data:text/html', - # Dangerous event handlers with JavaScript-like content - 
r'on(?:load|error|click|focus|blur|change|submit|reset|select|resize|scroll|unload|beforeunload|hashchange|popstate|storage|message|offline|online)\s*=\s*["\']?[^"\']*(?:javascript|alert|eval|document\.|window\.|location\.|history\.)[^"\']*["\']?', - # Object and embed tags that could load external content - r"<(?:object|embed)[^>]*(?:data|src)\s*=", - # Base tag that could change relative URL resolution - r"]*href\s*=", - # Dangerous iframe sources - r']*src\s*=\s*["\']?(?:javascript:|data:text/html)', - # Meta refresh redirects - r']*http-equiv\s*=\s*["\']?refresh["\']?', - # Link tags - simplified patterns - r']*rel\s*=\s*["\']?stylesheet["\']?', - r']*href\s*=\s*["\']?https?://', - r']*href\s*=\s*["\']?//', - r']*href\s*=\s*["\']?(?:data:|javascript:)', - # Style tags with external imports - r"]*>.*?@import.*?(?:https?://|//)", - # Link tags with dangerous rel types - r']*rel\s*=\s*["\']?(?:import|preload|prefetch|dns-prefetch|preconnect)["\']?', - # Forms with action attributes - r"]*action\s*=", -] - -# Dangerous JavaScript patterns for event handlers -DANGEROUS_JS_PATTERNS = [ - r"alert\s*\(", - r"eval\s*\(", - r"document\s*\.", - r"window\s*\.", - r"location\s*\.", - r"fetch\s*\(", - r"XMLHttpRequest", - r"innerHTML\s*=", - r"outerHTML\s*=", - r"document\.write", - r"script\s*>", -] - -# HTML self-closing tags that don't need closing tags -SELF_CLOSING_TAGS = { - "img", - "br", - "hr", - "input", - "meta", - "link", - "area", - "base", - "col", - "embed", - "source", - "track", - "wbr", -} - def validate_binary_data(data): """ - Validate that binary data appears to be valid document format and doesn't contain malicious content. + Validate that binary data appears to be a valid document format + and doesn't contain malicious content. 
Args: data (bytes or str): The binary data to validate, or base64-encoded string @@ -149,209 +64,180 @@ def validate_binary_data(data): return True, None -def validate_html_content(html_content): +# Combine custom components and editor-specific nodes into a single set of tags +CUSTOM_TAGS = { + # editor node/tag names + "mention-component", + "label", + "input", + "image-component", +} +ALLOWED_TAGS = nh3.ALLOWED_TAGS | CUSTOM_TAGS + +# Merge nh3 defaults with all attributes used across our custom components +ATTRIBUTES = { + "*": { + "class", + "id", + "title", + "role", + "aria-label", + "aria-hidden", + "style", + "start", + "type", + # common editor data-* attributes seen in stored HTML + # (wildcards like data-* are NOT supported by nh3; we add known keys + # here and dynamically include all data-* seen in the input below) + "data-tight", + "data-node-type", + "data-type", + "data-checked", + "data-background-color", + "data-text-color", + "data-name", + # callout attributes + "data-icon-name", + "data-icon-color", + "data-background", + "data-emoji-unicode", + "data-emoji-url", + "data-logo-in-use", + "data-block-type", + }, + "a": {"href", "target"}, + # editor node/tag attributes + "image-component": { + "id", + "width", + "height", + "aspectRatio", + "aspectratio", + "src", + "alignment", + }, + "img": { + "width", + "height", + "aspectRatio", + "aspectratio", + "alignment", + "src", + "alt", + "title", + }, + "mention-component": {"id", "entity_identifier", "entity_name"}, + "th": { + "colspan", + "rowspan", + "colwidth", + "background", + "hideContent", + "hidecontent", + "style", + }, + "td": { + "colspan", + "rowspan", + "colwidth", + "background", + "textColor", + "textcolor", + "hideContent", + "hidecontent", + "style", + }, + "tr": {"background", "textColor", "textcolor", "style"}, + "pre": {"language"}, + "code": {"language", "spellcheck"}, + "input": {"type", "checked"}, +} + +SAFE_PROTOCOLS = {"http", "https", "mailto", "tel"} + + +def 
_compute_html_sanitization_diff(before_html: str, after_html: str): """ - Validate that HTML content is safe and doesn't contain malicious patterns. + Compute a coarse diff between original and sanitized HTML. - Args: - html_content (str): The HTML content to validate + Returns a dict with: + - removed_tags: mapping[tag] -> removed_count + - removed_attributes: mapping[tag] -> sorted list of attribute names removed + """ + try: - Returns: - tuple: (is_valid: bool, error_message: str or None) + def collect(soup): + tag_counts = defaultdict(int) + attrs_by_tag = defaultdict(set) + for el in soup.find_all(True): + tag_name = (el.name or "").lower() + if not tag_name: + continue + tag_counts[tag_name] += 1 + for attr_name in list(el.attrs.keys()): + if isinstance(attr_name, str) and attr_name: + attrs_by_tag[tag_name].add(attr_name.lower()) + return tag_counts, attrs_by_tag + + soup_before = BeautifulSoup(before_html or "", "html.parser") + soup_after = BeautifulSoup(after_html or "", "html.parser") + + counts_before, attrs_before = collect(soup_before) + counts_after, attrs_after = collect(soup_after) + + removed_tags = {} + for tag, cnt_before in counts_before.items(): + cnt_after = counts_after.get(tag, 0) + if cnt_after < cnt_before: + removed = cnt_before - cnt_after + removed_tags[tag] = removed + + removed_attributes = {} + for tag, before_set in attrs_before.items(): + after_set = attrs_after.get(tag, set()) + removed = before_set - after_set + if removed: + removed_attributes[tag] = sorted(list(removed)) + + return {"removed_tags": removed_tags, "removed_attributes": removed_attributes} + except Exception: + # Best-effort only; if diffing fails we don't block the request + return {"removed_tags": {}, "removed_attributes": {}} + + +def validate_html_content(html_content: str): + """ + Sanitize HTML content using nh3. 
+ Returns a tuple: (is_valid, error_message, clean_html) """ if not html_content: - return True, None # Empty is OK + return True, None, None # Size check - 10MB limit (consistent with binary validation) if len(html_content.encode("utf-8")) > MAX_SIZE: - return False, "HTML content exceeds maximum size limit (10MB)" + return False, "HTML content exceeds maximum size limit (10MB)", None - # Check for specific malicious patterns (simplified and more reliable) - for pattern in MALICIOUS_HTML_PATTERNS: - if re.search(pattern, html_content, re.IGNORECASE | re.DOTALL): - return ( - False, - f"HTML content contains potentially malicious patterns: {pattern}", + try: + clean_html = nh3.clean( + html_content, + tags=ALLOWED_TAGS, + attributes=ATTRIBUTES, + url_schemes=SAFE_PROTOCOLS, + ) + # Report removals to logger (Sentry) if anything was stripped + diff = _compute_html_sanitization_diff(html_content, clean_html) + if diff.get("removed_tags") or diff.get("removed_attributes"): + try: + import json + + summary = json.dumps(diff) + except Exception: + summary = str(diff) + log_exception( + f"HTML sanitization removals: {summary}", + warning=True, ) - - # Additional check for inline event handlers that contain suspicious content - # This is more permissive - only blocks if the event handler contains actual dangerous code - event_handler_pattern = r'on\w+\s*=\s*["\']([^"\']*)["\']' - event_matches = re.findall(event_handler_pattern, html_content, re.IGNORECASE) - - for handler_content in event_matches: - for js_pattern in DANGEROUS_JS_PATTERNS: - if re.search(js_pattern, handler_content, re.IGNORECASE): - return ( - False, - f"HTML content contains dangerous JavaScript in event handler: {handler_content[:100]}", - ) - - # Basic HTML structure validation - check for common malformed tags - try: - # Count opening and closing tags for basic structure validation - opening_tags = re.findall(r"<(\w+)[^>]*>", html_content) - closing_tags = re.findall(r"", html_content) - - # Filter 
out self-closing tags from opening tags - opening_tags_filtered = [ - tag for tag in opening_tags if tag.lower() not in SELF_CLOSING_TAGS - ] - - # Basic check - if we have significantly more opening than closing tags, it might be malformed - if len(opening_tags_filtered) > len(closing_tags) + 10: # Allow some tolerance - return False, "HTML content appears to be malformed (unmatched tags)" - - except Exception: - # If HTML parsing fails, we'll allow it - pass - - return True, None - - -def validate_json_content(json_content): - """ - Validate that JSON content is safe and doesn't contain malicious patterns. - - Args: - json_content (dict): The JSON content to validate - - Returns: - tuple: (is_valid: bool, error_message: str or None) - """ - if not json_content: - return True, None # Empty is OK - - try: - # Size check - 10MB limit (consistent with other validations) - json_str = json.dumps(json_content) - if len(json_str.encode("utf-8")) > MAX_SIZE: - return False, "JSON content exceeds maximum size limit (10MB)" - - # Basic structure validation for page description JSON - if isinstance(json_content, dict): - # Check for expected page description structure - # This is based on ProseMirror/Tiptap JSON structure - if "type" in json_content and json_content.get("type") == "doc": - # Valid document structure - if "content" in json_content and isinstance( - json_content["content"], list - ): - # Recursively check content for suspicious patterns - is_valid, error_msg = _validate_json_content_array( - json_content["content"] - ) - if not is_valid: - return False, error_msg - elif "type" not in json_content and "content" not in json_content: - # Allow other JSON structures but validate for suspicious content - is_valid, error_msg = _validate_json_content_recursive(json_content) - if not is_valid: - return False, error_msg - else: - return False, "JSON description must be a valid object" - - except (TypeError, ValueError) as e: - return False, "Invalid JSON structure" + 
return True, None, clean_html except Exception as e: - return False, "Failed to validate JSON content" - - return True, None - - -def _validate_json_content_array(content, depth=0): - """ - Validate JSON content array for suspicious patterns. - - Args: - content (list): Array of content nodes to validate - depth (int): Current recursion depth (default: 0) - - Returns: - tuple: (is_valid: bool, error_message: str or None) - """ - # Check recursion depth to prevent stack overflow - if depth > MAX_RECURSION_DEPTH: - return False, f"Maximum recursion depth ({MAX_RECURSION_DEPTH}) exceeded" - - if not isinstance(content, list): - return True, None - - for node in content: - if isinstance(node, dict): - # Check text content for suspicious patterns (more targeted) - if node.get("type") == "text" and "text" in node: - text_content = node["text"] - for pattern in DANGEROUS_TEXT_PATTERNS: - if re.search(pattern, text_content, re.IGNORECASE): - return ( - False, - "JSON content contains suspicious script patterns in text", - ) - - # Check attributes for suspicious content (more targeted) - if "attrs" in node and isinstance(node["attrs"], dict): - for attr_name, attr_value in node["attrs"].items(): - if isinstance(attr_value, str): - # Only check specific attributes that could be dangerous - if attr_name.lower() in [ - "href", - "src", - "action", - "onclick", - "onload", - "onerror", - ]: - for pattern in DANGEROUS_ATTR_PATTERNS: - if re.search(pattern, attr_value, re.IGNORECASE): - return ( - False, - f"JSON content contains dangerous pattern in {attr_name} attribute", - ) - - # Recursively check nested content - if "content" in node and isinstance(node["content"], list): - is_valid, error_msg = _validate_json_content_array( - node["content"], depth + 1 - ) - if not is_valid: - return False, error_msg - - return True, None - - -def _validate_json_content_recursive(obj, depth=0): - """ - Recursively validate JSON object for suspicious content. 
- - Args: - obj: JSON object (dict, list, or primitive) to validate - depth (int): Current recursion depth (default: 0) - - Returns: - tuple: (is_valid: bool, error_message: str or None) - """ - # Check recursion depth to prevent stack overflow - if depth > MAX_RECURSION_DEPTH: - return False, f"Maximum recursion depth ({MAX_RECURSION_DEPTH}) exceeded" - if isinstance(obj, dict): - for key, value in obj.items(): - if isinstance(value, str): - # Check for dangerous patterns using module constants - for pattern in DANGEROUS_TEXT_PATTERNS: - if re.search(pattern, value, re.IGNORECASE): - return ( - False, - "JSON content contains suspicious script patterns", - ) - elif isinstance(value, (dict, list)): - is_valid, error_msg = _validate_json_content_recursive(value, depth + 1) - if not is_valid: - return False, error_msg - elif isinstance(obj, list): - for item in obj: - is_valid, error_msg = _validate_json_content_recursive(item, depth + 1) - if not is_valid: - return False, error_msg - - return True, None + log_exception(e) + return False, "Failed to sanitize HTML", None diff --git a/apps/api/plane/utils/core/__init__.py b/apps/api/plane/utils/core/__init__.py new file mode 100644 index 000000000..37c6e3741 --- /dev/null +++ b/apps/api/plane/utils/core/__init__.py @@ -0,0 +1,21 @@ +""" +Core utilities for Plane database routing and request scoping. +This package contains essential components for managing read replica routing +and request-scoped context in the Plane application. 
+""" + +from .dbrouters import ReadReplicaRouter +from .mixins import ReadReplicaControlMixin +from .request_scope import ( + set_use_read_replica, + should_use_read_replica, + clear_read_replica_context, +) + +__all__ = [ + "ReadReplicaRouter", + "ReadReplicaControlMixin", + "set_use_read_replica", + "should_use_read_replica", + "clear_read_replica_context", +] diff --git a/apps/api/plane/utils/core/dbrouters.py b/apps/api/plane/utils/core/dbrouters.py new file mode 100644 index 000000000..2c5b67a27 --- /dev/null +++ b/apps/api/plane/utils/core/dbrouters.py @@ -0,0 +1,73 @@ +""" +Database router for read replica selection. +This router determines which database to use for read/write operations +based on the request context set by the ReadReplicaRoutingMiddleware. +""" + +import logging +from typing import Type + +from django.db import models + +from .request_scope import should_use_read_replica + +logger = logging.getLogger("plane.db") + + +class ReadReplicaRouter: + """ + Database router that directs read operations to replica when appropriate. + This router works in conjunction with ReadReplicaRoutingMiddleware to: + - Route read operations to replica database when request context allows + - Always route write operations to primary database + - Ensure migrations only run on primary database + """ + + def db_for_read(self, model: Type[models.Model], **hints) -> str: + """ + Determine which database to use for read operations. + Args: + model: The Django model class being queried + **hints: Additional routing hints + Returns: + str: Database alias ('replica' or 'default') + """ + if should_use_read_replica(): + logger.debug(f"Routing read for {model._meta.label} to replica database") + return "replica" + else: + logger.debug(f"Routing read for {model._meta.label} to primary database") + return "default" + + def db_for_write(self, model: Type[models.Model], **hints) -> str: + """ + Determine which database to use for write operations. 
+ All write operations always go to the primary database to ensure + data consistency and avoid replication lag issues. + Args: + model: The Django model class being written to + **hints: Additional routing hints + Returns: + str: Always returns 'default' (primary database) + """ + logger.debug(f"Routing write for {model._meta.label} to primary database") + return "default" + + def allow_migrate( + self, db: str, app_label: str, model_name: str = None, **hints + ) -> bool: + """ + Ensure migrations only run on the primary database. + Args: + db: Database alias + app_label: Application label + model_name: Model name (optional) + **hints: Additional routing hints + Returns: + bool: True if migration is allowed on this database + """ + # Only allow migrations on the primary database + allowed = db == "default" + if not allowed: + logger.debug(f"Blocking migration for {app_label} on {db} database") + return allowed diff --git a/apps/api/plane/utils/core/mixins/__init__.py b/apps/api/plane/utils/core/mixins/__init__.py new file mode 100644 index 000000000..cedd9d455 --- /dev/null +++ b/apps/api/plane/utils/core/mixins/__init__.py @@ -0,0 +1,11 @@ +""" +Core mixins for read replica functionality. +This package provides mixins for different aspects of read replica management +in Django and Django REST Framework applications. +""" + +from .view import ReadReplicaControlMixin + +__all__ = [ + "ReadReplicaControlMixin", +] diff --git a/apps/api/plane/utils/core/mixins/view.py b/apps/api/plane/utils/core/mixins/view.py new file mode 100644 index 000000000..e15ec6771 --- /dev/null +++ b/apps/api/plane/utils/core/mixins/view.py @@ -0,0 +1,20 @@ +""" +Mixins for Django REST Framework views. +""" + + +class ReadReplicaControlMixin: + """ + Mixin to control read replica usage in DRF views. + Set use_read_replica = True/False to route read operations to + replica/primary database. Works with ReadReplicaRoutingMiddleware. 
+ Usage: + class MyViewSet(ReadReplicaControlMixin, ModelViewSet): + use_read_replica = True # Use replica for GET requests + Note: + - Only affects GET, HEAD, OPTIONS requests + - Write operations always use primary database + - Defaults to True for safe replica usage + """ + + use_read_replica: bool = True diff --git a/apps/api/plane/utils/core/request_scope.py b/apps/api/plane/utils/core/request_scope.py new file mode 100644 index 000000000..b09e77101 --- /dev/null +++ b/apps/api/plane/utils/core/request_scope.py @@ -0,0 +1,72 @@ +""" +Database routing utilities for read replica selection. +This module provides request-scoped context management for database routing, +specifically for determining when to use read replicas vs primary database. +Used in conjunction with middleware and DRF views that set use_read_replica=True. +The context is maintained per request to ensure proper isolation between +concurrent requests in async environments. +""" + +from asgiref.local import Local + +__all__ = [ + "set_use_read_replica", + "should_use_read_replica", + "clear_read_replica_context", +] + +# Request-scoped context storage for database routing preferences +# Uses asgiref.local.Local which provides ContextVar under the hood +# This ensures proper context isolation per request in async environments +_db_routing_context = Local() + + +def set_use_read_replica(use_replica: bool) -> None: + """ + Mark the current request context to use read replica database. + This function sets a request-scoped flag that determines database routing. + The context is isolated per request to ensure thread safety in async environments. 
+ This function is typically called from: + - Middleware that detects read-only operations + - DRF views with use_read_replica=True attribute + - API endpoints that only perform read operations + Args: + use_replica (bool): True to route database queries to read replica, + False to use primary database + Note: + The context is automatically isolated per request and should be + cleared at the end of each request using clear_read_replica_context(). + """ + _db_routing_context.use_read_replica = bool(use_replica) + + +def should_use_read_replica() -> bool: + """ + Check if the current request should use read replica database. + This function reads the request-scoped context to determine database routing. + It's called by the database router to decide which connection to use. + Returns: + bool: True if queries should be routed to read replica, + False if they should use primary database (default) + Note: + Returns False by default if no context is set for the current request. + The context is automatically isolated per request. + """ + return getattr(_db_routing_context, "use_read_replica", False) + + +def clear_read_replica_context() -> None: + """ + Clear the read replica context for the current request. + This function should be called at the end of each request to ensure + that context doesn't leak between requests. Typically called from + middleware during request cleanup. 
+ This is important for: + - Preventing context leakage between requests + - Ensuring clean state for each new request + - Proper memory management in long-running processes + """ + try: + delattr(_db_routing_context, "use_read_replica") + except AttributeError: + pass diff --git a/apps/api/plane/utils/grouper.py b/apps/api/plane/utils/grouper.py index 89e154a7f..d69a1f583 100644 --- a/apps/api/plane/utils/grouper.py +++ b/apps/api/plane/utils/grouper.py @@ -1,7 +1,7 @@ # Django imports from django.contrib.postgres.aggregates import ArrayAgg from django.contrib.postgres.fields import ArrayField -from django.db.models import Q, UUIDField, Value, QuerySet +from django.db.models import Q, UUIDField, Value, QuerySet, OuterRef, Subquery from django.db.models.functions import Coalesce # Module imports @@ -14,6 +14,9 @@ from plane.db.models import ( ProjectMember, State, WorkspaceMember, + IssueAssignee, + ModuleIssue, + IssueLabel, ) from typing import Optional, Dict, Tuple, Any, Union, List @@ -39,33 +42,52 @@ def issue_queryset_grouper( if group_key in GROUP_FILTER_MAPPER: queryset = queryset.filter(GROUP_FILTER_MAPPER[group_key]) + issue_assignee_subquery = Subquery( + IssueAssignee.objects.filter( + issue_id=OuterRef("pk"), + deleted_at__isnull=True, + ) + .values("issue_id") + .annotate(arr=ArrayAgg("assignee_id", distinct=True)) + .values("arr") + ) + + issue_module_subquery = Subquery( + ModuleIssue.objects.filter( + issue_id=OuterRef("pk"), + deleted_at__isnull=True, + module__archived_at__isnull=True, + ) + .values("issue_id") + .annotate(arr=ArrayAgg("module_id", distinct=True)) + .values("arr") + ) + + issue_label_subquery = Subquery( + IssueLabel.objects.filter(issue_id=OuterRef("pk"), deleted_at__isnull=True) + .values("issue_id") + .annotate(arr=ArrayAgg("label_id", distinct=True)) + .values("arr") + ) + annotations_map: Dict[str, Tuple[str, Q]] = { - "assignee_ids": ( - "assignees__id", - ~Q(assignees__id__isnull=True) & 
Q(issue_assignee__deleted_at__isnull=True), + "assignee_ids": Coalesce( + issue_assignee_subquery, Value([], output_field=ArrayField(UUIDField())) ), - "label_ids": ( - "labels__id", - ~Q(labels__id__isnull=True) & Q(label_issue__deleted_at__isnull=True), + "label_ids": Coalesce( + issue_label_subquery, Value([], output_field=ArrayField(UUIDField())) ), - "module_ids": ( - "issue_module__module_id", - ( - ~Q(issue_module__module_id__isnull=True) - & Q(issue_module__module__archived_at__isnull=True) - & Q(issue_module__deleted_at__isnull=True) - ), + "module_ids": Coalesce( + issue_module_subquery, Value([], output_field=ArrayField(UUIDField())) ), } - default_annotations: Dict[str, Any] = { - key: Coalesce( - ArrayAgg(field, distinct=True, filter=condition), - Value([], output_field=ArrayField(UUIDField())), - ) - for key, (field, condition) in annotations_map.items() - if FIELD_MAPPER.get(key) != group_by or FIELD_MAPPER.get(key) != sub_group_by - } + default_annotations: Dict[str, Any] = {} + + for key, expression in annotations_map.items(): + if FIELD_MAPPER.get(key) in {group_by, sub_group_by}: + continue + default_annotations[key] = expression return queryset.annotate(**default_annotations) diff --git a/apps/api/plane/utils/openapi/README.md b/apps/api/plane/utils/openapi/README.md new file mode 100644 index 000000000..9ac82cdd3 --- /dev/null +++ b/apps/api/plane/utils/openapi/README.md @@ -0,0 +1,102 @@ +# OpenAPI Utilities Module + +This module provides a well-organized structure for OpenAPI/drf-spectacular utilities, replacing the monolithic `openapi_spec_helpers.py` file with a more maintainable modular approach. 
+ +## Structure + +``` +plane/utils/openapi/ +├── __init__.py # Main module that re-exports everything +├── auth.py # Authentication extensions +├── parameters.py # Common OpenAPI parameters +├── responses.py # Common OpenAPI responses +├── examples.py # Common OpenAPI examples +├── decorators.py # Helper decorators for different endpoint types +└── hooks.py # Schema processing hooks (pre/post processing) +``` + +## Usage + +### Import Everything (Recommended for backwards compatibility) +```python +from plane.utils.openapi import ( + asset_docs, + ASSET_ID_PARAMETER, + UNAUTHORIZED_RESPONSE, + # ... other imports +) +``` + +### Import from Specific Modules (Recommended for new code) +```python +from plane.utils.openapi.decorators import asset_docs +from plane.utils.openapi.parameters import ASSET_ID_PARAMETER +from plane.utils.openapi.responses import UNAUTHORIZED_RESPONSE +``` + +## Module Contents + +### auth.py +- `APIKeyAuthenticationExtension` - X-API-Key authentication +- `APITokenAuthenticationExtension` - Bearer token authentication + +### parameters.py +- Path parameters: `WORKSPACE_SLUG_PARAMETER`, `PROJECT_ID_PARAMETER`, `ISSUE_ID_PARAMETER`, `ASSET_ID_PARAMETER` +- Query parameters: `CURSOR_PARAMETER`, `PER_PAGE_PARAMETER` + +### responses.py +- Auth responses: `UNAUTHORIZED_RESPONSE`, `FORBIDDEN_RESPONSE` +- Resource responses: `NOT_FOUND_RESPONSE`, `VALIDATION_ERROR_RESPONSE` +- Asset responses: `PRESIGNED_URL_SUCCESS_RESPONSE`, `ASSET_UPDATED_RESPONSE`, etc. +- Generic asset responses: `GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE`, `ASSET_DOWNLOAD_SUCCESS_RESPONSE`, etc. 
+ +### examples.py +- `FILE_UPLOAD_EXAMPLE`, `WORKSPACE_EXAMPLE`, `PROJECT_EXAMPLE`, `ISSUE_EXAMPLE` + +### decorators.py +- `workspace_docs()` - For workspace endpoints +- `project_docs()` - For project endpoints +- `issue_docs()` - For issue/work item endpoints +- `asset_docs()` - For asset endpoints + +### hooks.py +- `preprocess_filter_api_v1_paths()` - Filters API v1 paths +- `postprocess_assign_tags()` - Assigns tags based on URL patterns +- `generate_operation_summary()` - Generates operation summaries + +## Migration Status + +✅ **FULLY COMPLETE** - All components from the legacy `openapi_spec_helpers.py` have been successfully migrated to this modular structure and the old file has been completely removed. All imports have been updated to use the new modular structure. + +### What was migrated: +- ✅ All authentication extensions +- ✅ All common parameters and responses +- ✅ All helper decorators +- ✅ All schema processing hooks +- ✅ All examples and reusable components +- ✅ All asset view decorators converted to use new helpers +- ✅ All view imports updated to new module paths +- ✅ Legacy file completely removed + +### Files updated: +- `plane/api/views/asset.py` - All methods use new `@asset_docs` helpers +- `plane/api/views/project.py` - Import updated +- `plane/api/views/user.py` - Import updated +- `plane/api/views/state.py` - Import updated +- `plane/api/views/intake.py` - Import updated +- `plane/api/views/member.py` - Import updated +- `plane/api/views/module.py` - Import updated +- `plane/api/views/cycle.py` - Import updated +- `plane/api/views/issue.py` - Import updated +- `plane/settings/common.py` - Hook paths updated +- `plane/api/apps.py` - Auth extension import updated + +## Benefits + +1. **Better Organization**: Related functionality is grouped together +2. **Easier Maintenance**: Changes to specific areas only affect relevant files +3. **Improved Discoverability**: Clear module names make it easy to find what you need +4. 
**Backwards Compatibility**: All existing imports continue to work +5. **Reduced Coupling**: Import only what you need from specific modules +6. **Consistent Documentation**: All endpoints now use standardized helpers +7. **Massive Code Reduction**: ~80% reduction in decorator bloat using reusable components \ No newline at end of file diff --git a/apps/api/plane/utils/openapi/__init__.py b/apps/api/plane/utils/openapi/__init__.py new file mode 100644 index 000000000..bf6821258 --- /dev/null +++ b/apps/api/plane/utils/openapi/__init__.py @@ -0,0 +1,315 @@ +""" +OpenAPI utilities for drf-spectacular integration. + +This module provides reusable components for API documentation: +- Authentication extensions +- Common parameters and responses +- Helper decorators +- Schema preprocessing hooks +- Examples +""" + +# Authentication extensions +from .auth import APIKeyAuthenticationExtension + +# Parameters +from .parameters import ( + WORKSPACE_SLUG_PARAMETER, + PROJECT_ID_PARAMETER, + PROJECT_PK_PARAMETER, + PROJECT_IDENTIFIER_PARAMETER, + ISSUE_IDENTIFIER_PARAMETER, + ASSET_ID_PARAMETER, + CYCLE_ID_PARAMETER, + MODULE_ID_PARAMETER, + MODULE_PK_PARAMETER, + ISSUE_ID_PARAMETER, + STATE_ID_PARAMETER, + LABEL_ID_PARAMETER, + COMMENT_ID_PARAMETER, + LINK_ID_PARAMETER, + ATTACHMENT_ID_PARAMETER, + ACTIVITY_ID_PARAMETER, + CURSOR_PARAMETER, + PER_PAGE_PARAMETER, + EXTERNAL_ID_PARAMETER, + EXTERNAL_SOURCE_PARAMETER, + ORDER_BY_PARAMETER, + SEARCH_PARAMETER, + SEARCH_PARAMETER_REQUIRED, + LIMIT_PARAMETER, + WORKSPACE_SEARCH_PARAMETER, + PROJECT_ID_QUERY_PARAMETER, + CYCLE_VIEW_PARAMETER, + FIELDS_PARAMETER, + EXPAND_PARAMETER, +) + +# Responses +from .responses import ( + UNAUTHORIZED_RESPONSE, + FORBIDDEN_RESPONSE, + NOT_FOUND_RESPONSE, + VALIDATION_ERROR_RESPONSE, + DELETED_RESPONSE, + ARCHIVED_RESPONSE, + UNARCHIVED_RESPONSE, + INVALID_REQUEST_RESPONSE, + CONFLICT_RESPONSE, + ADMIN_ONLY_RESPONSE, + CANNOT_DELETE_RESPONSE, + CANNOT_ARCHIVE_RESPONSE, + 
REQUIRED_FIELDS_RESPONSE, + PROJECT_NOT_FOUND_RESPONSE, + WORKSPACE_NOT_FOUND_RESPONSE, + PROJECT_NAME_TAKEN_RESPONSE, + ISSUE_NOT_FOUND_RESPONSE, + WORK_ITEM_NOT_FOUND_RESPONSE, + EXTERNAL_ID_EXISTS_RESPONSE, + LABEL_NOT_FOUND_RESPONSE, + LABEL_NAME_EXISTS_RESPONSE, + MODULE_NOT_FOUND_RESPONSE, + MODULE_ISSUE_NOT_FOUND_RESPONSE, + CYCLE_CANNOT_ARCHIVE_RESPONSE, + STATE_NAME_EXISTS_RESPONSE, + STATE_CANNOT_DELETE_RESPONSE, + COMMENT_NOT_FOUND_RESPONSE, + LINK_NOT_FOUND_RESPONSE, + ATTACHMENT_NOT_FOUND_RESPONSE, + BAD_SEARCH_REQUEST_RESPONSE, + PRESIGNED_URL_SUCCESS_RESPONSE, + GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE, + GENERIC_ASSET_VALIDATION_ERROR_RESPONSE, + ASSET_CONFLICT_RESPONSE, + ASSET_DOWNLOAD_SUCCESS_RESPONSE, + ASSET_DOWNLOAD_ERROR_RESPONSE, + ASSET_UPDATED_RESPONSE, + ASSET_DELETED_RESPONSE, + ASSET_NOT_FOUND_RESPONSE, + create_paginated_response, +) + +# Examples +from .examples import ( + FILE_UPLOAD_EXAMPLE, + WORKSPACE_EXAMPLE, + PROJECT_EXAMPLE, + ISSUE_EXAMPLE, + USER_EXAMPLE, + get_sample_for_schema, + # Request Examples + ISSUE_CREATE_EXAMPLE, + ISSUE_UPDATE_EXAMPLE, + ISSUE_UPSERT_EXAMPLE, + LABEL_CREATE_EXAMPLE, + LABEL_UPDATE_EXAMPLE, + ISSUE_LINK_CREATE_EXAMPLE, + ISSUE_LINK_UPDATE_EXAMPLE, + ISSUE_COMMENT_CREATE_EXAMPLE, + ISSUE_COMMENT_UPDATE_EXAMPLE, + ISSUE_ATTACHMENT_UPLOAD_EXAMPLE, + ATTACHMENT_UPLOAD_CONFIRM_EXAMPLE, + CYCLE_CREATE_EXAMPLE, + CYCLE_UPDATE_EXAMPLE, + CYCLE_ISSUE_REQUEST_EXAMPLE, + TRANSFER_CYCLE_ISSUE_EXAMPLE, + MODULE_CREATE_EXAMPLE, + MODULE_UPDATE_EXAMPLE, + MODULE_ISSUE_REQUEST_EXAMPLE, + PROJECT_CREATE_EXAMPLE, + PROJECT_UPDATE_EXAMPLE, + STATE_CREATE_EXAMPLE, + STATE_UPDATE_EXAMPLE, + INTAKE_ISSUE_CREATE_EXAMPLE, + INTAKE_ISSUE_UPDATE_EXAMPLE, + # Response Examples + CYCLE_EXAMPLE, + TRANSFER_CYCLE_ISSUE_SUCCESS_EXAMPLE, + TRANSFER_CYCLE_ISSUE_ERROR_EXAMPLE, + TRANSFER_CYCLE_COMPLETED_ERROR_EXAMPLE, + MODULE_EXAMPLE, + STATE_EXAMPLE, + LABEL_EXAMPLE, + ISSUE_LINK_EXAMPLE, + ISSUE_COMMENT_EXAMPLE, + 
ISSUE_ATTACHMENT_EXAMPLE, + ISSUE_ATTACHMENT_NOT_UPLOADED_EXAMPLE, + INTAKE_ISSUE_EXAMPLE, + MODULE_ISSUE_EXAMPLE, + ISSUE_SEARCH_EXAMPLE, + WORKSPACE_MEMBER_EXAMPLE, + PROJECT_MEMBER_EXAMPLE, + CYCLE_ISSUE_EXAMPLE, +) + +# Helper decorators +from .decorators import ( + workspace_docs, + project_docs, + issue_docs, + intake_docs, + asset_docs, + user_docs, + cycle_docs, + work_item_docs, + label_docs, + issue_link_docs, + issue_comment_docs, + issue_activity_docs, + issue_attachment_docs, + module_docs, + module_issue_docs, + state_docs, +) + +# Schema processing hooks +from .hooks import ( + preprocess_filter_api_v1_paths, + generate_operation_summary, +) + +__all__ = [ + # Authentication + "APIKeyAuthenticationExtension", + # Parameters + "WORKSPACE_SLUG_PARAMETER", + "PROJECT_ID_PARAMETER", + "PROJECT_PK_PARAMETER", + "PROJECT_IDENTIFIER_PARAMETER", + "ISSUE_IDENTIFIER_PARAMETER", + "ASSET_ID_PARAMETER", + "CYCLE_ID_PARAMETER", + "MODULE_ID_PARAMETER", + "MODULE_PK_PARAMETER", + "ISSUE_ID_PARAMETER", + "STATE_ID_PARAMETER", + "LABEL_ID_PARAMETER", + "COMMENT_ID_PARAMETER", + "LINK_ID_PARAMETER", + "ATTACHMENT_ID_PARAMETER", + "ACTIVITY_ID_PARAMETER", + "CURSOR_PARAMETER", + "PER_PAGE_PARAMETER", + "EXTERNAL_ID_PARAMETER", + "EXTERNAL_SOURCE_PARAMETER", + "ORDER_BY_PARAMETER", + "SEARCH_PARAMETER", + "SEARCH_PARAMETER_REQUIRED", + "LIMIT_PARAMETER", + "WORKSPACE_SEARCH_PARAMETER", + "PROJECT_ID_QUERY_PARAMETER", + "CYCLE_VIEW_PARAMETER", + "FIELDS_PARAMETER", + "EXPAND_PARAMETER", + # Responses + "UNAUTHORIZED_RESPONSE", + "FORBIDDEN_RESPONSE", + "NOT_FOUND_RESPONSE", + "VALIDATION_ERROR_RESPONSE", + "DELETED_RESPONSE", + "ARCHIVED_RESPONSE", + "UNARCHIVED_RESPONSE", + "INVALID_REQUEST_RESPONSE", + "CONFLICT_RESPONSE", + "ADMIN_ONLY_RESPONSE", + "CANNOT_DELETE_RESPONSE", + "CANNOT_ARCHIVE_RESPONSE", + "REQUIRED_FIELDS_RESPONSE", + "PROJECT_NOT_FOUND_RESPONSE", + "WORKSPACE_NOT_FOUND_RESPONSE", + "PROJECT_NAME_TAKEN_RESPONSE", + "ISSUE_NOT_FOUND_RESPONSE", + 
"WORK_ITEM_NOT_FOUND_RESPONSE", + "EXTERNAL_ID_EXISTS_RESPONSE", + "LABEL_NOT_FOUND_RESPONSE", + "LABEL_NAME_EXISTS_RESPONSE", + "MODULE_NOT_FOUND_RESPONSE", + "MODULE_ISSUE_NOT_FOUND_RESPONSE", + "CYCLE_CANNOT_ARCHIVE_RESPONSE", + "STATE_NAME_EXISTS_RESPONSE", + "STATE_CANNOT_DELETE_RESPONSE", + "COMMENT_NOT_FOUND_RESPONSE", + "LINK_NOT_FOUND_RESPONSE", + "ATTACHMENT_NOT_FOUND_RESPONSE", + "BAD_SEARCH_REQUEST_RESPONSE", + "create_paginated_response", + "PRESIGNED_URL_SUCCESS_RESPONSE", + "GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE", + "GENERIC_ASSET_VALIDATION_ERROR_RESPONSE", + "ASSET_CONFLICT_RESPONSE", + "ASSET_DOWNLOAD_SUCCESS_RESPONSE", + "ASSET_DOWNLOAD_ERROR_RESPONSE", + "ASSET_UPDATED_RESPONSE", + "ASSET_DELETED_RESPONSE", + "ASSET_NOT_FOUND_RESPONSE", + # Examples + "FILE_UPLOAD_EXAMPLE", + "WORKSPACE_EXAMPLE", + "PROJECT_EXAMPLE", + "ISSUE_EXAMPLE", + "USER_EXAMPLE", + "get_sample_for_schema", + # Request Examples + "ISSUE_CREATE_EXAMPLE", + "ISSUE_UPDATE_EXAMPLE", + "ISSUE_UPSERT_EXAMPLE", + "LABEL_CREATE_EXAMPLE", + "LABEL_UPDATE_EXAMPLE", + "ISSUE_LINK_CREATE_EXAMPLE", + "ISSUE_LINK_UPDATE_EXAMPLE", + "ISSUE_COMMENT_CREATE_EXAMPLE", + "ISSUE_COMMENT_UPDATE_EXAMPLE", + "ISSUE_ATTACHMENT_UPLOAD_EXAMPLE", + "ATTACHMENT_UPLOAD_CONFIRM_EXAMPLE", + "CYCLE_CREATE_EXAMPLE", + "CYCLE_UPDATE_EXAMPLE", + "CYCLE_ISSUE_REQUEST_EXAMPLE", + "TRANSFER_CYCLE_ISSUE_EXAMPLE", + "MODULE_CREATE_EXAMPLE", + "MODULE_UPDATE_EXAMPLE", + "MODULE_ISSUE_REQUEST_EXAMPLE", + "PROJECT_CREATE_EXAMPLE", + "PROJECT_UPDATE_EXAMPLE", + "STATE_CREATE_EXAMPLE", + "STATE_UPDATE_EXAMPLE", + "INTAKE_ISSUE_CREATE_EXAMPLE", + "INTAKE_ISSUE_UPDATE_EXAMPLE", + # Response Examples + "CYCLE_EXAMPLE", + "TRANSFER_CYCLE_ISSUE_SUCCESS_EXAMPLE", + "TRANSFER_CYCLE_ISSUE_ERROR_EXAMPLE", + "TRANSFER_CYCLE_COMPLETED_ERROR_EXAMPLE", + "MODULE_EXAMPLE", + "STATE_EXAMPLE", + "LABEL_EXAMPLE", + "ISSUE_LINK_EXAMPLE", + "ISSUE_COMMENT_EXAMPLE", + "ISSUE_ATTACHMENT_EXAMPLE", + "ISSUE_ATTACHMENT_NOT_UPLOADED_EXAMPLE", 
+ "INTAKE_ISSUE_EXAMPLE", + "MODULE_ISSUE_EXAMPLE", + "ISSUE_SEARCH_EXAMPLE", + "WORKSPACE_MEMBER_EXAMPLE", + "PROJECT_MEMBER_EXAMPLE", + "CYCLE_ISSUE_EXAMPLE", + # Decorators + "workspace_docs", + "project_docs", + "issue_docs", + "intake_docs", + "asset_docs", + "user_docs", + "cycle_docs", + "work_item_docs", + "label_docs", + "issue_link_docs", + "issue_comment_docs", + "issue_activity_docs", + "issue_attachment_docs", + "module_docs", + "module_issue_docs", + "state_docs", + # Hooks + "preprocess_filter_api_v1_paths", + "generate_operation_summary", +] diff --git a/apps/api/plane/utils/openapi/auth.py b/apps/api/plane/utils/openapi/auth.py new file mode 100644 index 000000000..e6012cc4e --- /dev/null +++ b/apps/api/plane/utils/openapi/auth.py @@ -0,0 +1,29 @@ +""" +OpenAPI authentication extensions for drf-spectacular. + +This module provides authentication extensions that automatically register +custom authentication classes with the OpenAPI schema generator. +""" + +from drf_spectacular.extensions import OpenApiAuthenticationExtension + + +class APIKeyAuthenticationExtension(OpenApiAuthenticationExtension): + """ + OpenAPI authentication extension for plane.api.middleware.api_authentication.APIKeyAuthentication + """ + + target_class = "plane.api.middleware.api_authentication.APIKeyAuthentication" + name = "ApiKeyAuthentication" + priority = 1 + + def get_security_definition(self, auto_schema): + """ + Return the security definition for API key authentication. + """ + return { + "type": "apiKey", + "in": "header", + "name": "X-API-Key", + "description": "API key authentication. Provide your API key in the X-API-Key header.", + } diff --git a/apps/api/plane/utils/openapi/decorators.py b/apps/api/plane/utils/openapi/decorators.py new file mode 100644 index 000000000..e4a86839f --- /dev/null +++ b/apps/api/plane/utils/openapi/decorators.py @@ -0,0 +1,264 @@ +""" +Helper decorators for drf-spectacular OpenAPI documentation. 
+ +This module provides domain-specific decorators that apply common +parameters, responses, and tags to API endpoints based on their context. +""" + +from drf_spectacular.utils import extend_schema +from .parameters import WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER +from .responses import UNAUTHORIZED_RESPONSE, FORBIDDEN_RESPONSE, NOT_FOUND_RESPONSE + + +def _merge_schema_options(defaults, kwargs): + """Helper function to merge responses and parameters from kwargs into defaults""" + # Merge responses + if "responses" in kwargs: + defaults["responses"].update(kwargs["responses"]) + kwargs = {k: v for k, v in kwargs.items() if k != "responses"} + + # Merge parameters + if "parameters" in kwargs: + defaults["parameters"].extend(kwargs["parameters"]) + kwargs = {k: v for k, v in kwargs.items() if k != "parameters"} + + defaults.update(kwargs) + return defaults + + +def user_docs(**kwargs): + """Decorator for user-related endpoints""" + defaults = { + "tags": ["Users"], + "parameters": [], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def workspace_docs(**kwargs): + """Decorator for workspace-related endpoints""" + defaults = { + "tags": ["Workspaces"], + "parameters": [WORKSPACE_SLUG_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def project_docs(**kwargs): + """Decorator for project-related endpoints""" + defaults = { + "tags": ["Projects"], + "parameters": [WORKSPACE_SLUG_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def cycle_docs(**kwargs): + """Decorator for cycle-related endpoints""" + defaults = { + "tags": ["Cycles"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], 
+ "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def issue_docs(**kwargs): + """Decorator for issue-related endpoints""" + defaults = { + "tags": ["Work Items"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def intake_docs(**kwargs): + """Decorator for intake-related endpoints""" + defaults = { + "tags": ["Intake"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def asset_docs(**kwargs): + """Decorator for asset-related endpoints with common defaults""" + defaults = { + "tags": ["Assets"], + "parameters": [], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +# Issue-related decorators for specific tags +def work_item_docs(**kwargs): + """Decorator for work item endpoints (main issue operations)""" + defaults = { + "tags": ["Work Items"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def label_docs(**kwargs): + """Decorator for label management endpoints""" + defaults = { + "tags": ["Labels"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) 
+ + +def issue_link_docs(**kwargs): + """Decorator for issue link endpoints""" + defaults = { + "tags": ["Work Item Links"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def issue_comment_docs(**kwargs): + """Decorator for issue comment endpoints""" + defaults = { + "tags": ["Work Item Comments"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def issue_activity_docs(**kwargs): + """Decorator for issue activity/search endpoints""" + defaults = { + "tags": ["Work Item Activity"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def issue_attachment_docs(**kwargs): + """Decorator for issue attachment endpoints""" + defaults = { + "tags": ["Work Item Attachments"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def module_docs(**kwargs): + """Decorator for module management endpoints""" + defaults = { + "tags": ["Modules"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def module_issue_docs(**kwargs): + """Decorator for module issue management endpoints""" + defaults = { + 
"tags": ["Modules"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) + + +def state_docs(**kwargs): + """Decorator for state management endpoints""" + defaults = { + "tags": ["States"], + "parameters": [WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER], + "responses": { + 401: UNAUTHORIZED_RESPONSE, + 403: FORBIDDEN_RESPONSE, + 404: NOT_FOUND_RESPONSE, + }, + } + + return extend_schema(**_merge_schema_options(defaults, kwargs)) diff --git a/apps/api/plane/utils/openapi/examples.py b/apps/api/plane/utils/openapi/examples.py new file mode 100644 index 000000000..136669159 --- /dev/null +++ b/apps/api/plane/utils/openapi/examples.py @@ -0,0 +1,816 @@ +""" +Common OpenAPI examples for drf-spectacular. + +This module provides reusable example data for API responses and requests +to make the generated documentation more helpful and realistic. 
+""" + +from drf_spectacular.utils import OpenApiExample + + +# File Upload Examples +FILE_UPLOAD_EXAMPLE = OpenApiExample( + name="File Upload Success", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "asset": "uploads/workspace_1/file_example.pdf", + "attributes": { + "name": "example-document.pdf", + "size": 1024000, + "mimetype": "application/pdf", + }, + "created_at": "2024-01-15T10:30:00Z", + "updated_at": "2024-01-15T10:30:00Z", + }, +) + + +# Workspace Examples +WORKSPACE_EXAMPLE = OpenApiExample( + name="Workspace", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "My Workspace", + "slug": "my-workspace", + "organization_size": "1-10", + "created_at": "2024-01-15T10:30:00Z", + "updated_at": "2024-01-15T10:30:00Z", + }, +) + + +# Project Examples +PROJECT_EXAMPLE = OpenApiExample( + name="Project", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "Mobile App Development", + "description": "Development of the mobile application", + "identifier": "MAD", + "network": 2, + "project_lead": "550e8400-e29b-41d4-a716-446655440001", + "created_at": "2024-01-15T10:30:00Z", + "updated_at": "2024-01-15T10:30:00Z", + }, +) + + +# Issue Examples +ISSUE_EXAMPLE = OpenApiExample( + name="Issue", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "Implement user authentication", + "description": "Add OAuth 2.0 authentication flow", + "sequence_id": 1, + "priority": "high", + "assignees": ["550e8400-e29b-41d4-a716-446655440001"], + "labels": ["550e8400-e29b-41d4-a716-446655440002"], + "created_at": "2024-01-15T10:30:00Z", + "updated_at": "2024-01-15T10:30:00Z", + }, +) + + +# User Examples +USER_EXAMPLE = OpenApiExample( + name="User", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "first_name": "John", + "last_name": "Doe", + "email": "john.doe@example.com", + "avatar": "https://example.com/avatar.jpg", + "avatar_url": "https://example.com/avatar.jpg", + "display_name": "John Doe", + }, +) + + 
+# ============================================================================ +# REQUEST EXAMPLES - Centralized examples for API requests +# ============================================================================ + +# Work Item / Issue Examples +ISSUE_CREATE_EXAMPLE = OpenApiExample( + "IssueCreateSerializer", + value={ + "name": "New Issue", + "description": "New issue description", + "priority": "medium", + "state": "0ec6cfa4-e906-4aad-9390-2df0303a41cd", + "assignees": ["0ec6cfa4-e906-4aad-9390-2df0303a41cd"], + "labels": ["0ec6cfa4-e906-4aad-9390-2df0303a41ce"], + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for creating a work item", +) + +ISSUE_UPDATE_EXAMPLE = OpenApiExample( + "IssueUpdateSerializer", + value={ + "name": "Updated Issue", + "description": "Updated issue description", + "priority": "medium", + "state": "0ec6cfa4-e906-4aad-9390-2df0303a41cd", + "assignees": ["0ec6cfa4-e906-4aad-9390-2df0303a41cd"], + "labels": ["0ec6cfa4-e906-4aad-9390-2df0303a41ce"], + }, + description="Example request for updating a work item", +) + +ISSUE_UPSERT_EXAMPLE = OpenApiExample( + "IssueUpsertSerializer", + value={ + "name": "Updated Issue via External ID", + "description": "Updated issue description", + "priority": "high", + "state": "0ec6cfa4-e906-4aad-9390-2df0303a41cd", + "assignees": ["0ec6cfa4-e906-4aad-9390-2df0303a41cd"], + "labels": ["0ec6cfa4-e906-4aad-9390-2df0303a41ce"], + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for upserting a work item via external ID", +) + +# Label Examples +LABEL_CREATE_EXAMPLE = OpenApiExample( + "LabelCreateUpdateSerializer", + value={ + "name": "New Label", + "color": "#ff0000", + "description": "New label description", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for creating a label", +) + +LABEL_UPDATE_EXAMPLE = OpenApiExample( + "LabelCreateUpdateSerializer", + 
value={ + "name": "Updated Label", + "color": "#00ff00", + "description": "Updated label description", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for updating a label", +) + +# Issue Link Examples +ISSUE_LINK_CREATE_EXAMPLE = OpenApiExample( + "IssueLinkCreateSerializer", + value={ + "url": "https://example.com", + "title": "Example Link", + }, + description="Example request for creating an issue link", +) + +ISSUE_LINK_UPDATE_EXAMPLE = OpenApiExample( + "IssueLinkUpdateSerializer", + value={ + "url": "https://example.com", + "title": "Updated Link", + }, + description="Example request for updating an issue link", +) + +# Issue Comment Examples +ISSUE_COMMENT_CREATE_EXAMPLE = OpenApiExample( + "IssueCommentCreateSerializer", + value={ + "comment_html": "

New comment content

", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for creating an issue comment", +) + +ISSUE_COMMENT_UPDATE_EXAMPLE = OpenApiExample( + "IssueCommentCreateSerializer", + value={ + "comment_html": "

Updated comment content

", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for updating an issue comment", +) + +# Issue Attachment Examples +ISSUE_ATTACHMENT_UPLOAD_EXAMPLE = OpenApiExample( + "IssueAttachmentUploadSerializer", + value={ + "name": "document.pdf", + "type": "application/pdf", + "size": 1024000, + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for creating an issue attachment", +) + +ATTACHMENT_UPLOAD_CONFIRM_EXAMPLE = OpenApiExample( + "ConfirmUpload", + value={"is_uploaded": True}, + description="Confirm that the attachment has been successfully uploaded", +) + +# Cycle Examples +CYCLE_CREATE_EXAMPLE = OpenApiExample( + "CycleCreateSerializer", + value={ + "name": "Cycle 1", + "description": "Cycle 1 description", + "start_date": "2021-01-01", + "end_date": "2021-01-31", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for creating a cycle", +) + +CYCLE_UPDATE_EXAMPLE = OpenApiExample( + "CycleUpdateSerializer", + value={ + "name": "Updated Cycle", + "description": "Updated cycle description", + "start_date": "2021-01-01", + "end_date": "2021-01-31", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for updating a cycle", +) + +CYCLE_ISSUE_REQUEST_EXAMPLE = OpenApiExample( + "CycleIssueRequestSerializer", + value={ + "issues": [ + "0ec6cfa4-e906-4aad-9390-2df0303a41cd", + "0ec6cfa4-e906-4aad-9390-2df0303a41ce", + ], + }, + description="Example request for adding cycle issues", +) + +TRANSFER_CYCLE_ISSUE_EXAMPLE = OpenApiExample( + "TransferCycleIssueRequestSerializer", + value={ + "new_cycle_id": "0ec6cfa4-e906-4aad-9390-2df0303a41ce", + }, + description="Example request for transferring cycle issues", +) + +# Module Examples +MODULE_CREATE_EXAMPLE = OpenApiExample( + "ModuleCreateSerializer", + value={ + "name": "New Module", + "description": "New module description", + 
"start_date": "2021-01-01", + "end_date": "2021-01-31", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for creating a module", +) + +MODULE_UPDATE_EXAMPLE = OpenApiExample( + "ModuleUpdateSerializer", + value={ + "name": "Updated Module", + "description": "Updated module description", + "start_date": "2021-01-01", + "end_date": "2021-01-31", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for updating a module", +) + +MODULE_ISSUE_REQUEST_EXAMPLE = OpenApiExample( + "ModuleIssueRequestSerializer", + value={ + "issues": [ + "0ec6cfa4-e906-4aad-9390-2df0303a41cd", + "0ec6cfa4-e906-4aad-9390-2df0303a41ce", + ], + }, + description="Example request for adding module issues", +) + +# Project Examples +PROJECT_CREATE_EXAMPLE = OpenApiExample( + "ProjectCreateSerializer", + value={ + "name": "New Project", + "description": "New project description", + "identifier": "new-project", + "project_lead": "0ec6cfa4-e906-4aad-9390-2df0303a41ce", + }, + description="Example request for creating a project", +) + +PROJECT_UPDATE_EXAMPLE = OpenApiExample( + "ProjectUpdateSerializer", + value={ + "name": "Updated Project", + "description": "Updated project description", + "identifier": "updated-project", + "project_lead": "0ec6cfa4-e906-4aad-9390-2df0303a41ce", + }, + description="Example request for updating a project", +) + +# State Examples +STATE_CREATE_EXAMPLE = OpenApiExample( + "StateCreateSerializer", + value={ + "name": "New State", + "color": "#ff0000", + "group": "backlog", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for creating a state", +) + +STATE_UPDATE_EXAMPLE = OpenApiExample( + "StateUpdateSerializer", + value={ + "name": "Updated State", + "color": "#00ff00", + "group": "backlog", + "external_id": "1234567890", + "external_source": "github", + }, + description="Example request for updating a state", +) + +# Intake 
Examples +INTAKE_ISSUE_CREATE_EXAMPLE = OpenApiExample( + "IntakeIssueCreateSerializer", + value={ + "issue": { + "name": "New Issue", + "description": "New issue description", + "priority": "medium", + } + }, + description="Example request for creating an intake issue", +) + +INTAKE_ISSUE_UPDATE_EXAMPLE = OpenApiExample( + "IntakeIssueUpdateSerializer", + value={ + "status": 1, + "issue": { + "name": "Updated Issue", + "description": "Updated issue description", + "priority": "high", + }, + }, + description="Example request for updating an intake issue", +) + + +# ============================================================================ +# RESPONSE EXAMPLES - Centralized examples for API responses +# ============================================================================ + +# Cycle Response Examples +CYCLE_EXAMPLE = OpenApiExample( + name="Cycle", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "Sprint 1 - Q1 2024", + "description": "First sprint of the quarter focusing on core features", + "start_date": "2024-01-01", + "end_date": "2024-01-14", + "status": "current", + "total_issues": 15, + "completed_issues": 8, + "cancelled_issues": 1, + "started_issues": 4, + "unstarted_issues": 2, + "backlog_issues": 0, + "created_at": "2024-01-01T10:30:00Z", + "updated_at": "2024-01-10T15:45:00Z", + }, +) + +# Transfer Cycle Issue Response Examples +TRANSFER_CYCLE_ISSUE_SUCCESS_EXAMPLE = OpenApiExample( + name="Transfer Cycle Issue Success", + value={ + "message": "Success", + }, + description="Successful transfer of cycle issues to new cycle", +) + +TRANSFER_CYCLE_ISSUE_ERROR_EXAMPLE = OpenApiExample( + name="Transfer Cycle Issue Error", + value={ + "error": "New Cycle Id is required", + }, + description="Error when required cycle ID is missing", +) + +TRANSFER_CYCLE_COMPLETED_ERROR_EXAMPLE = OpenApiExample( + name="Transfer to Completed Cycle Error", + value={ + "error": "The cycle where the issues are transferred is already completed", + }, + 
description="Error when trying to transfer to a completed cycle", +) + +# Module Response Examples +MODULE_EXAMPLE = OpenApiExample( + name="Module", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "Authentication Module", + "description": "User authentication and authorization features", + "start_date": "2024-01-01", + "target_date": "2024-02-15", + "status": "in-progress", + "total_issues": 12, + "completed_issues": 5, + "cancelled_issues": 0, + "started_issues": 4, + "unstarted_issues": 3, + "backlog_issues": 0, + "created_at": "2024-01-01T10:30:00Z", + "updated_at": "2024-01-10T15:45:00Z", + }, +) + +# State Response Examples +STATE_EXAMPLE = OpenApiExample( + name="State", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "In Progress", + "color": "#f39c12", + "group": "started", + "sequence": 2, + "default": False, + "created_at": "2024-01-01T10:30:00Z", + "updated_at": "2024-01-10T15:45:00Z", + }, +) + +# Label Response Examples +LABEL_EXAMPLE = OpenApiExample( + name="Label", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "bug", + "color": "#ff4444", + "description": "Issues that represent bugs in the system", + "created_at": "2024-01-01T10:30:00Z", + "updated_at": "2024-01-10T15:45:00Z", + }, +) + +# Issue Link Response Examples +ISSUE_LINK_EXAMPLE = OpenApiExample( + name="IssueLink", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "url": "https://github.com/example/repo/pull/123", + "title": "Fix authentication bug", + "metadata": { + "title": "Fix authentication bug", + "description": "Pull request to fix authentication timeout issue", + "image": "https://github.com/example/repo/avatar.png", + }, + "created_at": "2024-01-01T10:30:00Z", + "updated_at": "2024-01-10T15:45:00Z", + }, +) + +# Issue Comment Response Examples +ISSUE_COMMENT_EXAMPLE = OpenApiExample( + name="IssueComment", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "comment_html": "

This issue has been resolved by implementing OAuth 2.0 flow.

", + "comment_json": { + "type": "doc", + "content": [ + { + "type": "paragraph", + "content": [ + { + "type": "text", + "text": "This issue has been resolved by implementing OAuth 2.0 flow.", + } + ], + } + ], + }, + "actor": { + "id": "550e8400-e29b-41d4-a716-446655440001", + "first_name": "John", + "last_name": "Doe", + "display_name": "John Doe", + "avatar": "https://example.com/avatar.jpg", + }, + "created_at": "2024-01-01T10:30:00Z", + "updated_at": "2024-01-10T15:45:00Z", + }, +) + +# Issue Attachment Response Examples +ISSUE_ATTACHMENT_EXAMPLE = OpenApiExample( + name="IssueAttachment", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "screenshot.png", + "size": 1024000, + "asset_url": "https://s3.amazonaws.com/bucket/screenshot.png?signed-url", + "attributes": { + "name": "screenshot.png", + "type": "image/png", + "size": 1024000, + }, + "created_at": "2024-01-01T10:30:00Z", + "updated_at": "2024-01-10T15:45:00Z", + }, +) + +# Issue Attachment Error Response Examples +ISSUE_ATTACHMENT_NOT_UPLOADED_EXAMPLE = OpenApiExample( + name="Issue Attachment Not Uploaded", + value={ + "error": "The asset is not uploaded.", + "status": False, + }, + description="Error when trying to download an attachment that hasn't been uploaded yet", +) + +# Intake Issue Response Examples +INTAKE_ISSUE_EXAMPLE = OpenApiExample( + name="IntakeIssue", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "status": 0, # Pending + "source": "in_app", + "issue": { + "id": "550e8400-e29b-41d4-a716-446655440001", + "name": "Feature request: Dark mode", + "description": "Add dark mode support to the application", + "priority": "medium", + "sequence_id": 124, + }, + "created_at": "2024-01-01T10:30:00Z", + "updated_at": "2024-01-10T15:45:00Z", + }, +) + +# Module Issue Response Examples +MODULE_ISSUE_EXAMPLE = OpenApiExample( + name="ModuleIssue", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "module": "550e8400-e29b-41d4-a716-446655440001", + "issue": 
"550e8400-e29b-41d4-a716-446655440002", + "sub_issues_count": 2, + "created_at": "2024-01-01T10:30:00Z", + "updated_at": "2024-01-10T15:45:00Z", + }, +) + +# Issue Search Response Examples +ISSUE_SEARCH_EXAMPLE = OpenApiExample( + name="IssueSearchResults", + value={ + "issues": [ + { + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "Fix authentication bug in user login", + "sequence_id": 123, + "project__identifier": "MAB", + "project_id": "550e8400-e29b-41d4-a716-446655440001", + "workspace__slug": "my-workspace", + }, + { + "id": "550e8400-e29b-41d4-a716-446655440002", + "name": "Add authentication middleware", + "sequence_id": 124, + "project__identifier": "MAB", + "project_id": "550e8400-e29b-41d4-a716-446655440001", + "workspace__slug": "my-workspace", + }, + ] + }, +) + +# Workspace Member Response Examples +WORKSPACE_MEMBER_EXAMPLE = OpenApiExample( + name="WorkspaceMembers", + value=[ + { + "id": "550e8400-e29b-41d4-a716-446655440000", + "first_name": "John", + "last_name": "Doe", + "display_name": "John Doe", + "email": "john.doe@example.com", + "avatar": "https://example.com/avatar.jpg", + "role": 20, + }, + { + "id": "550e8400-e29b-41d4-a716-446655440001", + "first_name": "Jane", + "last_name": "Smith", + "display_name": "Jane Smith", + "email": "jane.smith@example.com", + "avatar": "https://example.com/avatar2.jpg", + "role": 15, + }, + ], +) + +# Project Member Response Examples +PROJECT_MEMBER_EXAMPLE = OpenApiExample( + name="ProjectMembers", + value=[ + { + "id": "550e8400-e29b-41d4-a716-446655440000", + "first_name": "John", + "last_name": "Doe", + "display_name": "John Doe", + "email": "john.doe@example.com", + "avatar": "https://example.com/avatar.jpg", + }, + { + "id": "550e8400-e29b-41d4-a716-446655440001", + "first_name": "Jane", + "last_name": "Smith", + "display_name": "Jane Smith", + "email": "jane.smith@example.com", + "avatar": "https://example.com/avatar2.jpg", + }, + ], +) + +# Cycle Issue Response Examples 
+CYCLE_ISSUE_EXAMPLE = OpenApiExample( + name="CycleIssue", + value={ + "id": "550e8400-e29b-41d4-a716-446655440000", + "cycle": "550e8400-e29b-41d4-a716-446655440001", + "issue": "550e8400-e29b-41d4-a716-446655440002", + "sub_issues_count": 3, + "created_at": "2024-01-01T10:30:00Z", + "updated_at": "2024-01-10T15:45:00Z", + }, +) + + +# Sample data for different entity types +SAMPLE_ISSUE = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "Fix authentication bug in user login", + "description": "Users are unable to log in due to authentication service timeout", + "priority": "high", + "sequence_id": 123, + "state": { + "id": "550e8400-e29b-41d4-a716-446655440001", + "name": "In Progress", + "group": "started", + }, + "assignees": [], + "labels": [], + "created_at": "2024-01-15T10:30:00Z", +} + +SAMPLE_LABEL = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "bug", + "color": "#ff4444", + "description": "Issues that represent bugs in the system", +} + +SAMPLE_CYCLE = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "Sprint 1 - Q1 2024", + "description": "First sprint of the quarter focusing on core features", + "start_date": "2024-01-01", + "end_date": "2024-01-14", + "status": "current", +} + +SAMPLE_MODULE = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "Authentication Module", + "description": "User authentication and authorization features", + "start_date": "2024-01-01", + "target_date": "2024-02-15", + "status": "in_progress", +} + +SAMPLE_PROJECT = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "Mobile App Backend", + "description": "Backend services for the mobile application", + "identifier": "MAB", + "network": 2, +} + +SAMPLE_STATE = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "In Progress", + "color": "#ffa500", + "group": "started", + "sequence": 2, +} + +SAMPLE_COMMENT = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "comment_html": "

This issue needs more investigation. I'll look into the database connection timeout.

", + "created_at": "2024-01-15T14:20:00Z", + "actor": {"id": "550e8400-e29b-41d4-a716-446655440002", "display_name": "John Doe"}, +} + +SAMPLE_LINK = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "url": "https://github.com/example/repo/pull/123", + "title": "Fix authentication timeout issue", + "metadata": {}, +} + +SAMPLE_ACTIVITY = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "field": "priority", + "old_value": "medium", + "new_value": "high", + "created_at": "2024-01-15T11:45:00Z", + "actor": { + "id": "550e8400-e29b-41d4-a716-446655440002", + "display_name": "Jane Smith", + }, +} + +SAMPLE_INTAKE = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "status": 0, + "issue": { + "id": "550e8400-e29b-41d4-a716-446655440003", + "name": "Feature request: Dark mode support", + }, + "created_at": "2024-01-15T09:15:00Z", +} + +SAMPLE_GENERIC = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "name": "Sample Item", + "created_at": "2024-01-15T12:00:00Z", +} + +SAMPLE_CYCLE_ISSUE = { + "id": "550e8400-e29b-41d4-a716-446655440000", + "cycle": "550e8400-e29b-41d4-a716-446655440001", + "issue": "550e8400-e29b-41d4-a716-446655440002", + "sub_issues_count": 3, + "created_at": "2024-01-01T10:30:00Z", +} + +# Mapping of schema types to sample data +SCHEMA_EXAMPLES = { + "Issue": SAMPLE_ISSUE, + "WorkItem": SAMPLE_ISSUE, + "Label": SAMPLE_LABEL, + "Cycle": SAMPLE_CYCLE, + "Module": SAMPLE_MODULE, + "Project": SAMPLE_PROJECT, + "State": SAMPLE_STATE, + "Comment": SAMPLE_COMMENT, + "Link": SAMPLE_LINK, + "Activity": SAMPLE_ACTIVITY, + "Intake": SAMPLE_INTAKE, + "CycleIssue": SAMPLE_CYCLE_ISSUE, +} + + +def get_sample_for_schema(schema_name): + """ + Get appropriate sample data for a schema type. 
+ + Args: + schema_name (str): Name of the schema (e.g., "PaginatedIssueResponse") + + Returns: + dict: Sample data for the schema type + """ + # Extract base schema name from paginated responses + if schema_name.startswith("Paginated"): + base_name = schema_name.replace("Paginated", "").replace("Response", "") + return SCHEMA_EXAMPLES.get(base_name, SAMPLE_GENERIC) + + return SCHEMA_EXAMPLES.get(schema_name, SAMPLE_GENERIC) diff --git a/apps/api/plane/utils/openapi/hooks.py b/apps/api/plane/utils/openapi/hooks.py new file mode 100644 index 000000000..3cd7eaf7a --- /dev/null +++ b/apps/api/plane/utils/openapi/hooks.py @@ -0,0 +1,56 @@ +""" +Schema processing hooks for drf-spectacular OpenAPI generation. + +This module provides preprocessing and postprocessing functions that modify +the generated OpenAPI schema to apply custom filtering, tagging, and other +transformations. +""" + + +def preprocess_filter_api_v1_paths(endpoints): + """ + Filter OpenAPI endpoints to only include /api/v1/ paths and exclude PUT methods. + """ + filtered = [] + for path, path_regex, method, callback in endpoints: + # Only include paths that start with /api/v1/ and exclude PUT methods + if ( + path.startswith("/api/v1/") + and method.upper() != "PUT" + and "server" not in path.lower() + ): + filtered.append((path, path_regex, method, callback)) + return filtered + + +def generate_operation_summary(method, path, tag): + """ + Generate a human-readable summary for an operation. 
+ """ + # Extract the main resource from the path + path_parts = [part for part in path.split("/") if part and not part.startswith("{")] + + if len(path_parts) > 0: + resource = path_parts[-1].replace("-", " ").title() + else: + resource = tag + + # Generate summary based on method + method_summaries = { + "GET": f"Retrieve {resource}", + "POST": f"Create {resource}", + "PATCH": f"Update {resource}", + "DELETE": f"Delete {resource}", + } + + # Handle specific cases + if "archive" in path.lower(): + if method == "POST": + return f'Archive {tag.rstrip("s")}' + elif method == "DELETE": + return f'Unarchive {tag.rstrip("s")}' + + if "transfer" in path.lower(): + return f'Transfer {tag.rstrip("s")}' + + return method_summaries.get(method, f"{method} {resource}") diff --git a/apps/api/plane/utils/openapi/parameters.py b/apps/api/plane/utils/openapi/parameters.py new file mode 100644 index 000000000..0d7f3a3d1 --- /dev/null +++ b/apps/api/plane/utils/openapi/parameters.py @@ -0,0 +1,493 @@ +""" +Common OpenAPI parameters for drf-spectacular. + +This module provides reusable parameter definitions that can be shared +across multiple API endpoints to ensure consistency. 
+""" + +from drf_spectacular.utils import OpenApiParameter, OpenApiExample +from drf_spectacular.types import OpenApiTypes + + +# Path Parameters +WORKSPACE_SLUG_PARAMETER = OpenApiParameter( + name="slug", + description="Workspace slug", + required=True, + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example workspace", + value="my-workspace", + description="A typical workspace slug", + ) + ], +) + +PROJECT_ID_PARAMETER = OpenApiParameter( + name="project_id", + description="Project ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example project ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical project UUID", + ) + ], +) + +PROJECT_PK_PARAMETER = OpenApiParameter( + name="pk", + description="Project ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example project ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical project UUID", + ) + ], +) + +PROJECT_IDENTIFIER_PARAMETER = OpenApiParameter( + name="project_identifier", + description="Project identifier (unique string within workspace)", + required=True, + type=OpenApiTypes.STR, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example project identifier", + value="PROJ", + description="A typical project identifier", + ) + ], +) + +ISSUE_IDENTIFIER_PARAMETER = OpenApiParameter( + name="issue_identifier", + description="Issue sequence ID (numeric identifier within project)", + required=True, + type=OpenApiTypes.INT, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example issue identifier", + value=123, + description="A typical issue sequence ID", + ) + ], +) + +ASSET_ID_PARAMETER = OpenApiParameter( + name="asset_id", + description="Asset ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + 
examples=[ + OpenApiExample( + name="Example asset ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical asset UUID", + ) + ], +) + +CYCLE_ID_PARAMETER = OpenApiParameter( + name="cycle_id", + description="Cycle ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example cycle ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical cycle UUID", + ) + ], +) + +MODULE_ID_PARAMETER = OpenApiParameter( + name="module_id", + description="Module ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example module ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical module UUID", + ) + ], +) + +MODULE_PK_PARAMETER = OpenApiParameter( + name="pk", + description="Module ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example module ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical module UUID", + ) + ], +) + +ISSUE_ID_PARAMETER = OpenApiParameter( + name="issue_id", + description="Issue ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example issue ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical issue UUID", + ) + ], +) + +STATE_ID_PARAMETER = OpenApiParameter( + name="state_id", + description="State ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example state ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical state UUID", + ) + ], +) + +# Additional Path Parameters +LABEL_ID_PARAMETER = OpenApiParameter( + name="pk", + description="Label ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example label 
ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical label UUID", + ) + ], +) + +COMMENT_ID_PARAMETER = OpenApiParameter( + name="pk", + description="Comment ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example comment ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical comment UUID", + ) + ], +) + +LINK_ID_PARAMETER = OpenApiParameter( + name="pk", + description="Link ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example link ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical link UUID", + ) + ], +) + +ATTACHMENT_ID_PARAMETER = OpenApiParameter( + name="pk", + description="Attachment ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example attachment ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical attachment UUID", + ) + ], +) + +ACTIVITY_ID_PARAMETER = OpenApiParameter( + name="pk", + description="Activity ID", + required=True, + type=OpenApiTypes.UUID, + location=OpenApiParameter.PATH, + examples=[ + OpenApiExample( + name="Example activity ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="A typical activity UUID", + ) + ], +) + +# Query Parameters +CURSOR_PARAMETER = OpenApiParameter( + name="cursor", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Pagination cursor for getting next set of results", + required=False, + examples=[ + OpenApiExample( + name="Next page cursor", + value="20:1:0", + description="Cursor format: 'page_size:page_number:offset'", + ) + ], +) + +PER_PAGE_PARAMETER = OpenApiParameter( + name="per_page", + type=OpenApiTypes.INT, + location=OpenApiParameter.QUERY, + description="Number of results per page (default: 20, max: 100)", + required=False, + examples=[ + 
OpenApiExample(name="Default", value=20), + OpenApiExample(name="Maximum", value=100), + ], +) + +# External Integration Parameters +EXTERNAL_ID_PARAMETER = OpenApiParameter( + name="external_id", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="External system identifier for filtering or lookup", + required=False, + examples=[ + OpenApiExample( + name="GitHub Issue", + value="1234567890", + description="GitHub issue number", + ) + ], +) + +EXTERNAL_SOURCE_PARAMETER = OpenApiParameter( + name="external_source", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="External system source name for filtering or lookup", + required=False, + examples=[ + OpenApiExample( + name="GitHub", + value="github", + description="GitHub integration source", + ), + OpenApiExample( + name="Jira", + value="jira", + description="Jira integration source", + ), + ], +) + +# Ordering Parameters +ORDER_BY_PARAMETER = OpenApiParameter( + name="order_by", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Field to order results by. 
Prefix with '-' for descending order", + required=False, + examples=[ + OpenApiExample( + name="Created date descending", + value="-created_at", + description="Most recent items first", + ), + OpenApiExample( + name="Priority ascending", + value="priority", + description="Order by priority (urgent, high, medium, low, none)", + ), + OpenApiExample( + name="State group", + value="state__group", + description="Order by state group (backlog, unstarted, started, completed, cancelled)", + ), + OpenApiExample( + name="Assignee name", + value="assignees__first_name", + description="Order by assignee first name", + ), + ], +) + +# Search Parameters +SEARCH_PARAMETER = OpenApiParameter( + name="search", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Search query to filter results by name, description, or identifier", + required=False, + examples=[ + OpenApiExample( + name="Name search", + value="bug fix", + description="Search for items containing 'bug fix'", + ), + OpenApiExample( + name="Sequence ID", + value="123", + description="Search by sequence ID number", + ), + ], +) + +SEARCH_PARAMETER_REQUIRED = OpenApiParameter( + name="search", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Search query to filter results by name, description, or identifier", + required=True, + examples=[ + OpenApiExample( + name="Name search", + value="bug fix", + description="Search for items containing 'bug fix'", + ), + OpenApiExample( + name="Sequence ID", + value="123", + description="Search by sequence ID number", + ), + ], +) + +LIMIT_PARAMETER = OpenApiParameter( + name="limit", + type=OpenApiTypes.INT, + location=OpenApiParameter.QUERY, + description="Maximum number of results to return", + required=False, + examples=[ + OpenApiExample(name="Default", value=10), + OpenApiExample(name="More results", value=50), + ], +) + +WORKSPACE_SEARCH_PARAMETER = OpenApiParameter( + name="workspace_search", + type=OpenApiTypes.STR, + 
location=OpenApiParameter.QUERY, + description="Whether to search across entire workspace or within specific project", + required=False, + examples=[ + OpenApiExample( + name="Project only", + value="false", + description="Search within specific project only", + ), + OpenApiExample( + name="Workspace wide", + value="true", + description="Search across entire workspace", + ), + ], +) + +PROJECT_ID_QUERY_PARAMETER = OpenApiParameter( + name="project_id", + description="Project ID for filtering results within a specific project", + required=False, + type=OpenApiTypes.UUID, + location=OpenApiParameter.QUERY, + examples=[ + OpenApiExample( + name="Example project ID", + value="550e8400-e29b-41d4-a716-446655440000", + description="Filter results for this project", + ) + ], +) + +# Cycle View Parameter +CYCLE_VIEW_PARAMETER = OpenApiParameter( + name="cycle_view", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Filter cycles by status", + required=False, + examples=[ + OpenApiExample(name="All cycles", value="all"), + OpenApiExample(name="Current cycles", value="current"), + OpenApiExample(name="Upcoming cycles", value="upcoming"), + OpenApiExample(name="Completed cycles", value="completed"), + OpenApiExample(name="Draft cycles", value="draft"), + OpenApiExample(name="Incomplete cycles", value="incomplete"), + ], +) + +# Field Selection Parameters +FIELDS_PARAMETER = OpenApiParameter( + name="fields", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + description="Comma-separated list of fields to include in response", + required=False, + examples=[ + OpenApiExample( + name="Basic fields", + value="id,name,description", + description="Include only basic fields", + ), + OpenApiExample( + name="With relations", + value="id,name,assignees,state", + description="Include fields with relationships", + ), + ], +) + +EXPAND_PARAMETER = OpenApiParameter( + name="expand", + type=OpenApiTypes.STR, + location=OpenApiParameter.QUERY, + 
description="Comma-separated list of related fields to expand in response", + required=False, + examples=[ + OpenApiExample( + name="Expand assignees", + value="assignees", + description="Include full assignee details", + ), + OpenApiExample( + name="Multiple expansions", + value="assignees,labels,state", + description="Include details for multiple relations", + ), + ], +) diff --git a/apps/api/plane/utils/openapi/responses.py b/apps/api/plane/utils/openapi/responses.py new file mode 100644 index 000000000..a70a749f3 --- /dev/null +++ b/apps/api/plane/utils/openapi/responses.py @@ -0,0 +1,492 @@ +""" +Common OpenAPI responses for drf-spectacular. + +This module provides reusable response definitions for common HTTP status codes +and scenarios that occur across multiple API endpoints. +""" + +from drf_spectacular.utils import OpenApiResponse, OpenApiExample, inline_serializer +from rest_framework import serializers +from .examples import get_sample_for_schema + + +# Authentication & Authorization Responses +UNAUTHORIZED_RESPONSE = OpenApiResponse( + description="Authentication credentials were not provided or are invalid.", + examples=[ + OpenApiExample( + name="Unauthorized", + value={ + "error": "Authentication credentials were not provided", + "error_code": "AUTHENTICATION_REQUIRED", + }, + ) + ], +) + +FORBIDDEN_RESPONSE = OpenApiResponse( + description="Permission denied. 
User lacks required permissions.", + examples=[ + OpenApiExample( + name="Forbidden", + value={ + "error": "You do not have permission to perform this action", + "error_code": "PERMISSION_DENIED", + }, + ) + ], +) + + +# Resource Responses +NOT_FOUND_RESPONSE = OpenApiResponse( + description="The requested resource was not found.", + examples=[ + OpenApiExample( + name="Not Found", + value={"error": "Not found", "error_code": "RESOURCE_NOT_FOUND"}, + ) + ], +) + +VALIDATION_ERROR_RESPONSE = OpenApiResponse( + description="Validation error occurred with the provided data.", + examples=[ + OpenApiExample( + name="Validation Error", + value={ + "error": "Validation failed", + "details": {"field_name": ["This field is required."]}, + }, + ) + ], +) + +# Generic Success Responses +DELETED_RESPONSE = OpenApiResponse( + description="Resource deleted successfully", + examples=[ + OpenApiExample( + name="Deleted Successfully", + value={"message": "Resource deleted successfully"}, + ) + ], +) + +ARCHIVED_RESPONSE = OpenApiResponse( + description="Resource archived successfully", + examples=[ + OpenApiExample( + name="Archived Successfully", + value={"message": "Resource archived successfully"}, + ) + ], +) + +UNARCHIVED_RESPONSE = OpenApiResponse( + description="Resource unarchived successfully", + examples=[ + OpenApiExample( + name="Unarchived Successfully", + value={"message": "Resource unarchived successfully"}, + ) + ], +) + +# Specific Error Responses +INVALID_REQUEST_RESPONSE = OpenApiResponse( + description="Invalid request data provided", + examples=[ + OpenApiExample( + name="Invalid Request", + value={ + "error": "Invalid request data", + "details": "Specific validation errors", + }, + ) + ], +) + +CONFLICT_RESPONSE = OpenApiResponse( + description="Resource conflict - duplicate or constraint violation", + examples=[ + OpenApiExample( + name="Resource Conflict", + value={ + "error": "Resource with the same identifier already exists", + "id": 
"550e8400-e29b-41d4-a716-446655440000", + }, + ) + ], +) + +ADMIN_ONLY_RESPONSE = OpenApiResponse( + description="Only admin or creator can perform this action", + examples=[ + OpenApiExample( + name="Admin Only", + value={"error": "Only admin or creator can perform this action"}, + ) + ], +) + +CANNOT_DELETE_RESPONSE = OpenApiResponse( + description="Resource cannot be deleted due to constraints", + examples=[ + OpenApiExample( + name="Cannot Delete", + value={"error": "Resource cannot be deleted", "reason": "Has dependencies"}, + ) + ], +) + +CANNOT_ARCHIVE_RESPONSE = OpenApiResponse( + description="Resource cannot be archived in current state", + examples=[ + OpenApiExample( + name="Cannot Archive", + value={ + "error": "Resource cannot be archived", + "reason": "Not in valid state", + }, + ) + ], +) + +REQUIRED_FIELDS_RESPONSE = OpenApiResponse( + description="Required fields are missing", + examples=[ + OpenApiExample( + name="Required Fields Missing", + value={"error": "Required fields are missing", "fields": ["name", "type"]}, + ) + ], +) + +# Project-specific Responses +PROJECT_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Project not found", + examples=[ + OpenApiExample( + name="Project Not Found", + value={"error": "Project not found"}, + ) + ], +) + +WORKSPACE_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Workspace not found", + examples=[ + OpenApiExample( + name="Workspace Not Found", + value={"error": "Workspace not found"}, + ) + ], +) + +PROJECT_NAME_TAKEN_RESPONSE = OpenApiResponse( + description="Project name already taken", + examples=[ + OpenApiExample( + name="Project Name Taken", + value={"error": "Project name already taken"}, + ) + ], +) + +# Issue-specific Responses +ISSUE_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Issue not found", + examples=[ + OpenApiExample( + name="Issue Not Found", + value={"error": "Issue not found"}, + ) + ], +) + +WORK_ITEM_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Work item not 
found", + examples=[ + OpenApiExample( + name="Work Item Not Found", + value={"error": "Work item not found"}, + ) + ], +) + +EXTERNAL_ID_EXISTS_RESPONSE = OpenApiResponse( + description="Resource with same external ID already exists", + examples=[ + OpenApiExample( + name="External ID Exists", + value={ + "error": "Resource with the same external id and external source already exists", + "id": "550e8400-e29b-41d4-a716-446655440000", + }, + ) + ], +) + +# Label-specific Responses +LABEL_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Label not found", + examples=[ + OpenApiExample( + name="Label Not Found", + value={"error": "Label not found"}, + ) + ], +) + +LABEL_NAME_EXISTS_RESPONSE = OpenApiResponse( + description="Label with the same name already exists", + examples=[ + OpenApiExample( + name="Label Name Exists", + value={"error": "Label with the same name already exists in the project"}, + ) + ], +) + +# Module-specific Responses +MODULE_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Module not found", + examples=[ + OpenApiExample( + name="Module Not Found", + value={"error": "Module not found"}, + ) + ], +) + +MODULE_ISSUE_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Module issue not found", + examples=[ + OpenApiExample( + name="Module Issue Not Found", + value={"error": "Module issue not found"}, + ) + ], +) + +# Cycle-specific Responses +CYCLE_CANNOT_ARCHIVE_RESPONSE = OpenApiResponse( + description="Cycle cannot be archived", + examples=[ + OpenApiExample( + name="Cycle Cannot Archive", + value={"error": "Only completed cycles can be archived"}, + ) + ], +) + +# State-specific Responses +STATE_NAME_EXISTS_RESPONSE = OpenApiResponse( + description="State with the same name already exists", + examples=[ + OpenApiExample( + name="State Name Exists", + value={"error": "State with the same name already exists"}, + ) + ], +) + +STATE_CANNOT_DELETE_RESPONSE = OpenApiResponse( + description="State cannot be deleted", + examples=[ + 
OpenApiExample( + name="State Cannot Delete", + value={ + "error": "State cannot be deleted", + "reason": "Default state or has issues", + }, + ) + ], +) + +# Comment-specific Responses +COMMENT_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Comment not found", + examples=[ + OpenApiExample( + name="Comment Not Found", + value={"error": "Comment not found"}, + ) + ], +) + +# Link-specific Responses +LINK_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Link not found", + examples=[ + OpenApiExample( + name="Link Not Found", + value={"error": "Link not found"}, + ) + ], +) + +# Attachment-specific Responses +ATTACHMENT_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Attachment not found", + examples=[ + OpenApiExample( + name="Attachment Not Found", + value={"error": "Attachment not found"}, + ) + ], +) + +# Search-specific Responses +BAD_SEARCH_REQUEST_RESPONSE = OpenApiResponse( + description="Bad request - invalid search parameters", + examples=[ + OpenApiExample( + name="Bad Search Request", + value={"error": "Invalid search parameters"}, + ) + ], +) + + +# Pagination Response Templates +def create_paginated_response( + item_schema, + schema_name, + description="Paginated results", + example_name="Paginated Response", +): + """Create a paginated response with the specified item schema""" + + return OpenApiResponse( + description=description, + response=inline_serializer( + name=schema_name, + fields={ + "grouped_by": serializers.CharField(allow_null=True), + "sub_grouped_by": serializers.CharField(allow_null=True), + "total_count": serializers.IntegerField(), + "next_cursor": serializers.CharField(), + "prev_cursor": serializers.CharField(), + "next_page_results": serializers.BooleanField(), + "prev_page_results": serializers.BooleanField(), + "count": serializers.IntegerField(), + "total_pages": serializers.IntegerField(), + "total_results": serializers.IntegerField(), + "extra_stats": serializers.CharField(allow_null=True), + "results": 
serializers.ListField(child=item_schema()), + }, + ), + examples=[ + OpenApiExample( + name=example_name, + value={ + "grouped_by": "state", + "sub_grouped_by": "priority", + "total_count": 150, + "next_cursor": "20:1:0", + "prev_cursor": "20:0:0", + "next_page_results": True, + "prev_page_results": False, + "count": 20, + "total_pages": 8, + "total_results": 150, + "extra_stats": None, + "results": [get_sample_for_schema(schema_name)], + }, + summary=example_name, + ) + ], + ) + + +# Asset-specific Responses +PRESIGNED_URL_SUCCESS_RESPONSE = OpenApiResponse( + description="Presigned URL generated successfully" +) + +GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE = OpenApiResponse( + description="Presigned URL generated successfully", + examples=[ + OpenApiExample( + name="Generic Asset Upload Response", + value={ + "upload_data": { + "url": "https://s3.amazonaws.com/bucket-name", + "fields": { + "key": "workspace-id/uuid-filename.pdf", + "AWSAccessKeyId": "AKIA...", + "policy": "eyJ...", + "signature": "abc123...", + }, + }, + "asset_id": "550e8400-e29b-41d4-a716-446655440000", + "asset_url": "https://cdn.example.com/workspace-id/uuid-filename.pdf", + }, + ) + ], +) + +GENERIC_ASSET_VALIDATION_ERROR_RESPONSE = OpenApiResponse( + description="Validation error", + examples=[ + OpenApiExample( + name="Missing required fields", + value={"error": "Name and size are required fields.", "status": False}, + ), + OpenApiExample( + name="Invalid file type", + value={"error": "Invalid file type.", "status": False}, + ), + ], +) + +ASSET_CONFLICT_RESPONSE = OpenApiResponse( + description="Asset with same external ID already exists", + examples=[ + OpenApiExample( + name="Duplicate external asset", + value={ + "message": "Asset with same external id and source already exists", + "asset_id": "550e8400-e29b-41d4-a716-446655440000", + "asset_url": "https://cdn.example.com/existing-file.pdf", + }, + ) + ], +) + +ASSET_DOWNLOAD_SUCCESS_RESPONSE = OpenApiResponse( + description="Presigned 
download URL generated successfully", + examples=[ + OpenApiExample( + name="Asset Download Response", + value={ + "asset_id": "550e8400-e29b-41d4-a716-446655440000", + "asset_url": "https://s3.amazonaws.com/bucket/file.pdf?signed-url", + "asset_name": "document.pdf", + "asset_type": "application/pdf", + }, + ) + ], +) + +ASSET_DOWNLOAD_ERROR_RESPONSE = OpenApiResponse( + description="Bad request", + examples=[ + OpenApiExample( + name="Asset not uploaded", value={"error": "Asset not yet uploaded"} + ), + ], +) + +ASSET_UPDATED_RESPONSE = OpenApiResponse(description="Asset updated successfully") + +ASSET_DELETED_RESPONSE = OpenApiResponse(description="Asset deleted successfully") + +ASSET_NOT_FOUND_RESPONSE = OpenApiResponse( + description="Asset not found", + examples=[ + OpenApiExample(name="Asset not found", value={"error": "Asset not found"}) + ], +) diff --git a/apps/api/plane/utils/paginator.py b/apps/api/plane/utils/paginator.py index ce9c65f64..0d065e253 100644 --- a/apps/api/plane/utils/paginator.py +++ b/apps/api/plane/utils/paginator.py @@ -160,7 +160,7 @@ class OffsetPaginator: total_count = ( self.total_count_queryset.count() if self.total_count_queryset - else results.count() + else queryset.count() ) # Check if there are more results available after the current page diff --git a/apps/api/requirements/base.txt b/apps/api/requirements/base.txt index 3a12b9bf6..28fede97f 100644 --- a/apps/api/requirements/base.txt +++ b/apps/api/requirements/base.txt @@ -1,7 +1,7 @@ # base requirements # django -Django==4.2.22 +Django==4.2.24 # rest framework djangorestframework==3.15.2 # postgres @@ -9,6 +9,8 @@ psycopg==3.1.18 psycopg-binary==3.1.18 psycopg-c==3.1.18 dj-database-url==2.1.0 +# mongo +pymongo==4.6.3 # redis redis==5.0.4 django-redis==5.4.0 @@ -65,3 +67,7 @@ opentelemetry-api==1.28.1 opentelemetry-sdk==1.28.1 opentelemetry-instrumentation-django==0.49b1 opentelemetry-exporter-otlp==1.28.1 +# OpenAPI Specification +drf-spectacular==0.28.0 +# html 
sanitizer +nh3==0.2.18 diff --git a/apps/api/templates/emails/auth/forgot_password.html b/apps/api/templates/emails/auth/forgot_password.html index 9df90724f..f673c1e63 100644 --- a/apps/api/templates/emails/auth/forgot_password.html +++ b/apps/api/templates/emails/auth/forgot_password.html @@ -8,8 +8,8 @@ Set a new password to your Plane account - - + + - + - + @@ -94,9 +94,9 @@ @@ -187,7 +187,7 @@ @@ -236,7 +236,7 @@ diff --git a/apps/api/templates/emails/auth/magic_signin.html b/apps/api/templates/emails/auth/magic_signin.html index a7a86a94c..c32b399fb 100644 --- a/apps/api/templates/emails/auth/magic_signin.html +++ b/apps/api/templates/emails/auth/magic_signin.html @@ -9,7 +9,7 @@ - + - + - + @@ -80,7 +80,7 @@ - + @@ -145,7 +145,7 @@ ­
-

Despite our popularity, we are humbly early-stage. We are shipping fast, so please reach out to us with feature requests, major and minor nits, and anything else you find missing. We read every message, tweet, and conversation and update our public roadmap.

+

Despite our popularity, we are humbly early-stage. We are shipping fast, so please reach out to us with feature requests, major and minor nits, and anything else you find missing. We read every message, tweet, and conversation and update our public roadmap.

­ @@ -194,7 +194,7 @@ - + @@ -202,7 +202,7 @@ - + @@ -210,7 +210,7 @@ - + @@ -218,7 +218,7 @@ - + diff --git a/apps/api/templates/emails/invitations/project_invitation.html b/apps/api/templates/emails/invitations/project_invitation.html index 0a40d42bf..254408ac5 100644 --- a/apps/api/templates/emails/invitations/project_invitation.html +++ b/apps/api/templates/emails/invitations/project_invitation.html @@ -8,7 +8,7 @@ {{ first_name }} invited you to join {{ project_name }} on Plane - + @@ -58,7 +58,7 @@ ­ - + ­ @@ -91,17 +91,17 @@ - -

Accept the invite

+
+

Accept the invite

diff --git a/apps/api/templates/emails/invitations/workspace_invitation.html b/apps/api/templates/emails/invitations/workspace_invitation.html index a94745383..619f03992 100644 --- a/apps/api/templates/emails/invitations/workspace_invitation.html +++ b/apps/api/templates/emails/invitations/workspace_invitation.html @@ -8,8 +8,8 @@ {{first_name}} has invited you to join them in {{workspace_name}} on Plane. - - + + - + - + @@ -88,9 +88,9 @@ @@ -131,7 +131,7 @@ diff --git a/apps/api/templates/emails/notifications/issue-updates.html b/apps/api/templates/emails/notifications/issue-updates.html index 8ba91c6fe..c6fe3b278 100644 --- a/apps/api/templates/emails/notifications/issue-updates.html +++ b/apps/api/templates/emails/notifications/issue-updates.html @@ -8,14 +8,14 @@ - +
-
+
diff --git a/apps/api/templates/emails/notifications/project_addition.html b/apps/api/templates/emails/notifications/project_addition.html index ccf0f7a95..59c7e0e4d 100644 --- a/apps/api/templates/emails/notifications/project_addition.html +++ b/apps/api/templates/emails/notifications/project_addition.html @@ -164,7 +164,7 @@ text-align: center !important; } .r15-r { - background-color: #3f76ff !important; + background-color: #006399 !important; border-radius: 4px !important; border-width: 0px !important; box-sizing: border-box; @@ -296,7 +296,7 @@ } a, a:link { - color: #3f76ff; + color: #006399; text-decoration: underline; } .nl2go-default-textstyle { @@ -372,7 +372,7 @@ [endif]--> @@ -380,7 +380,7 @@ @@ -483,7 +483,7 @@ " > @@ -1308,7 +1308,7 @@ href="https://www.linkedin.com/company/planepowers/" target="_blank" style=" - color: #3f76ff; + color: #006399; text-decoration: underline; " > @@ -1368,7 +1368,7 @@ href="https://twitter.com/planepowers" target="_blank" style=" - color: #3f76ff; + color: #006399; text-decoration: underline; " > @@ -1428,7 +1428,7 @@ href="https://plane.so/" target="_blank" style=" - color: #3f76ff; + color: #006399; text-decoration: underline; " > diff --git a/apps/api/templates/emails/notifications/webhook-deactivate.html b/apps/api/templates/emails/notifications/webhook-deactivate.html index 0755eb498..272271f96 100644 --- a/apps/api/templates/emails/notifications/webhook-deactivate.html +++ b/apps/api/templates/emails/notifications/webhook-deactivate.html @@ -8,8 +8,8 @@ {{ message }} - - + + - + - + @@ -80,7 +80,7 @@ - + @@ -155,7 +155,7 @@ ­
-

Despite our popularity, we are humbly early-stage. We are shipping fast, so please reach out to us with feature requests, major and minor nits, and anything else you find missing. We read every message, tweet, and conversation and update our public roadmap.

+

Despite our popularity, we are humbly early-stage. We are shipping fast, so please reach out to us with feature requests, major and minor nits, and anything else you find missing. We read every message, tweet, and conversation and update our public roadmap.

­ @@ -204,7 +204,7 @@ - + @@ -212,7 +212,7 @@ - + @@ -220,7 +220,7 @@ - + @@ -228,7 +228,7 @@ - + diff --git a/apps/api/templates/emails/user/user_activation.html b/apps/api/templates/emails/user/user_activation.html index 1ec60e955..a454d0a3c 100644 --- a/apps/api/templates/emails/user/user_activation.html +++ b/apps/api/templates/emails/user/user_activation.html @@ -173,7 +173,7 @@ text-align: center !important; } .r16-r { - background-color: #3f76ff !important; + background-color: #006399 !important; border-radius: 4px !important; border-width: 0px !important; box-sizing: border-box; @@ -305,7 +305,7 @@ } a, a:link { - color: #3f76ff; + color: #006399; text-decoration: underline; } .nl2go-default-textstyle { @@ -382,7 +382,7 @@ @@ -390,7 +390,7 @@ @@ -493,7 +493,7 @@ " > @@ -1287,7 +1287,7 @@ href="https://www.linkedin.com/company/planepowers/" target="_blank" style=" - color: #3f76ff; + color: #006399; text-decoration: underline; " > @@ -1347,7 +1347,7 @@ href="https://twitter.com/planepowers" target="_blank" style=" - color: #3f76ff; + color: #006399; text-decoration: underline; " > @@ -1407,7 +1407,7 @@ href="https://plane.so/" target="_blank" style=" - color: #3f76ff; + color: #006399; text-decoration: underline; " > diff --git a/apps/api/templates/emails/user/user_deactivation.html b/apps/api/templates/emails/user/user_deactivation.html index b6bc7b768..8a0c097a7 100644 --- a/apps/api/templates/emails/user/user_deactivation.html +++ b/apps/api/templates/emails/user/user_deactivation.html @@ -173,7 +173,7 @@ text-align: center !important; } .r16-r { - background-color: #3f76ff !important; + background-color: #006399 !important; border-radius: 4px !important; border-width: 0px !important; box-sizing: border-box; @@ -305,7 +305,7 @@ } a, a:link { - color: #3f76ff; + color: #006399; text-decoration: underline; } .nl2go-default-textstyle { @@ -382,7 +382,7 @@ @@ -390,7 +390,7 @@ @@ -493,7 +493,7 @@ " > @@ -1288,7 +1288,7 @@ 
href="https://www.linkedin.com/company/planepowers/" target="_blank" style=" - color: #3f76ff; + color: #006399; text-decoration: underline; " > @@ -1348,7 +1348,7 @@ href="https://twitter.com/planepowers" target="_blank" style=" - color: #3f76ff; + color: #006399; text-decoration: underline; " > @@ -1408,7 +1408,7 @@ href="https://plane.so/" target="_blank" style=" - color: #3f76ff; + color: #006399; text-decoration: underline; " > diff --git a/apps/live/.eslintrc.cjs b/apps/live/.eslintrc.cjs new file mode 100644 index 000000000..0a4c3d9ee --- /dev/null +++ b/apps/live/.eslintrc.cjs @@ -0,0 +1,4 @@ +module.exports = { + root: true, + extends: ["@plane/eslint-config/server.js"], +}; diff --git a/apps/live/.eslintrc.json b/apps/live/.eslintrc.json deleted file mode 100644 index db20d9097..000000000 --- a/apps/live/.eslintrc.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "root": true, - "extends": ["@plane/eslint-config/server.js"], - "parser": "@typescript-eslint/parser" -} diff --git a/apps/live/Dockerfile.dev b/apps/live/Dockerfile.dev index 4d251ee9a..5e0f53726 100644 --- a/apps/live/Dockerfile.dev +++ b/apps/live/Dockerfile.dev @@ -4,12 +4,12 @@ RUN apk add --no-cache libc6-compat WORKDIR /app COPY . . 
-RUN yarn global add turbo -RUN yarn install +RUN corepack enable pnpm && pnpm add -g turbo +RUN pnpm install EXPOSE 3003 ENV TURBO_TELEMETRY_DISABLED=1 VOLUME [ "/app/node_modules", "/app/live/node_modules"] -CMD ["yarn","dev", "--filter=live"] +CMD ["pnpm","dev", "--filter=live"] diff --git a/apps/live/Dockerfile.live b/apps/live/Dockerfile.live index c3455fdce..a353357cd 100644 --- a/apps/live/Dockerfile.live +++ b/apps/live/Dockerfile.live @@ -1,5 +1,11 @@ +# syntax=docker/dockerfile:1.7 FROM node:22-alpine AS base +# Setup pnpm package manager with corepack and configure global bin directory for caching +ENV PNPM_HOME="/pnpm" +ENV PATH="$PNPM_HOME:$PATH" +RUN corepack enable + # ***************************************************************************** # STAGE 1: Prune the project # ***************************************************************************** @@ -9,9 +15,10 @@ RUN apk update RUN apk add --no-cache libc6-compat # Set working directory WORKDIR /app -RUN yarn global add turbo +ARG TURBO_VERSION=2.5.6 +RUN corepack enable pnpm && pnpm add -g turbo@${TURBO_VERSION} COPY . . -RUN turbo prune live --docker +RUN turbo prune --scope=live --docker # ***************************************************************************** # STAGE 2: Install dependencies & build the project @@ -25,16 +32,18 @@ WORKDIR /app # First install dependencies (as they change less often) COPY .gitignore .gitignore COPY --from=builder /app/out/json/ . -COPY --from=builder /app/out/yarn.lock ./yarn.lock -RUN yarn install +COPY --from=builder /app/out/pnpm-lock.yaml ./pnpm-lock.yaml +RUN corepack enable pnpm +RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm fetch --store-dir=/pnpm/store # Build the project and its dependencies COPY --from=builder /app/out/full/ . 
COPY turbo.json turbo.json +RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm install --offline --frozen-lockfile --store-dir=/pnpm/store ENV TURBO_TELEMETRY_DISABLED=1 -RUN yarn turbo build --filter=live +RUN pnpm turbo run build --filter=live # ***************************************************************************** # STAGE 3: Run the project @@ -44,11 +53,12 @@ FROM base AS runner WORKDIR /app COPY --from=installer /app/packages ./packages -COPY --from=installer /app/apps/live/dist ./live +COPY --from=installer /app/apps/live/dist ./apps/live/dist +COPY --from=installer /app/apps/live/node_modules ./apps/live/node_modules COPY --from=installer /app/node_modules ./node_modules ENV TURBO_TELEMETRY_DISABLED=1 EXPOSE 3000 -CMD ["node", "live/server.js"] \ No newline at end of file +CMD ["node", "apps/live/dist/server.js"] diff --git a/apps/live/package.json b/apps/live/package.json index 7593ed118..97e5c50d7 100644 --- a/apps/live/package.json +++ b/apps/live/package.json @@ -1,16 +1,16 @@ { "name": "live", - "version": "0.28.0", + "version": "1.0.0", "license": "AGPL-3.0", "description": "A realtime collaborative server powers Plane's rich text editor", "main": "./src/server.ts", "private": true, "type": "module", "scripts": { - "dev": "tsup --watch --onSuccess 'node --env-file=.env dist/server.js'", - "build": "tsc --noEmit && tsup", + "build": "tsdown", + "dev": "tsdown --watch", "start": "node --env-file=.env dist/server.js", - "check:lint": "eslint . --max-warnings 0", + "check:lint": "eslint . --max-warnings 10", "check:types": "tsc --noEmit", "check:format": "prettier --check \"**/*.{ts,tsx,md,json,css,scss}\"", "fix:lint": "eslint . 
--fix", @@ -24,11 +24,11 @@ "@hocuspocus/extension-logger": "^2.15.0", "@hocuspocus/extension-redis": "^2.15.0", "@hocuspocus/server": "^2.15.0", - "@plane/editor": "*", - "@plane/types": "*", + "@plane/editor": "workspace:*", + "@plane/types": "workspace:*", "@tiptap/core": "^2.22.3", "@tiptap/html": "^2.22.3", - "axios": "1.11.0", + "axios": "catalog:", "compression": "1.8.1", "cors": "^2.8.5", "dotenv": "^16.4.5", @@ -36,29 +36,30 @@ "express-ws": "^5.0.2", "helmet": "^7.1.0", "ioredis": "^5.4.1", - "lodash": "^4.17.21", + "lodash": "catalog:", "morgan": "1.10.1", "pino-http": "^10.3.0", "pino-pretty": "^11.2.2", - "uuid": "^10.0.0", + "uuid": "catalog:", "y-prosemirror": "^1.2.15", "y-protocols": "^1.0.6", "yjs": "^13.6.20" }, "devDependencies": { - "@plane/eslint-config": "*", - "@plane/typescript-config": "*", + "@plane/eslint-config": "workspace:*", + "@plane/typescript-config": "workspace:*", "@types/compression": "1.8.1", "@types/cors": "^2.8.17", - "@types/dotenv": "^8.2.0", - "@types/express": "^4.17.21", - "@types/express-ws": "^3.0.4", + "@types/express": "^4.17.23", + "@types/express-ws": "^3.0.5", "@types/node": "^20.14.9", "@types/pino-http": "^5.8.4", + "@types/uuid": "^9.0.1", "concurrently": "^9.0.1", "nodemon": "^3.1.7", "ts-node": "^10.9.2", - "tsup": "8.4.0", - "typescript": "5.8.3" + "tsdown": "catalog:", + "typescript": "catalog:", + "ws": "^8.18.3" } } diff --git a/apps/live/src/ce/types/common.d.ts b/apps/live/src/ce/types/common.d.ts index 2f51c6ff5..ffc9e1053 100644 --- a/apps/live/src/ce/types/common.d.ts +++ b/apps/live/src/ce/types/common.d.ts @@ -1 +1 @@ -export type TAdditionalDocumentTypes = {}; +export type TAdditionalDocumentTypes = never; diff --git a/apps/live/src/core/extensions/index.ts b/apps/live/src/core/extensions/index.ts index 7364169a4..1d14d41b8 100644 --- a/apps/live/src/core/extensions/index.ts +++ b/apps/live/src/core/extensions/index.ts @@ -1,20 +1,17 @@ -// Third-party libraries -import { Redis } from "ioredis"; 
-// Hocuspocus extensions and core import { Database } from "@hocuspocus/extension-database"; -import { Extension } from "@hocuspocus/server"; import { Logger } from "@hocuspocus/extension-logger"; import { Redis as HocusPocusRedis } from "@hocuspocus/extension-redis"; +import { Extension } from "@hocuspocus/server"; +import { Redis } from "ioredis"; // core helpers and utilities import { manualLogger } from "@/core/helpers/logger.js"; -import { getRedisUrl } from "@/core/lib/utils/redis-url.js"; // core libraries import { fetchPageDescriptionBinary, updatePageDescription } from "@/core/lib/page.js"; +import { getRedisUrl } from "@/core/lib/utils/redis-url.js"; +import { type HocusPocusServerContext, type TDocumentTypes } from "@/core/types/common.js"; // plane live libraries import { fetchDocument } from "@/plane-live/lib/fetch-document.js"; import { updateDocument } from "@/plane-live/lib/update-document.js"; -// types -import { type HocusPocusServerContext, type TDocumentTypes } from "@/core/types/common.js"; export const getExtensions: () => Promise = async () => { const extensions: Extension[] = [ diff --git a/apps/live/src/core/helpers/logger.ts b/apps/live/src/core/helpers/logger.ts index 07efaea6e..f93c9e5ff 100644 --- a/apps/live/src/core/helpers/logger.ts +++ b/apps/live/src/core/helpers/logger.ts @@ -1,5 +1,4 @@ import { pinoHttp } from "pino-http"; -import { Logger } from "pino"; const transport = { target: "pino-pretty", @@ -9,6 +8,7 @@ const transport = { }; const hooks = { + // eslint-disable-next-line @typescript-eslint/no-explicit-any logMethod(inputArgs: any, method: any): any { if (inputArgs.length >= 2) { const arg1 = inputArgs.shift(); @@ -36,4 +36,4 @@ export const logger = pinoHttp({ }, }); -export const manualLogger: Logger = logger.logger; +export const manualLogger: typeof logger.logger = logger.logger; diff --git a/apps/live/src/core/hocuspocus-server.ts b/apps/live/src/core/hocuspocus-server.ts index 072d45cbc..df69c2cb6 100644 --- 
a/apps/live/src/core/hocuspocus-server.ts +++ b/apps/live/src/core/hocuspocus-server.ts @@ -1,12 +1,12 @@ import { Server } from "@hocuspocus/server"; import { v4 as uuidv4 } from "uuid"; -// lib -import { handleAuthentication } from "@/core/lib/authentication.js"; -// extensions -import { getExtensions } from "@/core/extensions/index.js"; -import { DocumentCollaborativeEvents, TDocumentEventsServer } from "@plane/editor/lib"; // editor types import { TUserDetails } from "@plane/editor"; +import { DocumentCollaborativeEvents, TDocumentEventsServer } from "@plane/editor/lib"; +// extensions +import { getExtensions } from "@/core/extensions/index.js"; +// lib +import { handleAuthentication } from "@/core/lib/authentication.js"; // types import { type HocusPocusServerContext } from "@/core/types/common.js"; @@ -52,7 +52,7 @@ export const getHocusPocusServer = async () => { cookie, userId, }); - } catch (error) { + } catch (_error) { throw Error("Authentication unsuccessful!"); } }, diff --git a/apps/live/src/core/lib/authentication.ts b/apps/live/src/core/lib/authentication.ts index 0f679337c..c7f190e3a 100644 --- a/apps/live/src/core/lib/authentication.ts +++ b/apps/live/src/core/lib/authentication.ts @@ -1,7 +1,7 @@ -// services -import { UserService } from "@/core/services/user.service.js"; // core helpers import { manualLogger } from "@/core/helpers/logger.js"; +// services +import { UserService } from "@/core/services/user.service.js"; const userService = new UserService(); diff --git a/apps/live/src/core/services/api.service.ts b/apps/live/src/core/services/api.service.ts index 1aa5cf9e5..dbef2ae17 100644 --- a/apps/live/src/core/services/api.service.ts +++ b/apps/live/src/core/services/api.service.ts @@ -14,6 +14,7 @@ export abstract class APIService { this.axiosInstance = axios.create({ baseURL, withCredentials: true, + timeout: 20000, }); } @@ -36,7 +37,7 @@ export abstract class APIService { return this.axiosInstance.patch(url, data, config); } - delete(url: 
string, data?: any, config = {}) { + delete(url: string, data?: Record | null | string, config = {}) { return this.axiosInstance.delete(url, { data, ...config }); } diff --git a/apps/live/src/server.ts b/apps/live/src/server.ts index c4a353538..69d0e642e 100644 --- a/apps/live/src/server.ts +++ b/apps/live/src/server.ts @@ -1,13 +1,13 @@ import compression from "compression"; import cors from "cors"; -import expressWs from "express-ws"; import express, { Request, Response } from "express"; +import expressWs from "express-ws"; import helmet from "helmet"; // hocuspocus server -import { getHocusPocusServer } from "@/core/hocuspocus-server.js"; // helpers import { convertHTMLDocumentToAllFormats } from "@/core/helpers/convert-document.js"; import { logger, manualLogger } from "@/core/helpers/logger.js"; +import { getHocusPocusServer } from "@/core/hocuspocus-server.js"; // types import { TConvertDocumentRequestBody } from "@/core/types/common.js"; diff --git a/apps/live/tsconfig.json b/apps/live/tsconfig.json index 810a68a5c..57d47a3d8 100644 --- a/apps/live/tsconfig.json +++ b/apps/live/tsconfig.json @@ -21,6 +21,6 @@ "emitDecoratorMetadata": true, "sourceRoot": "/" }, - "include": ["src/**/*.ts", "tsup.config.ts"], + "include": ["src/**/*.ts", "tsdown.config.ts"], "exclude": ["./dist", "./build", "./node_modules"] } diff --git a/apps/live/tsdown.config.ts b/apps/live/tsdown.config.ts new file mode 100644 index 000000000..2b97503a6 --- /dev/null +++ b/apps/live/tsdown.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from "tsdown"; + +export default defineConfig({ + entry: ["src/server.ts"], + outDir: "dist", + format: ["esm", "cjs"], +}); diff --git a/apps/live/tsup.config.ts b/apps/live/tsup.config.ts deleted file mode 100644 index 05fbe7e86..000000000 --- a/apps/live/tsup.config.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { defineConfig } from "tsup"; - -export default defineConfig({ - entry: ["src/server.ts"], - format: ["esm", "cjs"], - dts: true, - splitting: 
false, - sourcemap: true, - minify: false, - target: "node18", - outDir: "dist", - env: { - NODE_ENV: process.env.NODE_ENV || "development", - }, -}); diff --git a/apps/proxy/Dockerfile.ce b/apps/proxy/Dockerfile.ce index 4d2f6dd0a..2c0f3ead5 100644 --- a/apps/proxy/Dockerfile.ce +++ b/apps/proxy/Dockerfile.ce @@ -5,7 +5,7 @@ RUN xcaddy build \ --with github.com/caddy-dns/digitalocean@04bde2867106aa1b44c2f9da41a285fa02e629c5 \ --with github.com/mholt/caddy-l4@4d3c80e89c5f80438a3e048a410d5543ff5fb9f4 -FROM caddy:2.10.0-builder-alpine +FROM caddy:2.10.0-alpine RUN apk add --no-cache nss-tools bash curl diff --git a/apps/space/.eslintignore b/apps/space/.eslintignore new file mode 100644 index 000000000..27e50ad7c --- /dev/null +++ b/apps/space/.eslintignore @@ -0,0 +1,12 @@ +.next/* +out/* +public/* +dist/* +node_modules/* +.turbo/* +.env* +.env +.env.local +.env.development +.env.production +.env.test \ No newline at end of file diff --git a/apps/space/.eslintrc.js b/apps/space/.eslintrc.js index 5a6f06067..1662fabf7 100644 --- a/apps/space/.eslintrc.js +++ b/apps/space/.eslintrc.js @@ -1,6 +1,4 @@ -/** @type {import("eslint").Linter.Config} */ module.exports = { root: true, extends: ["@plane/eslint-config/next.js"], - parser: "@typescript-eslint/parser", }; diff --git a/apps/space/.gitignore b/apps/space/.gitignore index a64f113f1..bc7846c3c 100644 --- a/apps/space/.gitignore +++ b/apps/space/.gitignore @@ -21,8 +21,6 @@ # debug npm-debug.log* -yarn-debug.log* -yarn-error.log* .pnpm-debug.log* # local env files diff --git a/apps/space/.prettierignore b/apps/space/.prettierignore index 43e8a7b8f..07bf87ab5 100644 --- a/apps/space/.prettierignore +++ b/apps/space/.prettierignore @@ -2,5 +2,6 @@ .vercel .tubro out/ -dis/ -build/ \ No newline at end of file +dist/ +build/ +node_modules/ diff --git a/apps/space/Dockerfile.dev b/apps/space/Dockerfile.dev index f735e0d84..b915aad08 100644 --- a/apps/space/Dockerfile.dev +++ b/apps/space/Dockerfile.dev @@ -1,16 +1,19 @@ 
FROM node:22-alpine + RUN apk add --no-cache libc6-compat + # Set working directory WORKDIR /app COPY . . -RUN yarn global add turbo -RUN yarn install +RUN corepack enable pnpm && pnpm add -g turbo +RUN pnpm install -EXPOSE 4000 +EXPOSE 3002 ENV NEXT_PUBLIC_SPACE_BASE_PATH="/spaces" -VOLUME [ "/app/node_modules", "/app/space/node_modules"] -CMD ["yarn","dev", "--filter=space"] +VOLUME [ "/app/node_modules", "/app/apps/space/node_modules"] + +CMD ["pnpm", "dev", "--filter=space"] diff --git a/apps/space/Dockerfile.space b/apps/space/Dockerfile.space index 525d72f7b..570511b9d 100644 --- a/apps/space/Dockerfile.space +++ b/apps/space/Dockerfile.space @@ -1,5 +1,11 @@ +# syntax=docker/dockerfile:1.7 FROM node:22-alpine AS base +# Setup pnpm package manager with corepack and configure global bin directory for caching +ENV PNPM_HOME="/pnpm" +ENV PATH="$PNPM_HOME:$PATH" +RUN corepack enable + # ***************************************************************************** # STAGE 1: Build the project # ***************************************************************************** @@ -7,7 +13,8 @@ FROM base AS builder RUN apk add --no-cache libc6-compat WORKDIR /app -RUN yarn global add turbo +ARG TURBO_VERSION=2.5.6 +RUN corepack enable pnpm && pnpm add -g turbo@${TURBO_VERSION} COPY . . RUN turbo prune --scope=space --docker @@ -22,11 +29,13 @@ WORKDIR /app COPY .gitignore .gitignore COPY --from=builder /app/out/json/ . -COPY --from=builder /app/out/yarn.lock ./yarn.lock -RUN yarn install --network-timeout 500000 +COPY --from=builder /app/out/pnpm-lock.yaml ./pnpm-lock.yaml +RUN corepack enable pnpm +RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm fetch --store-dir=/pnpm/store COPY --from=builder /app/out/full/ . 
COPY turbo.json turbo.json +RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm install --offline --frozen-lockfile --store-dir=/pnpm/store ARG NEXT_PUBLIC_API_BASE_URL="" ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL @@ -49,7 +58,7 @@ ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL ENV NEXT_TELEMETRY_DISABLED=1 ENV TURBO_TELEMETRY_DISABLED=1 -RUN yarn turbo run build --filter=space +RUN pnpm turbo run build --filter=space # ***************************************************************************** # STAGE 3: Copy the project and start it @@ -91,4 +100,4 @@ ENV TURBO_TELEMETRY_DISABLED=1 EXPOSE 3000 -CMD ["node", "apps/space/server.js"] \ No newline at end of file +CMD ["node", "apps/space/server.js"] diff --git a/apps/space/app/[workspaceSlug]/[projectId]/page.ts b/apps/space/app/[workspaceSlug]/[projectId]/page.ts index 5fbb835dc..0badbe64f 100644 --- a/apps/space/app/[workspaceSlug]/[projectId]/page.ts +++ b/apps/space/app/[workspaceSlug]/[projectId]/page.ts @@ -10,7 +10,7 @@ type Props = { workspaceSlug: string; projectId: string; }; - searchParams: any; + searchParams: Record<"board" | "peekId", string | string[] | undefined>; }; export default async function IssuesPage(props: Props) { @@ -23,7 +23,7 @@ export default async function IssuesPage(props: Props) { try { response = await publishService.retrieveSettingsByProjectId(workspaceSlug, projectId); } catch (error) { - // redirect to 404 page on error + console.error("Error fetching project publish settings:", error); notFound(); } @@ -31,8 +31,8 @@ export default async function IssuesPage(props: Props) { if (response?.entity_name === "project") { url = `/issues/${response?.anchor}`; const params = new URLSearchParams(); - if (board) params.append("board", board); - if (peekId) params.append("peekId", peekId); + if (board) params.append("board", String(board)); + if (peekId) params.append("peekId", String(peekId)); if (params.toString()) url += `?${params.toString()}`; 
redirect(url); } else { diff --git a/apps/space/app/issues/[anchor]/client-layout.tsx b/apps/space/app/issues/[anchor]/client-layout.tsx index 0e24ab551..398591c4d 100644 --- a/apps/space/app/issues/[anchor]/client-layout.tsx +++ b/apps/space/app/issues/[anchor]/client-layout.tsx @@ -3,11 +3,13 @@ import { observer } from "mobx-react"; import useSWR from "swr"; // components -import { LogoSpinner, PoweredBy } from "@/components/common"; -import { IssuesNavbarRoot } from "@/components/issues"; +import { LogoSpinner } from "@/components/common/logo-spinner"; +import { PoweredBy } from "@/components/common/powered-by"; import { SomethingWentWrongError } from "@/components/issues/issue-layouts/error"; +import { IssuesNavbarRoot } from "@/components/issues/navbar"; // hooks -import { useIssueFilter, usePublish, usePublishList } from "@/hooks/store"; +import { usePublish, usePublishList } from "@/hooks/store/publish"; +import { useIssueFilter } from "@/hooks/store/use-issue-filter"; type Props = { children: React.ReactNode; @@ -39,7 +41,13 @@ export const IssuesClientLayout = observer((props: Props) => { : null ); - if (!publishSettings && !error) return ; + if (!publishSettings && !error) { + return ( +
+ +
+ ); + } if (error) return ; diff --git a/apps/space/app/issues/[anchor]/layout.tsx b/apps/space/app/issues/[anchor]/layout.tsx index 91631d6c0..46f187ddc 100644 --- a/apps/space/app/issues/[anchor]/layout.tsx +++ b/apps/space/app/issues/[anchor]/layout.tsx @@ -13,6 +13,11 @@ export async function generateMetadata({ params }: Props) { const { anchor } = params; const DEFAULT_TITLE = "Plane"; const DEFAULT_DESCRIPTION = "Made with Plane, an AI-powered work management platform with publishing capabilities."; + // Validate anchor before using in request (only allow alphanumeric, -, _) + const ANCHOR_REGEX = /^[a-zA-Z0-9_-]+$/; + if (!ANCHOR_REGEX.test(anchor)) { + return { title: DEFAULT_TITLE, description: DEFAULT_DESCRIPTION }; + } try { const response = await fetch(`${process.env.NEXT_PUBLIC_API_BASE_URL}/api/public/anchor/${anchor}/meta/`); const data = await response.json(); diff --git a/apps/space/app/issues/[anchor]/page.tsx b/apps/space/app/issues/[anchor]/page.tsx index 2bc37eecb..baff21324 100644 --- a/apps/space/app/issues/[anchor]/page.tsx +++ b/apps/space/app/issues/[anchor]/page.tsx @@ -4,9 +4,11 @@ import { observer } from "mobx-react"; import { useSearchParams } from "next/navigation"; import useSWR from "swr"; // components -import { IssuesLayoutsRoot } from "@/components/issues"; +import { IssuesLayoutsRoot } from "@/components/issues/issue-layouts"; // hooks -import { usePublish, useLabel, useStates } from "@/hooks/store"; +import { usePublish } from "@/hooks/store/publish"; +import { useLabel } from "@/hooks/store/use-label"; +import { useStates } from "@/hooks/store/use-state"; type Props = { params: { diff --git a/apps/space/app/page.tsx b/apps/space/app/page.tsx index a905f71b7..a75275e0d 100644 --- a/apps/space/app/page.tsx +++ b/apps/space/app/page.tsx @@ -2,16 +2,21 @@ import { observer } from "mobx-react"; // components -import { UserLoggedIn } from "@/components/account"; -import { LogoSpinner } from "@/components/common"; +import { 
UserLoggedIn } from "@/components/account/user-logged-in"; +import { LogoSpinner } from "@/components/common/logo-spinner"; import { AuthView } from "@/components/views"; // hooks -import { useUser } from "@/hooks/store"; +import { useUser } from "@/hooks/store/use-user"; const HomePage = observer(() => { - const { data: currentUser, isAuthenticated, isLoading } = useUser(); + const { data: currentUser, isAuthenticated, isInitializing } = useUser(); - if (isLoading) return ; + if (isInitializing) + return ( +
+ +
+ ); if (currentUser && isAuthenticated) return ; diff --git a/apps/space/app/provider.tsx b/apps/space/app/provider.tsx index e8566bc9d..af4940e24 100644 --- a/apps/space/app/provider.tsx +++ b/apps/space/app/provider.tsx @@ -1,6 +1,7 @@ "use client"; import { FC, ReactNode } from "react"; +import { ThemeProvider } from "next-themes"; // components import { TranslationProvider } from "@plane/i18n"; import { InstanceProvider } from "@/lib/instance-provider"; @@ -15,12 +16,14 @@ export const AppProvider: FC = (props) => { const { children } = props; return ( - - - - {children} - - - + + + + + {children} + + + + ); }; diff --git a/apps/space/app/views/[anchor]/layout.tsx b/apps/space/app/views/[anchor]/layout.tsx index 57b2971c4..e2a38071c 100644 --- a/apps/space/app/views/[anchor]/layout.tsx +++ b/apps/space/app/views/[anchor]/layout.tsx @@ -3,10 +3,11 @@ import { observer } from "mobx-react"; import useSWR from "swr"; // components -import { LogoSpinner, PoweredBy } from "@/components/common"; +import { LogoSpinner } from "@/components/common/logo-spinner"; +import { PoweredBy } from "@/components/common/powered-by"; import { SomethingWentWrongError } from "@/components/issues/issue-layouts/error"; // hooks -import { usePublish, usePublishList } from "@/hooks/store"; +import { usePublish, usePublishList } from "@/hooks/store/publish"; // Plane web import { ViewNavbarRoot } from "@/plane-web/components/navbar"; import { useView } from "@/plane-web/hooks/store"; @@ -18,7 +19,7 @@ type Props = { }; }; -const IssuesLayout = observer((props: Props) => { +const ViewsLayout = observer((props: Props) => { const { children, params } = props; // params const { anchor } = params; @@ -42,7 +43,13 @@ const IssuesLayout = observer((props: Props) => { if (error) return ; - if (!publishSettings || !viewData) return ; + if (!publishSettings || !viewData) { + return ( +
+ +
+ ); + } return (
@@ -55,4 +62,4 @@ const IssuesLayout = observer((props: Props) => { ); }); -export default IssuesLayout; +export default ViewsLayout; diff --git a/apps/space/app/views/[anchor]/page.tsx b/apps/space/app/views/[anchor]/page.tsx index 1efd95a53..5c877c89a 100644 --- a/apps/space/app/views/[anchor]/page.tsx +++ b/apps/space/app/views/[anchor]/page.tsx @@ -3,9 +3,9 @@ import { observer } from "mobx-react"; import { useSearchParams } from "next/navigation"; // components -import { PoweredBy } from "@/components/common"; +import { PoweredBy } from "@/components/common/powered-by"; // hooks -import { usePublish } from "@/hooks/store"; +import { usePublish } from "@/hooks/store/publish"; // plane-web import { ViewLayoutsRoot } from "@/plane-web/components/issue-layouts/root"; @@ -15,7 +15,7 @@ type Props = { }; }; -const IssuesPage = observer((props: Props) => { +const ViewsPage = observer((props: Props) => { const { params } = props; const { anchor } = params; // params @@ -34,4 +34,4 @@ const IssuesPage = observer((props: Props) => { ); }); -export default IssuesPage; +export default ViewsPage; diff --git a/apps/space/ce/components/issue-layouts/root.tsx b/apps/space/ce/components/issue-layouts/root.tsx index 5fa40fe11..028bf4e91 100644 --- a/apps/space/ce/components/issue-layouts/root.tsx +++ b/apps/space/ce/components/issue-layouts/root.tsx @@ -1,10 +1,9 @@ import { PageNotFound } from "@/components/ui/not-found"; -import { PublishStore } from "@/store/publish/publish.store"; +import type { PublishStore } from "@/store/publish/publish.store"; type Props = { peekId: string | undefined; publishSettings: PublishStore; }; -// eslint-disable-next-line @typescript-eslint/no-unused-vars -export const ViewLayoutsRoot = (props: Props) => ; +export const ViewLayoutsRoot = (_props: Props) => ; diff --git a/apps/space/ce/components/navbar/index.tsx b/apps/space/ce/components/navbar/index.tsx index 6e6fa4441..0d00777ce 100644 --- a/apps/space/ce/components/navbar/index.tsx +++ 
b/apps/space/ce/components/navbar/index.tsx @@ -1,4 +1,4 @@ -import { PublishStore } from "@/store/publish/publish.store"; +import type { PublishStore } from "@/store/publish/publish.store"; type Props = { publishSettings: PublishStore; diff --git a/apps/space/ce/hooks/use-editor-flagging.ts b/apps/space/ce/hooks/use-editor-flagging.ts new file mode 100644 index 000000000..7b4bc38c3 --- /dev/null +++ b/apps/space/ce/hooks/use-editor-flagging.ts @@ -0,0 +1,35 @@ +// editor +import { TExtensions } from "@plane/editor"; + +export type TEditorFlaggingHookReturnType = { + document: { + disabled: TExtensions[]; + flagged: TExtensions[]; + }; + liteText: { + disabled: TExtensions[]; + flagged: TExtensions[]; + }; + richText: { + disabled: TExtensions[]; + flagged: TExtensions[]; + }; +}; + +/** + * @description extensions disabled in various editors + */ +export const useEditorFlagging = (anchor: string): TEditorFlaggingHookReturnType => ({ + document: { + disabled: [], + flagged: [], + }, + liteText: { + disabled: [], + flagged: [], + }, + richText: { + disabled: [], + flagged: [], + }, +}); diff --git a/apps/space/core/components/account/auth-forms/auth-header.tsx b/apps/space/core/components/account/auth-forms/auth-header.tsx index 95a539ddf..f75dccecf 100644 --- a/apps/space/core/components/account/auth-forms/auth-header.tsx +++ b/apps/space/core/components/account/auth-forms/auth-header.tsx @@ -1,12 +1,11 @@ "use client"; -import { FC, ReactNode } from "react"; +import { FC } from "react"; // helpers import { EAuthModes } from "@/types/auth"; type TAuthHeader = { authMode: EAuthModes; - children: ReactNode; }; type TAuthHeaderContent = { @@ -30,7 +29,7 @@ const Titles: TAuthHeaderDetails = { }; export const AuthHeader: FC = (props) => { - const { authMode, children } = props; + const { authMode } = props; const getHeaderSubHeader = (mode: EAuthModes | null): TAuthHeaderContent => { if (mode) { @@ -38,7 +37,7 @@ export const AuthHeader: FC = (props) => { } return { - 
header: "Comment or react to work itemss", + header: "Comment or react to work items", subHeader: "Use plane to add your valuable inputs to features.", }; }; @@ -47,11 +46,10 @@ export const AuthHeader: FC = (props) => { return ( <> -
-

{header}

-

{subHeader}

+
+ {header} + {subHeader}
- {children} ); }; diff --git a/apps/space/core/components/account/auth-forms/auth-root.tsx b/apps/space/core/components/account/auth-forms/auth-root.tsx index 2ce944a25..e71a3a08d 100644 --- a/apps/space/core/components/account/auth-forms/auth-root.tsx +++ b/apps/space/core/components/account/auth-forms/auth-root.tsx @@ -2,20 +2,15 @@ import React, { FC, useEffect, useState } from "react"; import { observer } from "mobx-react"; +import Image from "next/image"; import { useSearchParams } from "next/navigation"; +import { useTheme } from "next-themes"; // plane imports +import { API_BASE_URL } from "@plane/constants"; import { SitesAuthService } from "@plane/services"; import { IEmailCheckData } from "@plane/types"; +import { OAuthOptions } from "@plane/ui"; // components -import { - AuthHeader, - AuthBanner, - AuthEmailForm, - AuthUniqueCodeForm, - AuthPasswordForm, - OAuthOptions, - TermsAndConditions, -} from "@/components/account"; // helpers import { EAuthenticationErrorCodes, @@ -24,9 +19,21 @@ import { authErrorHandler, } from "@/helpers/authentication.helper"; // hooks -import { useInstance } from "@/hooks/store"; +import { useInstance } from "@/hooks/store/use-instance"; // types import { EAuthModes, EAuthSteps } from "@/types/auth"; +// assets +import GithubLightLogo from "/public/logos/github-black.png"; +import GithubDarkLogo from "/public/logos/github-dark.svg"; +import GitlabLogo from "/public/logos/gitlab-logo.svg"; +import GoogleLogo from "/public/logos/google-logo.svg"; +// local imports +import { TermsAndConditions } from "../terms-and-conditions"; +import { AuthBanner } from "./auth-banner"; +import { AuthHeader } from "./auth-header"; +import { AuthEmailForm } from "./email"; +import { AuthPasswordForm } from "./password"; +import { AuthUniqueCodeForm } from "./unique-code"; const authService = new SitesAuthService(); @@ -36,6 +43,7 @@ export const AuthRoot: FC = observer(() => { const emailParam = searchParams.get("email") || undefined; const 
error_code = searchParams.get("error_code") || undefined; const nextPath = searchParams.get("next_path") || undefined; + const next_path = searchParams.get("next_path"); // states const [authMode, setAuthMode] = useState(EAuthModes.SIGN_UP); const [authStep, setAuthStep] = useState(EAuthSteps.EMAIL); @@ -43,6 +51,7 @@ export const AuthRoot: FC = observer(() => { const [errorInfo, setErrorInfo] = useState(undefined); const [isPasswordAutoset, setIsPasswordAutoset] = useState(true); // hooks + const { resolvedTheme } = useTheme(); const { config } = useInstance(); useEffect(() => { @@ -146,12 +155,54 @@ export const AuthRoot: FC = observer(() => { }); }; + const content = authMode === EAuthModes.SIGN_UP ? "Sign up" : "Sign in"; + + const OAuthConfig = [ + { + id: "google", + text: `${content} with Google`, + icon: Google Logo, + onClick: () => { + window.location.assign(`${API_BASE_URL}/auth/google/${next_path ? `?next_path=${next_path}` : ``}`); + }, + enabled: config?.is_google_enabled, + }, + { + id: "github", + text: `${content} with GitHub`, + icon: ( + GitHub Logo + ), + onClick: () => { + window.location.assign(`${API_BASE_URL}/auth/github/${next_path ? `?next_path=${next_path}` : ``}`); + }, + enabled: config?.is_github_enabled, + }, + { + id: "gitlab", + text: `${content} with GitLab`, + icon: GitLab Logo, + onClick: () => { + window.location.assign(`${API_BASE_URL}/auth/gitlab/${next_path ? `?next_path=${next_path}` : ``}`); + }, + enabled: config?.is_gitlab_enabled, + }, + ]; + return ( -
- +
+
{errorInfo && errorInfo?.type === EErrorAlertType.BANNER_ALERT && ( setErrorInfo(value)} /> )} + + {isOAuthEnabled && } + {authStep === EAuthSteps.EMAIL && } {authStep === EAuthSteps.UNIQUE_CODE && ( { }} /> )} - {isOAuthEnabled && } - +
); }); diff --git a/apps/space/core/components/account/auth-forms/email.tsx b/apps/space/core/components/account/auth-forms/email.tsx index 4815fef6a..6fb08ff7a 100644 --- a/apps/space/core/components/account/auth-forms/email.tsx +++ b/apps/space/core/components/account/auth-forms/email.tsx @@ -46,13 +46,13 @@ export const AuthEmailForm: FC = observer((props) => { return (
-