release: v0.26.0 #6962

This commit is contained in:
sriram veeraghanta 2025-04-28 18:24:37 +05:30 committed by GitHub
commit 461e099bbc
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
586 changed files with 25939 additions and 6120 deletions

View file

@ -273,7 +273,7 @@ jobs:
run: |
cp ./deploy/selfhost/install.sh deploy/selfhost/setup.sh
sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deploy/selfhost/docker-compose.yml
sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env
# sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deploy/selfhost/variables.env
- name: Create Release
id: create_release

7
.gitignore vendored
View file

@ -78,10 +78,17 @@ pnpm-workspace.yaml
.npmrc
.secrets
tmp/
## packages
dist
.temp/
deploy/selfhost/plane-app/
## Storybook
*storybook.log
output.css
dev-editor
# Redis
*.rdb
*.rdb.gz

View file

@ -15,6 +15,22 @@ Without said minimal reproduction, we won't be able to investigate all [issues](
You can open a new issue with this [issue form](https://github.com/makeplane/plane/issues/new).
### Naming conventions for issues
When opening a new issue, please use a clear and concise title that follows this format:
- For bugs: `🐛 Bug: [short description]`
- For features: `🚀 Feature: [short description]`
- For improvements: `🛠️ Improvement: [short description]`
- For documentation: `📘 Docs: [short description]`
**Examples:**
- `🐛 Bug: API token expiry time not saving correctly`
- `📘 Docs: Clarify RAM requirement for local setup`
- `🚀 Feature: Allow custom time selection for token expiration`
This helps us triage and manage issues more efficiently.
## Projects setup and Architecture
### Requirements
@ -23,6 +39,8 @@ You can open a new issue with this [issue form](https://github.com/makeplane/pla
- Python version 3.8+
- Postgres version v14
- Redis version v6.2.7
- **Memory**: Minimum **12 GB RAM** recommended
> ⚠️ Running the project on a system with only 8 GB RAM may lead to setup failures or memory crashes (especially during Docker container build/start or dependency install). Use cloud environments like GitHub Codespaces or upgrade local RAM if possible.
### Setup the project

View file

@ -43,9 +43,6 @@ NGINX_PORT=80
# Debug value for api server use it as 0 for production use
DEBUG=0
CORS_ALLOWED_ORIGINS="http://localhost"
# Error logs
SENTRY_DSN=""
SENTRY_ENVIRONMENT="development"
# Database Settings
POSTGRES_USER="plane"
POSTGRES_PASSWORD="plane"

View file

@ -16,10 +16,10 @@
</p>
<p align="center">
<a href="https://dub.sh/plane-website-readme"><b>Website</b></a>
<a href="https://git.new/releases"><b>Releases</b></a>
<a href="https://dub.sh/planepowershq"><b>Twitter</b></a>
<a href="https://dub.sh/planedocs"><b>Documentation</b></a>
<a href="https://plane.so/"><b>Website</b></a>
<a href="https://github.com/makeplane/plane/releases"><b>Releases</b></a>
<a href="https://twitter.com/planepowers"><b>Twitter</b></a>
<a href="https://docs.plane.so/"><b>Documentation</b></a>
</p>
<p>
@ -39,7 +39,7 @@
</a>
</p>
Meet [Plane](https://dub.sh/plane-website-readme), an open-source project management tool to track issues, run ~sprints~ cycles, and manage product roadmaps without the chaos of managing the tool itself. 🧘‍♀️
Meet [Plane](https://plane.so/), an open-source project management tool to track issues, run ~sprints~ cycles, and manage product roadmaps without the chaos of managing the tool itself. 🧘‍♀️
> Plane is evolving every day. Your suggestions, ideas, and reported bugs help us immensely. Do not hesitate to join in the conversation on [Discord](https://discord.com/invite/A92xrEGCge) or raise a GitHub issue. We read everything and respond to most.

View file

@ -43,6 +43,7 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
defaultValues: {
GITHUB_CLIENT_ID: config["GITHUB_CLIENT_ID"],
GITHUB_CLIENT_SECRET: config["GITHUB_CLIENT_SECRET"],
GITHUB_ORGANIZATION_ID: config["GITHUB_ORGANIZATION_ID"],
},
});
@ -93,6 +94,19 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
error: Boolean(errors.GITHUB_CLIENT_SECRET),
required: true,
},
{
key: "GITHUB_ORGANIZATION_ID",
type: "text",
label: "Organization ID",
description: (
<>
The organization github ID.
</>
),
placeholder: "123456789",
error: Boolean(errors.GITHUB_ORGANIZATION_ID),
required: false,
},
];
const GITHUB_SERVICE_FIELD: TCopyField[] = [
@ -150,6 +164,7 @@ export const InstanceGithubConfigForm: FC<Props> = (props) => {
reset({
GITHUB_CLIENT_ID: response.find((item) => item.key === "GITHUB_CLIENT_ID")?.value,
GITHUB_CLIENT_SECRET: response.find((item) => item.key === "GITHUB_CLIENT_SECRET")?.value,
GITHUB_ORGANIZATION_ID: response.find((item) => item.key === "GITHUB_ORGANIZATION_ID")?.value,
});
})
.catch((err) => console.error(err));

View file

@ -9,6 +9,19 @@ const nextConfig = {
unoptimized: true,
},
basePath: process.env.NEXT_PUBLIC_ADMIN_BASE_PATH || "",
transpilePackages: [
"@plane/constants",
"@plane/editor",
"@plane/hooks",
"@plane/i18n",
"@plane/logger",
"@plane/propel",
"@plane/services",
"@plane/shared-state",
"@plane/types",
"@plane/ui",
"@plane/utils",
],
};
module.exports = nextConfig;

View file

@ -1,7 +1,7 @@
{
"name": "admin",
"description": "Admin UI for Plane",
"version": "0.25.3",
"version": "0.26.0",
"license": "AGPL-3.0",
"private": true,
"scripts": {
@ -21,7 +21,6 @@
"@plane/ui": "*",
"@plane/utils": "*",
"@plane/services": "*",
"@sentry/nextjs": "^8.54.0",
"@tailwindcss/typography": "^0.5.9",
"@types/lodash": "^4.17.0",
"autoprefixer": "10.4.14",
@ -30,7 +29,7 @@
"lucide-react": "^0.469.0",
"mobx": "^6.12.0",
"mobx-react": "^9.1.1",
"next": "^14.2.20",
"next": "^14.2.26",
"next-themes": "^0.2.1",
"postcss": "^8.4.38",
"react": "^18.3.1",

View file

@ -145,11 +145,8 @@ RUN chmod +x /app/pg-setup.sh
# APPLICATION ENVIRONMENT SETTINGS
# *****************************************************************************
ENV APP_DOMAIN=localhost
ENV WEB_URL=http://${APP_DOMAIN}
ENV DEBUG=0
ENV SENTRY_DSN=
ENV SENTRY_ENVIRONMENT=production
ENV CORS_ALLOWED_ORIGINS=http://${APP_DOMAIN},https://${APP_DOMAIN}
# Secret Key
ENV SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5

View file

@ -3,10 +3,6 @@
DEBUG=0
CORS_ALLOWED_ORIGINS="http://localhost"
# Error logs
SENTRY_DSN=""
SENTRY_ENVIRONMENT="development"
# Database Settings
POSTGRES_USER="plane"
POSTGRES_PASSWORD="plane"

View file

View file

@ -1,6 +1,6 @@
{
"name": "plane-api",
"version": "0.25.3",
"version": "0.26.0",
"license": "AGPL-3.0",
"private": true,
"description": "API server powering Plane's backend"

View file

@ -1,4 +1,5 @@
# Third party imports
import pytz
from rest_framework import serializers
# Module imports
@ -18,6 +19,14 @@ class CycleSerializer(BaseSerializer):
completed_estimates = serializers.FloatField(read_only=True)
started_estimates = serializers.FloatField(read_only=True)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
project = self.context.get("project")
if project and project.timezone:
project_timezone = pytz.timezone(project.timezone)
self.fields["start_date"].timezone = project_timezone
self.fields["end_date"].timezone = project_timezone
def validate(self, data):
if (
data.get("start_date", None) is not None
@ -30,7 +39,15 @@ class CycleSerializer(BaseSerializer):
data.get("start_date", None) is not None
and data.get("end_date", None) is not None
):
project_id = self.initial_data.get("project_id") or self.instance.project_id
project_id = self.initial_data.get("project_id") or (
self.instance.project_id
if self.instance and hasattr(self.instance, "project_id")
else None
)
if not project_id:
raise serializers.ValidationError("Project ID is required")
is_start_date_end_date_equal = (
True
if str(data.get("start_date")) == str(data.get("end_date"))

View file

@ -16,7 +16,6 @@ class ProjectSerializer(BaseSerializer):
member_role = serializers.IntegerField(read_only=True)
is_deployed = serializers.BooleanField(read_only=True)
cover_image_url = serializers.CharField(read_only=True)
inbox_view = serializers.BooleanField(read_only=True, source="intake_view")
class Meta:
model = Project

View file

@ -4,16 +4,6 @@ from plane.api.views import IntakeIssueAPIEndpoint
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
IntakeIssueAPIEndpoint.as_view(),
name="inbox-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
IntakeIssueAPIEndpoint.as_view(),
name="inbox-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/",
IntakeIssueAPIEndpoint.as_view(),

View file

@ -39,7 +39,7 @@ from plane.db.models import (
UserFavorite,
)
from plane.utils.analytics_plot import burndown_plot
from plane.utils.host import base_host
from .base import BaseAPIView
from plane.bgtasks.webhook_task import model_activity
@ -137,10 +137,14 @@ class CycleAPIEndpoint(BaseAPIView):
)
def get(self, request, slug, project_id, pk=None):
project = Project.objects.get(workspace__slug=slug, pk=project_id)
if pk:
queryset = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
data = CycleSerializer(
queryset, fields=self.fields, expand=self.expand
queryset,
fields=self.fields,
expand=self.expand,
context={"project": project},
).data
return Response(data, status=status.HTTP_200_OK)
queryset = self.get_queryset().filter(archived_at__isnull=True)
@ -152,7 +156,11 @@ class CycleAPIEndpoint(BaseAPIView):
start_date__lte=timezone.now(), end_date__gte=timezone.now()
)
data = CycleSerializer(
queryset, many=True, fields=self.fields, expand=self.expand
queryset,
many=True,
fields=self.fields,
expand=self.expand,
context={"project": project},
).data
return Response(data, status=status.HTTP_200_OK)
@ -163,7 +171,11 @@ class CycleAPIEndpoint(BaseAPIView):
request=request,
queryset=(queryset),
on_results=lambda cycles: CycleSerializer(
cycles, many=True, fields=self.fields, expand=self.expand
cycles,
many=True,
fields=self.fields,
expand=self.expand,
context={"project": project},
).data,
)
@ -174,7 +186,11 @@ class CycleAPIEndpoint(BaseAPIView):
request=request,
queryset=(queryset),
on_results=lambda cycles: CycleSerializer(
cycles, many=True, fields=self.fields, expand=self.expand
cycles,
many=True,
fields=self.fields,
expand=self.expand,
context={"project": project},
).data,
)
@ -185,7 +201,11 @@ class CycleAPIEndpoint(BaseAPIView):
request=request,
queryset=(queryset),
on_results=lambda cycles: CycleSerializer(
cycles, many=True, fields=self.fields, expand=self.expand
cycles,
many=True,
fields=self.fields,
expand=self.expand,
context={"project": project},
).data,
)
@ -198,14 +218,22 @@ class CycleAPIEndpoint(BaseAPIView):
request=request,
queryset=(queryset),
on_results=lambda cycles: CycleSerializer(
cycles, many=True, fields=self.fields, expand=self.expand
cycles,
many=True,
fields=self.fields,
expand=self.expand,
context={"project": project},
).data,
)
return self.paginate(
request=request,
queryset=(queryset),
on_results=lambda cycles: CycleSerializer(
cycles, many=True, fields=self.fields, expand=self.expand
cycles,
many=True,
fields=self.fields,
expand=self.expand,
context={"project": project},
).data,
)
@ -251,7 +279,7 @@ class CycleAPIEndpoint(BaseAPIView):
current_instance=None,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -323,7 +351,7 @@ class CycleAPIEndpoint(BaseAPIView):
current_instance=current_instance,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -694,7 +722,7 @@ class CycleIssueAPIEndpoint(BaseAPIView):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
# Return all Cycle Issues
return Response(
@ -1168,7 +1196,7 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response({"message": "Success"}, status=status.HTTP_200_OK)

View file

@ -18,8 +18,9 @@ from plane.api.serializers import IntakeIssueSerializer, IssueSerializer
from plane.app.permissions import ProjectLitePermission
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import Intake, IntakeIssue, Issue, Project, ProjectMember, State
from plane.utils.host import base_host
from .base import BaseAPIView
from plane.db.models.intake import SourceType
class IntakeIssueAPIEndpoint(BaseAPIView):
@ -125,7 +126,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView):
intake_id=intake.id,
project_id=project_id,
issue=issue,
source=request.data.get("source", "IN-APP"),
source=SourceType.IN_APP,
)
# Create an Issue Activity
issue_activity.delay(
@ -297,7 +298,7 @@ class IntakeIssueAPIEndpoint(BaseAPIView):
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=False,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
intake=str(intake_issue.id),
)

View file

@ -56,6 +56,8 @@ from plane.db.models import (
from plane.settings.storage import S3Storage
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
from .base import BaseAPIView
from plane.utils.host import base_host
from plane.bgtasks.webhook_task import model_activity
class WorkspaceIssueAPIEndpoint(BaseAPIView):
@ -321,6 +323,17 @@ class IssueAPIEndpoint(BaseAPIView):
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
# Send the model activity
model_activity.delay(
model_name="issue",
model_id=str(serializer.data["id"]),
requested_data=request.data,
current_instance=None,
actor_id=request.user.id,
slug=slug,
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -1048,7 +1061,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
# Get the storage metadata
@ -1108,7 +1121,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
# Update the attachment

View file

@ -33,6 +33,7 @@ from plane.db.models import (
from .base import BaseAPIView
from plane.bgtasks.webhook_task import model_activity
from plane.utils.host import base_host
class ModuleAPIEndpoint(BaseAPIView):
@ -174,7 +175,7 @@ class ModuleAPIEndpoint(BaseAPIView):
current_instance=None,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
module = Module.objects.get(pk=serializer.data["id"])
serializer = ModuleSerializer(module)
@ -226,7 +227,7 @@ class ModuleAPIEndpoint(BaseAPIView):
current_instance=current_instance,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_200_OK)
@ -280,6 +281,7 @@ class ModuleAPIEndpoint(BaseAPIView):
project_id=str(project_id),
current_instance=json.dumps({"module_name": str(module.name)}),
epoch=int(timezone.now().timestamp()),
origin=base_host(request=request, is_app=True),
)
module.delete()
# Delete the module issues
@ -449,6 +451,7 @@ class ModuleIssueAPIEndpoint(BaseAPIView):
}
),
epoch=int(timezone.now().timestamp()),
origin=base_host(request=request, is_app=True),
)
return Response(

View file

@ -30,6 +30,7 @@ from plane.db.models import (
)
from plane.bgtasks.webhook_task import model_activity, webhook_activity
from .base import BaseAPIView
from plane.utils.host import base_host
class ProjectAPIEndpoint(BaseAPIView):
@ -228,7 +229,7 @@ class ProjectAPIEndpoint(BaseAPIView):
current_instance=None,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
serializer = ProjectSerializer(project)
@ -238,7 +239,7 @@ class ProjectAPIEndpoint(BaseAPIView):
if "already exists" in str(e):
return Response(
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
except Workspace.DoesNotExist:
return Response(
@ -247,7 +248,7 @@ class ProjectAPIEndpoint(BaseAPIView):
except ValidationError:
return Response(
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
def patch(self, request, slug, pk):
@ -258,9 +259,7 @@ class ProjectAPIEndpoint(BaseAPIView):
ProjectSerializer(project).data, cls=DjangoJSONEncoder
)
intake_view = request.data.get(
"inbox_view", request.data.get("intake_view", project.intake_view)
)
intake_view = request.data.get("intake_view", project.intake_view)
if project.archived_at:
return Response(
@ -297,7 +296,7 @@ class ProjectAPIEndpoint(BaseAPIView):
current_instance=current_instance,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
serializer = ProjectSerializer(project)
@ -307,7 +306,7 @@ class ProjectAPIEndpoint(BaseAPIView):
if "already exists" in str(e):
return Response(
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
except (Project.DoesNotExist, Workspace.DoesNotExist):
return Response(
@ -316,7 +315,7 @@ class ProjectAPIEndpoint(BaseAPIView):
except ValidationError:
return Response(
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
def delete(self, request, slug, pk):
@ -334,7 +333,7 @@ class ProjectAPIEndpoint(BaseAPIView):
new_value=None,
actor_id=request.user.id,
slug=slug,
current_site=request.META.get("HTTP_ORIGIN"),
current_site=base_host(request=request, is_app=True),
event_id=project.id,
old_identifier=None,
new_identifier=None,

View file

@ -1,5 +1,7 @@
from .base import BaseSerializer
from plane.db.models import APIToken, APIActivityLog
from rest_framework import serializers
from django.utils import timezone
class APITokenSerializer(BaseSerializer):
@ -17,10 +19,17 @@ class APITokenSerializer(BaseSerializer):
class APITokenReadSerializer(BaseSerializer):
is_active = serializers.SerializerMethodField()
class Meta:
model = APIToken
exclude = ("token",)
def get_is_active(self, obj: APIToken) -> bool:
if obj.expired_at is None:
return True
return timezone.now() < obj.expired_at
class APIActivityLogSerializer(BaseSerializer):
class Meta:

View file

@ -352,8 +352,19 @@ class IssueRelationSerializer(BaseSerializer):
"state_id",
"priority",
"assignee_ids",
"created_by",
"created_at",
"updated_at",
"updated_by",
]
read_only_fields = [
"workspace",
"project",
"created_by",
"created_at",
"updated_by",
"updated_at",
]
read_only_fields = ["workspace", "project"]
class RelatedIssueSerializer(BaseSerializer):
@ -383,8 +394,19 @@ class RelatedIssueSerializer(BaseSerializer):
"state_id",
"priority",
"assignee_ids",
"created_by",
"created_at",
"updated_by",
"updated_at",
]
read_only_fields = [
"workspace",
"project",
"created_by",
"created_at",
"updated_by",
"updated_at",
]
read_only_fields = ["workspace", "project"]
class IssueAssigneeSerializer(BaseSerializer):

View file

@ -1,7 +1,11 @@
from django.urls import path
from plane.app.views import IntakeViewSet, IntakeIssueViewSet
from plane.app.views import (
IntakeViewSet,
IntakeIssueViewSet,
IntakeWorkItemDescriptionVersionEndpoint,
)
urlpatterns = [
@ -53,4 +57,14 @@ urlpatterns = [
),
name="inbox-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/intake-work-items/<uuid:work_item_id>/description-versions/",
IntakeWorkItemDescriptionVersionEndpoint.as_view(),
name="intake-work-item-versions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/intake-work-items/<uuid:work_item_id>/description-versions/<uuid:pk>/",
IntakeWorkItemDescriptionVersionEndpoint.as_view(),
name="intake-work-item-versions",
),
]

View file

@ -25,7 +25,7 @@ from plane.app.views import (
IssueAttachmentV2Endpoint,
IssueBulkUpdateDateEndpoint,
IssueVersionEndpoint,
IssueDescriptionVersionEndpoint,
WorkItemDescriptionVersionEndpoint,
IssueMetaEndpoint,
IssueDetailIdentifierEndpoint,
)
@ -263,22 +263,22 @@ urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/versions/",
IssueVersionEndpoint.as_view(),
name="page-versions",
name="issue-versions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/versions/<uuid:pk>/",
IssueVersionEndpoint.as_view(),
name="page-versions",
name="issue-versions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/description-versions/",
IssueDescriptionVersionEndpoint.as_view(),
name="page-versions",
"workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:work_item_id>/description-versions/",
WorkItemDescriptionVersionEndpoint.as_view(),
name="work-item-versions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/description-versions/<uuid:pk>/",
IssueDescriptionVersionEndpoint.as_view(),
name="page-versions",
"workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:work_item_id>/description-versions/<uuid:pk>/",
WorkItemDescriptionVersionEndpoint.as_view(),
name="work-item-versions",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/meta/",

View file

@ -144,7 +144,7 @@ from .issue.sub_issue import SubIssuesEndpoint
from .issue.subscriber import IssueSubscriberViewSet
from .issue.version import IssueVersionEndpoint, IssueDescriptionVersionEndpoint
from .issue.version import IssueVersionEndpoint, WorkItemDescriptionVersionEndpoint
from .module.base import (
ModuleViewSet,
@ -184,7 +184,11 @@ from .estimate.base import (
EstimatePointEndpoint,
)
from .intake.base import IntakeViewSet, IntakeIssueViewSet
from .intake.base import (
IntakeViewSet,
IntakeIssueViewSet,
IntakeWorkItemDescriptionVersionEndpoint,
)
from .analytic.base import (
AnalyticsEndpoint,

View file

@ -137,7 +137,7 @@ class UserAssetsV2Endpoint(BaseAPIView):
if type not in allowed_types:
return Response(
{
"error": "Invalid file type. Only JPEG and PNG files are allowed.",
"error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
"status": False,
},
status=status.HTTP_400_BAD_REQUEST,
@ -351,7 +351,7 @@ class WorkspaceFileAssetEndpoint(BaseAPIView):
if type not in allowed_types:
return Response(
{
"error": "Invalid file type. Only JPEG and PNG files are allowed.",
"error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
"status": False,
},
status=status.HTTP_400_BAD_REQUEST,
@ -552,7 +552,7 @@ class ProjectAssetEndpoint(BaseAPIView):
if type not in allowed_types:
return Response(
{
"error": "Invalid file type. Only JPEG and PNG files are allowed.",
"error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
"status": False,
},
status=status.HTTP_400_BAD_REQUEST,
@ -683,7 +683,7 @@ class ProjectBulkAssetEndpoint(BaseAPIView):
# For some cases, the bulk api is called after the issue is deleted creating
# an integrity error
try:
assets.update(issue_id=entity_id)
assets.update(issue_id=entity_id, project_id=project_id)
except IntegrityError:
pass

View file

@ -51,8 +51,7 @@ from plane.db.models import (
)
from plane.utils.analytics_plot import burndown_plot
from plane.bgtasks.recent_visited_task import recent_visited_task
# Module imports
from plane.utils.host import base_host
from .. import BaseAPIView, BaseViewSet
from plane.bgtasks.webhook_task import model_activity
from plane.utils.timezone_converter import convert_to_utc, user_timezone_converter
@ -268,7 +267,7 @@ class CycleViewSet(BaseViewSet):
)
datetime_fields = ["start_date", "end_date"]
data = user_timezone_converter(
data, datetime_fields, request.user.user_timezone
data, datetime_fields, project_timezone
)
return Response(data, status=status.HTTP_200_OK)
@ -318,9 +317,13 @@ class CycleViewSet(BaseViewSet):
.first()
)
# Fetch the project timezone
project = Project.objects.get(id=self.kwargs.get("project_id"))
project_timezone = project.timezone
datetime_fields = ["start_date", "end_date"]
cycle = user_timezone_converter(
cycle, datetime_fields, request.user.user_timezone
cycle, datetime_fields, project_timezone
)
# Send the model activity
@ -331,7 +334,7 @@ class CycleViewSet(BaseViewSet):
current_instance=None,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(cycle, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -407,9 +410,13 @@ class CycleViewSet(BaseViewSet):
"created_by",
).first()
# Fetch the project timezone
project = Project.objects.get(id=self.kwargs.get("project_id"))
project_timezone = project.timezone
datetime_fields = ["start_date", "end_date"]
cycle = user_timezone_converter(
cycle, datetime_fields, request.user.user_timezone
cycle, datetime_fields, project_timezone
)
# Send the model activity
@ -420,7 +427,7 @@ class CycleViewSet(BaseViewSet):
current_instance=current_instance,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(cycle, status=status.HTTP_200_OK)
@ -480,10 +487,11 @@ class CycleViewSet(BaseViewSet):
)
queryset = queryset.first()
# Fetch the project timezone
project = Project.objects.get(id=self.kwargs.get("project_id"))
project_timezone = project.timezone
datetime_fields = ["start_date", "end_date"]
data = user_timezone_converter(
data, datetime_fields, request.user.user_timezone
)
data = user_timezone_converter(data, datetime_fields, project_timezone)
recent_visited_task.delay(
slug=slug,
@ -532,7 +540,7 @@ class CycleViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
# TODO: Soft delete the cycle break the onetoone relationship with cycle issue
cycle.delete()
@ -1071,7 +1079,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response({"message": "Success"}, status=status.HTTP_200_OK)

View file

@ -27,7 +27,7 @@ from plane.utils.issue_filters import issue_filters
from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
from plane.app.permissions import allow_permission, ROLE
from plane.utils.host import base_host
class CycleIssueViewSet(BaseViewSet):
serializer_class = CycleIssueSerializer
@ -291,7 +291,7 @@ class CycleIssueViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
@ -317,7 +317,7 @@ class CycleIssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
cycle_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

View file

@ -27,16 +27,24 @@ from plane.db.models import (
Project,
ProjectMember,
CycleIssue,
IssueDescriptionVersion,
)
from plane.app.serializers import (
IssueCreateSerializer,
IssueSerializer,
IssueDetailSerializer,
IntakeSerializer,
IntakeIssueSerializer,
IntakeIssueDetailSerializer,
IssueDescriptionVersionDetailSerializer,
)
from plane.utils.issue_filters import issue_filters
from plane.bgtasks.issue_activities_task import issue_activity
from plane.bgtasks.issue_description_version_task import issue_description_version_task
from plane.app.views.base import BaseAPIView
from plane.utils.timezone_converter import user_timezone_converter
from plane.utils.global_paginator import paginate
from plane.utils.host import base_host
from plane.db.models.intake import SourceType
class IntakeViewSet(BaseViewSet):
@ -87,7 +95,7 @@ class IntakeIssueViewSet(BaseViewSet):
serializer_class = IntakeIssueSerializer
model = IntakeIssue
filterset_fields = ["statulls"]
filterset_fields = ["status"]
def get_queryset(self):
return (
@ -218,7 +226,7 @@ class IntakeIssueViewSet(BaseViewSet):
workspace__slug=slug,
project_id=project_id,
member=request.user,
role=5,
role=ROLE.GUEST.value,
is_active=True,
).exists()
and not project.guest_view_all_features
@ -271,7 +279,7 @@ class IntakeIssueViewSet(BaseViewSet):
intake_id=intake_id.id,
project_id=project_id,
issue_id=serializer.data["id"],
source=request.data.get("source", "IN-APP"),
source=SourceType.IN_APP,
)
# Create an Issue Activity
issue_activity.delay(
@ -283,9 +291,16 @@ class IntakeIssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
intake=str(intake_issue.id),
)
# updated issue description version
issue_description_version_task.delay(
updated_issue=json.dumps(request.data, cls=DjangoJSONEncoder),
issue_id=str(serializer.data["id"]),
user_id=request.user.id,
is_creating=True,
)
intake_issue = (
IntakeIssue.objects.select_related("issue")
.prefetch_related("issue__labels", "issue__assignees")
@ -385,13 +400,15 @@ class IntakeIssueViewSet(BaseViewSet):
),
"description": issue_data.get("description", issue.description),
}
current_instance = json.dumps(
IssueDetailSerializer(issue).data, cls=DjangoJSONEncoder
)
issue_serializer = IssueCreateSerializer(
issue, data=issue_data, partial=True, context={"project_id": project_id}
)
if issue_serializer.is_valid():
current_instance = issue
# Log all the updates
requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
if issue is not None:
@ -401,15 +418,18 @@ class IntakeIssueViewSet(BaseViewSet):
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=json.dumps(
IssueSerializer(current_instance).data,
cls=DjangoJSONEncoder,
),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
intake=str(intake_issue.id),
)
# updated issue description version
issue_description_version_task.delay(
updated_issue=current_instance,
issue_id=str(pk),
user_id=request.user.id,
)
issue_serializer.save()
else:
return Response(
@ -467,7 +487,7 @@ class IntakeIssueViewSet(BaseViewSet):
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=False,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
intake=(intake_issue.id),
)
@ -549,7 +569,7 @@ class IntakeIssueViewSet(BaseViewSet):
workspace__slug=slug,
project_id=project_id,
member=request.user,
role=5,
role=ROLE.GUEST.value,
is_active=True,
).exists()
and not project.guest_view_all_features
@ -557,7 +577,7 @@ class IntakeIssueViewSet(BaseViewSet):
):
return Response(
{"error": "You are not allowed to view this issue"},
status=status.HTTP_400_BAD_REQUEST,
status=status.HTTP_403_FORBIDDEN,
)
issue = IntakeIssueDetailSerializer(intake_issue).data
return Response(issue, status=status.HTTP_200_OK)
@ -584,3 +604,80 @@ class IntakeIssueViewSet(BaseViewSet):
intake_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class IntakeWorkItemDescriptionVersionEndpoint(BaseAPIView):
    """Read-only endpoint for the saved description versions of an intake
    work item.

    GET without ``pk`` returns a cursor-paginated listing of version
    metadata; GET with ``pk`` returns one full version record.
    """

    def process_paginated_result(self, fields, results, timezone):
        # Project the page down to the requested columns and convert the
        # datetime fields into the requesting user's timezone.
        paginated_data = results.values(*fields)
        datetime_fields = ["created_at", "updated_at"]
        paginated_data = user_timezone_converter(
            paginated_data, datetime_fields, timezone
        )
        return paginated_data

    @allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
    def get(self, request, slug, project_id, work_item_id, pk=None):
        project = Project.objects.get(pk=project_id)
        issue = Issue.objects.get(
            workspace__slug=slug, project_id=project_id, pk=work_item_id
        )

        # Guests may only read versions of work items they themselves
        # created, unless the project grants guests full visibility.
        if (
            ProjectMember.objects.filter(
                workspace__slug=slug,
                project_id=project_id,
                member=request.user,
                role=ROLE.GUEST.value,
                is_active=True,
            ).exists()
            and not project.guest_view_all_features
            and not issue.created_by == request.user
        ):
            return Response(
                {"error": "You are not allowed to view this issue"},
                status=status.HTTP_403_FORBIDDEN,
            )

        if pk:
            # Detail view: a single description version.
            issue_description_version = IssueDescriptionVersion.objects.get(
                workspace__slug=slug,
                project_id=project_id,
                issue_id=work_item_id,
                pk=pk,
            )
            serializer = IssueDescriptionVersionDetailSerializer(
                issue_description_version
            )
            return Response(serializer.data, status=status.HTTP_200_OK)

        cursor = request.GET.get("cursor", None)
        required_fields = [
            "id",
            "workspace",
            "project",
            "issue",
            "last_saved_at",
            "owned_by",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
        ]
        # Order newest-first so cursor pagination is deterministic; this
        # matches WorkItemDescriptionVersionEndpoint, which orders the
        # identical queryset by "-created_at". Without an explicit
        # order_by the page boundaries depend on database row order.
        issue_description_versions_queryset = IssueDescriptionVersion.objects.filter(
            workspace__slug=slug, project_id=project_id, issue_id=work_item_id
        ).order_by("-created_at")
        paginated_data = paginate(
            base_queryset=issue_description_versions_queryset,
            queryset=issue_description_versions_queryset,
            cursor=cursor,
            on_result=lambda results: self.process_paginated_result(
                required_fields, results, request.user.user_timezone
            ),
        )
        return Response(paginated_data, status=status.HTTP_200_OK)

View file

@ -37,7 +37,7 @@ from plane.utils.order_queryset import order_issue_queryset
from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPaginator
from plane.app.permissions import allow_permission, ROLE
from plane.utils.error_codes import ERROR_CODES
from plane.utils.host import base_host
# Module imports
from .. import BaseViewSet, BaseAPIView
@ -259,7 +259,7 @@ class IssueArchiveViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
issue.archived_at = timezone.now().date()
issue.save()
@ -287,7 +287,7 @@ class IssueArchiveViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
issue.archived_at = None
issue.save()
@ -333,7 +333,7 @@ class BulkArchiveIssuesEndpoint(BaseAPIView):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
issue.archived_at = timezone.now().date()
bulk_archive_issues.append(issue)

View file

@ -21,7 +21,7 @@ from plane.bgtasks.issue_activities_task import issue_activity
from plane.app.permissions import allow_permission, ROLE
from plane.settings.storage import S3Storage
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
from plane.utils.host import base_host
class IssueAttachmentEndpoint(BaseAPIView):
serializer_class = IssueAttachmentSerializer
@ -48,7 +48,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -67,7 +67,7 @@ class IssueAttachmentEndpoint(BaseAPIView):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(status=status.HTTP_204_NO_CONTENT)
@ -155,7 +155,7 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(status=status.HTTP_204_NO_CONTENT)
@ -213,7 +213,7 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
# Update the attachment

View file

@ -45,6 +45,7 @@ from plane.db.models import (
ProjectMember,
CycleIssue,
UserRecentVisit,
ModuleIssue,
)
from plane.utils.grouper import (
issue_group_values,
@ -60,6 +61,7 @@ from plane.bgtasks.recent_visited_task import recent_visited_task
from plane.utils.global_paginator import paginate
from plane.bgtasks.webhook_task import model_activity
from plane.bgtasks.issue_description_version_task import issue_description_version_task
from plane.utils.host import base_host
class IssueListEndpoint(BaseAPIView):
@ -378,7 +380,7 @@ class IssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
issue = (
issue_queryset_grouper(
@ -428,7 +430,7 @@ class IssueViewSet(BaseViewSet):
current_instance=None,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
# updated issue description version
issue_description_version_task.delay(
@ -564,7 +566,7 @@ class IssueViewSet(BaseViewSet):
):
return Response(
{"error": "You are not allowed to view this issue"},
status=status.HTTP_400_BAD_REQUEST,
status=status.HTTP_403_FORBIDDEN,
)
recent_visited_task.delay(
@ -631,7 +633,7 @@ class IssueViewSet(BaseViewSet):
)
current_instance = json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder
IssueDetailSerializer(issue).data, cls=DjangoJSONEncoder
)
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
@ -649,7 +651,7 @@ class IssueViewSet(BaseViewSet):
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
model_activity.delay(
model_name="issue",
@ -658,7 +660,7 @@ class IssueViewSet(BaseViewSet):
current_instance=current_instance,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
# updated issue description version
issue_description_version_task.delay(
@ -690,7 +692,8 @@ class IssueViewSet(BaseViewSet):
current_instance={},
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
subscriber=False,
)
return Response(status=status.HTTP_204_NO_CONTENT)
@ -738,6 +741,13 @@ class BulkDeleteIssuesEndpoint(BaseAPIView):
total_issues = len(issues)
# First, delete all related cycle issues
CycleIssue.objects.filter(issue_id__in=issue_ids).delete()
# Then, delete all related module issues
ModuleIssue.objects.filter(issue_id__in=issue_ids).delete()
# Finally, delete the issues themselves
issues.delete()
return Response(
@ -1025,9 +1035,17 @@ class IssueBulkUpdateDateEndpoint(BaseAPIView):
"""
Validate that start date is before target date.
"""
from datetime import datetime
start = new_start or current_start
target = new_target or current_target
# Convert string dates to datetime objects if they're strings
if isinstance(start, str):
start = datetime.strptime(start, "%Y-%m-%d").date()
if isinstance(target, str):
target = datetime.strptime(target, "%Y-%m-%d").date()
if start and target and start > target:
return False
return True
@ -1269,7 +1287,7 @@ class IssueDetailIdentifierEndpoint(BaseAPIView):
):
return Response(
{"error": "You are not allowed to view this issue"},
status=status.HTTP_400_BAD_REQUEST,
status=status.HTTP_403_FORBIDDEN,
)
recent_visited_task.delay(

View file

@ -17,7 +17,7 @@ from plane.app.serializers import IssueCommentSerializer, CommentReactionSeriali
from plane.app.permissions import allow_permission, ROLE
from plane.db.models import IssueComment, ProjectMember, CommentReaction, Project, Issue
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.host import base_host
class IssueCommentViewSet(BaseViewSet):
serializer_class = IssueCommentSerializer
@ -87,7 +87,7 @@ class IssueCommentViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -105,7 +105,13 @@ class IssueCommentViewSet(BaseViewSet):
issue_comment, data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
if (
"comment_html" in request.data
and request.data["comment_html"] != issue_comment.comment_html
):
serializer.save(edited_at=timezone.now())
else:
serializer.save()
issue_activity.delay(
type="comment.activity.updated",
requested_data=requested_data,
@ -115,7 +121,7 @@ class IssueCommentViewSet(BaseViewSet):
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -138,7 +144,7 @@ class IssueCommentViewSet(BaseViewSet):
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(status=status.HTTP_204_NO_CONTENT)
@ -182,7 +188,7 @@ class CommentReactionViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -216,7 +222,7 @@ class CommentReactionViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
comment_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

View file

@ -15,7 +15,7 @@ from plane.app.serializers import IssueLinkSerializer
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import IssueLink
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.host import base_host
class IssueLinkViewSet(BaseViewSet):
permission_classes = [ProjectEntityPermission]
@ -52,7 +52,7 @@ class IssueLinkViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -77,7 +77,7 @@ class IssueLinkViewSet(BaseViewSet):
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -98,7 +98,7 @@ class IssueLinkViewSet(BaseViewSet):
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
issue_link.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

View file

@ -15,7 +15,7 @@ from plane.app.serializers import IssueReactionSerializer
from plane.app.permissions import allow_permission, ROLE
from plane.db.models import IssueReaction
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.host import base_host
class IssueReactionViewSet(BaseViewSet):
serializer_class = IssueReactionSerializer
@ -53,7 +53,7 @@ class IssueReactionViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -78,7 +78,7 @@ class IssueReactionViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
issue_reaction.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

View file

@ -27,7 +27,7 @@ from plane.db.models import (
)
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_relation_mapper import get_actual_relation
from plane.utils.host import base_host
class IssueRelationViewSet(BaseViewSet):
serializer_class = IssueRelationSerializer
@ -253,7 +253,7 @@ class IssueRelationViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
if relation_type in ["blocking", "start_after", "finish_after"]:
@ -290,6 +290,6 @@ class IssueRelationViewSet(BaseViewSet):
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
return Response(status=status.HTTP_204_NO_CONTENT)

View file

@ -22,7 +22,7 @@ from plane.db.models import Issue, IssueLink, FileAsset, CycleIssue
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.timezone_converter import user_timezone_converter
from collections import defaultdict
from plane.utils.host import base_host
class SubIssuesEndpoint(BaseAPIView):
permission_classes = [ProjectEntityPermission]
@ -176,7 +176,7 @@ class SubIssuesEndpoint(BaseAPIView):
current_instance=json.dumps({"parent": str(sub_issue_id)}),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
for sub_issue_id in sub_issue_ids
]

View file

@ -3,7 +3,13 @@ from rest_framework import status
from rest_framework.response import Response
# Module imports
from plane.db.models import IssueVersion, IssueDescriptionVersion
from plane.db.models import (
IssueVersion,
IssueDescriptionVersion,
Project,
ProjectMember,
Issue,
)
from ..base import BaseAPIView
from plane.app.serializers import (
IssueVersionDetailSerializer,
@ -66,7 +72,7 @@ class IssueVersionEndpoint(BaseAPIView):
return Response(paginated_data, status=status.HTTP_200_OK)
class IssueDescriptionVersionEndpoint(BaseAPIView):
class WorkItemDescriptionVersionEndpoint(BaseAPIView):
def process_paginated_result(self, fields, results, timezone):
paginated_data = results.values(*fields)
@ -78,10 +84,34 @@ class IssueDescriptionVersionEndpoint(BaseAPIView):
return paginated_data
@allow_permission(allowed_roles=[ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
def get(self, request, slug, project_id, issue_id, pk=None):
def get(self, request, slug, project_id, work_item_id, pk=None):
project = Project.objects.get(pk=project_id)
issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=work_item_id
)
if (
ProjectMember.objects.filter(
workspace__slug=slug,
project_id=project_id,
member=request.user,
role=ROLE.GUEST.value,
is_active=True,
).exists()
and not project.guest_view_all_features
and not issue.created_by == request.user
):
return Response(
{"error": "You are not allowed to view this issue"},
status=status.HTTP_403_FORBIDDEN,
)
if pk:
issue_description_version = IssueDescriptionVersion.objects.get(
workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk
workspace__slug=slug,
project_id=project_id,
issue_id=work_item_id,
pk=pk,
)
serializer = IssueDescriptionVersionDetailSerializer(
@ -105,8 +135,8 @@ class IssueDescriptionVersionEndpoint(BaseAPIView):
]
issue_description_versions_queryset = IssueDescriptionVersion.objects.filter(
workspace__slug=slug, project_id=project_id, issue_id=issue_id
)
workspace__slug=slug, project_id=project_id, issue_id=work_item_id
).order_by("-created_at")
paginated_data = paginate(
base_queryset=issue_description_versions_queryset,
queryset=issue_description_versions_queryset,

View file

@ -61,7 +61,7 @@ from plane.utils.timezone_converter import user_timezone_converter
from plane.bgtasks.webhook_task import model_activity
from .. import BaseAPIView, BaseViewSet
from plane.bgtasks.recent_visited_task import recent_visited_task
from plane.utils.host import base_host
class ModuleViewSet(BaseViewSet):
model = Module
@ -376,7 +376,7 @@ class ModuleViewSet(BaseViewSet):
current_instance=None,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
datetime_fields = ["created_at", "updated_at"]
module = user_timezone_converter(
@ -768,7 +768,7 @@ class ModuleViewSet(BaseViewSet):
current_instance=current_instance,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
datetime_fields = ["created_at", "updated_at"]
@ -795,7 +795,7 @@ class ModuleViewSet(BaseViewSet):
current_instance=json.dumps({"module_name": str(module.name)}),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
for issue in module_issues
]

View file

@ -34,7 +34,7 @@ from plane.utils.paginator import GroupedOffsetPaginator, SubGroupedOffsetPagina
# Module imports
from .. import BaseViewSet
from plane.utils.host import base_host
class ModuleIssueViewSet(BaseViewSet):
serializer_class = ModuleIssueSerializer
@ -221,7 +221,7 @@ class ModuleIssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
for issue in issues
]
@ -261,7 +261,7 @@ class ModuleIssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
for module in modules
]
@ -284,7 +284,7 @@ class ModuleIssueViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
module_issue.delete()
@ -309,7 +309,7 @@ class ModuleIssueViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
module_issue.delete()
return Response(status=status.HTTP_204_NO_CONTENT)

View file

@ -39,6 +39,7 @@ from plane.utils.cache import cache_response
from plane.bgtasks.webhook_task import model_activity, webhook_activity
from plane.bgtasks.recent_visited_task import recent_visited_task
from plane.utils.exception_logger import log_exception
from plane.utils.host import base_host
class ProjectViewSet(BaseViewSet):
@ -179,6 +180,7 @@ class ProjectViewSet(BaseViewSet):
"inbox_view",
"guest_view_all_features",
"project_lead",
"network",
"created_at",
"updated_at",
"created_by",
@ -330,7 +332,7 @@ class ProjectViewSet(BaseViewSet):
current_instance=None,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
serializer = ProjectListSerializer(project)
@ -340,7 +342,7 @@ class ProjectViewSet(BaseViewSet):
if "already exists" in str(e):
return Response(
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
except Workspace.DoesNotExist:
return Response(
@ -349,7 +351,7 @@ class ProjectViewSet(BaseViewSet):
except serializers.ValidationError:
return Response(
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
def partial_update(self, request, slug, pk=None):
@ -408,7 +410,7 @@ class ProjectViewSet(BaseViewSet):
current_instance=current_instance,
actor_id=request.user.id,
slug=slug,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
serializer = ProjectListSerializer(project)
return Response(serializer.data, status=status.HTTP_200_OK)
@ -418,7 +420,7 @@ class ProjectViewSet(BaseViewSet):
if "already exists" in str(e):
return Response(
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
except (Project.DoesNotExist, Workspace.DoesNotExist):
return Response(
@ -427,7 +429,7 @@ class ProjectViewSet(BaseViewSet):
except serializers.ValidationError:
return Response(
{"identifier": "The project identifier is already taken"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
def destroy(self, request, slug, pk):
@ -453,7 +455,7 @@ class ProjectViewSet(BaseViewSet):
new_value=None,
actor_id=request.user.id,
slug=slug,
current_site=request.META.get("HTTP_ORIGIN"),
current_site=base_host(request=request, is_app=True),
event_id=project.id,
old_identifier=None,
new_identifier=None,

View file

@ -16,18 +16,18 @@ from rest_framework.permissions import AllowAny
# Module imports
from .base import BaseViewSet, BaseAPIView
from plane.app.serializers import ProjectMemberInviteSerializer
from plane.app.permissions import allow_permission, ROLE
from plane.db.models import (
ProjectMember,
Workspace,
ProjectMemberInvite,
User,
WorkspaceMember,
Project,
IssueUserProperty,
)
from plane.db.models.project import ProjectNetwork
from plane.utils.host import base_host
class ProjectInvitationsViewset(BaseViewSet):
serializer_class = ProjectMemberInviteSerializer
@ -99,7 +99,7 @@ class ProjectInvitationsViewset(BaseViewSet):
project_invitations = ProjectMemberInvite.objects.bulk_create(
project_invitations, batch_size=10, ignore_conflicts=True
)
current_site = request.META.get("HTTP_ORIGIN")
current_site = base_host(request=request, is_app=True)
# Send invitations
for invitation in project_invitations:
@ -128,6 +128,7 @@ class UserProjectInvitationsViewset(BaseViewSet):
.select_related("workspace", "workspace__owner", "project")
)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="WORKSPACE")
def create(self, request, slug):
project_ids = request.data.get("project_ids", [])
@ -136,11 +137,20 @@ class UserProjectInvitationsViewset(BaseViewSet):
member=request.user, workspace__slug=slug, is_active=True
)
if workspace_member.role not in [ROLE.ADMIN.value, ROLE.MEMBER.value]:
return Response(
{"error": "You do not have permission to join the project"},
status=status.HTTP_403_FORBIDDEN,
)
# Get all the projects
projects = Project.objects.filter(
id__in=project_ids, workspace__slug=slug
).only("id", "network")
# Check if user has permission to join each project
for project in projects:
if (
project.network == ProjectNetwork.SECRET.value
and workspace_member.role != ROLE.ADMIN.value
):
return Response(
{"error": "Only workspace admins can join private project"},
status=status.HTTP_403_FORBIDDEN,
)
workspace_role = workspace_member.role
workspace = workspace_member.workspace

View file

@ -10,11 +10,7 @@ from plane.app.serializers import (
ProjectMemberRoleSerializer,
)
from plane.app.permissions import (
ProjectMemberPermission,
ProjectLitePermission,
WorkspaceUserPermission,
)
from plane.app.permissions import WorkspaceUserPermission
from plane.db.models import Project, ProjectMember, IssueUserProperty, WorkspaceMember
from plane.bgtasks.project_add_user_email_task import project_add_user_email
@ -26,14 +22,6 @@ class ProjectMemberViewSet(BaseViewSet):
serializer_class = ProjectMemberAdminSerializer
model = ProjectMember
def get_permissions(self):
if self.action == "leave":
self.permission_classes = [ProjectLitePermission]
else:
self.permission_classes = [ProjectMemberPermission]
return super(ProjectMemberViewSet, self).get_permissions()
search_fields = ["member__display_name", "member__first_name"]
def get_queryset(self):
@ -187,12 +175,20 @@ class ProjectMemberViewSet(BaseViewSet):
)
return Response(serializer.data, status=status.HTTP_200_OK)
@allow_permission([ROLE.ADMIN])
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
def partial_update(self, request, slug, project_id, pk):
project_member = ProjectMember.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id, is_active=True
)
if request.user.id == project_member.member_id:
# Fetch the workspace role of the project member
workspace_role = WorkspaceMember.objects.get(
workspace__slug=slug, member=project_member.member, is_active=True
).role
is_workspace_admin = workspace_role == ROLE.ADMIN.value
# Check if the user is not editing their own role if they are not an admin
if request.user.id == project_member.member_id and not is_workspace_admin:
return Response(
{"error": "You cannot update your own role"},
status=status.HTTP_400_BAD_REQUEST,
@ -205,9 +201,6 @@ class ProjectMemberViewSet(BaseViewSet):
is_active=True,
)
workspace_role = WorkspaceMember.objects.get(
workspace__slug=slug, member=project_member.member, is_active=True
).role
if workspace_role in [5] and int(
request.data.get("role", project_member.role)
) in [15, 20]:
@ -222,6 +215,7 @@ class ProjectMemberViewSet(BaseViewSet):
"role" in request.data
and int(request.data.get("role", project_member.role))
> requested_project_member.role
and not is_workspace_admin
):
return Response(
{"error": "You cannot update a role that is higher than your own role"},

View file

@ -21,221 +21,161 @@ class TimezoneEndpoint(APIView):
throttle_classes = [AuthenticationThrottle]
@method_decorator(cache_page(60 * 60 * 24))
@method_decorator(cache_page(60 * 60 * 2))
def get(self, request):
timezone_mapping = {
"-1100": [
("Midway Island", "Pacific/Midway"),
("American Samoa", "Pacific/Pago_Pago"),
],
"-1000": [
("Hawaii", "Pacific/Honolulu"),
("Aleutian Islands", "America/Adak"),
],
"-0930": [("Marquesas Islands", "Pacific/Marquesas")],
"-0900": [
("Alaska", "America/Anchorage"),
("Gambier Islands", "Pacific/Gambier"),
],
"-0800": [
("Pacific Time (US and Canada)", "America/Los_Angeles"),
("Baja California", "America/Tijuana"),
],
"-0700": [
("Mountain Time (US and Canada)", "America/Denver"),
("Arizona", "America/Phoenix"),
("Chihuahua, Mazatlan", "America/Chihuahua"),
],
"-0600": [
("Central Time (US and Canada)", "America/Chicago"),
("Saskatchewan", "America/Regina"),
("Guadalajara, Mexico City, Monterrey", "America/Mexico_City"),
("Tegucigalpa, Honduras", "America/Tegucigalpa"),
("Costa Rica", "America/Costa_Rica"),
],
"-0500": [
("Eastern Time (US and Canada)", "America/New_York"),
("Lima", "America/Lima"),
("Bogota", "America/Bogota"),
("Quito", "America/Guayaquil"),
("Chetumal", "America/Cancun"),
],
"-0430": [("Caracas (Old Venezuela Time)", "America/Caracas")],
"-0400": [
("Atlantic Time (Canada)", "America/Halifax"),
("Caracas", "America/Caracas"),
("Santiago", "America/Santiago"),
("La Paz", "America/La_Paz"),
("Manaus", "America/Manaus"),
("Georgetown", "America/Guyana"),
("Bermuda", "Atlantic/Bermuda"),
],
"-0330": [("Newfoundland Time (Canada)", "America/St_Johns")],
"-0300": [
("Buenos Aires", "America/Argentina/Buenos_Aires"),
("Brasilia", "America/Sao_Paulo"),
("Greenland", "America/Godthab"),
("Montevideo", "America/Montevideo"),
("Falkland Islands", "Atlantic/Stanley"),
],
"-0200": [
(
"South Georgia and the South Sandwich Islands",
"Atlantic/South_Georgia",
)
],
"-0100": [
("Azores", "Atlantic/Azores"),
("Cape Verde Islands", "Atlantic/Cape_Verde"),
],
"+0000": [
("Dublin", "Europe/Dublin"),
("Reykjavik", "Atlantic/Reykjavik"),
("Lisbon", "Europe/Lisbon"),
("Monrovia", "Africa/Monrovia"),
("Casablanca", "Africa/Casablanca"),
],
"+0100": [
("Central European Time (Berlin, Rome, Paris)", "Europe/Paris"),
("West Central Africa", "Africa/Lagos"),
("Algiers", "Africa/Algiers"),
("Lagos", "Africa/Lagos"),
("Tunis", "Africa/Tunis"),
],
"+0200": [
("Eastern European Time (Cairo, Helsinki, Kyiv)", "Europe/Kiev"),
("Athens", "Europe/Athens"),
("Jerusalem", "Asia/Jerusalem"),
("Johannesburg", "Africa/Johannesburg"),
("Harare, Pretoria", "Africa/Harare"),
],
"+0300": [
("Moscow Time", "Europe/Moscow"),
("Baghdad", "Asia/Baghdad"),
("Nairobi", "Africa/Nairobi"),
("Kuwait, Riyadh", "Asia/Riyadh"),
],
"+0330": [("Tehran", "Asia/Tehran")],
"+0400": [
("Abu Dhabi", "Asia/Dubai"),
("Baku", "Asia/Baku"),
("Yerevan", "Asia/Yerevan"),
("Astrakhan", "Europe/Astrakhan"),
("Tbilisi", "Asia/Tbilisi"),
("Mauritius", "Indian/Mauritius"),
],
"+0500": [
("Islamabad", "Asia/Karachi"),
("Karachi", "Asia/Karachi"),
("Tashkent", "Asia/Tashkent"),
("Yekaterinburg", "Asia/Yekaterinburg"),
("Maldives", "Indian/Maldives"),
("Chagos", "Indian/Chagos"),
],
"+0530": [
("Chennai", "Asia/Kolkata"),
("Kolkata", "Asia/Kolkata"),
("Mumbai", "Asia/Kolkata"),
("New Delhi", "Asia/Kolkata"),
("Sri Jayawardenepura", "Asia/Colombo"),
],
"+0545": [("Kathmandu", "Asia/Kathmandu")],
"+0600": [
("Dhaka", "Asia/Dhaka"),
("Almaty", "Asia/Almaty"),
("Bishkek", "Asia/Bishkek"),
("Thimphu", "Asia/Thimphu"),
],
"+0630": [
("Yangon (Rangoon)", "Asia/Yangon"),
("Cocos Islands", "Indian/Cocos"),
],
"+0700": [
("Bangkok", "Asia/Bangkok"),
("Hanoi", "Asia/Ho_Chi_Minh"),
("Jakarta", "Asia/Jakarta"),
("Novosibirsk", "Asia/Novosibirsk"),
("Krasnoyarsk", "Asia/Krasnoyarsk"),
],
"+0800": [
("Beijing", "Asia/Shanghai"),
("Singapore", "Asia/Singapore"),
("Perth", "Australia/Perth"),
("Hong Kong", "Asia/Hong_Kong"),
("Ulaanbaatar", "Asia/Ulaanbaatar"),
("Palau", "Pacific/Palau"),
],
"+0845": [("Eucla", "Australia/Eucla")],
"+0900": [
("Tokyo", "Asia/Tokyo"),
("Seoul", "Asia/Seoul"),
("Yakutsk", "Asia/Yakutsk"),
],
"+0930": [
("Adelaide", "Australia/Adelaide"),
("Darwin", "Australia/Darwin"),
],
"+1000": [
("Sydney", "Australia/Sydney"),
("Brisbane", "Australia/Brisbane"),
("Guam", "Pacific/Guam"),
("Vladivostok", "Asia/Vladivostok"),
("Tahiti", "Pacific/Tahiti"),
],
"+1030": [("Lord Howe Island", "Australia/Lord_Howe")],
"+1100": [
("Solomon Islands", "Pacific/Guadalcanal"),
("Magadan", "Asia/Magadan"),
("Norfolk Island", "Pacific/Norfolk"),
("Bougainville Island", "Pacific/Bougainville"),
("Chokurdakh", "Asia/Srednekolymsk"),
],
"+1200": [
("Auckland", "Pacific/Auckland"),
("Wellington", "Pacific/Auckland"),
("Fiji Islands", "Pacific/Fiji"),
("Anadyr", "Asia/Anadyr"),
],
"+1245": [("Chatham Islands", "Pacific/Chatham")],
"+1300": [("Nuku'alofa", "Pacific/Tongatapu"), ("Samoa", "Pacific/Apia")],
"+1400": [("Kiritimati Island", "Pacific/Kiritimati")],
}
# Curated list of (display label, IANA tz database identifier) pairs used to
# populate timezone pickers. Entries are ordered by base UTC offset, west to
# east; the trailing comment on each entry records the base offset (and the
# DST offset, where one applies). Several labels intentionally share one
# identifier (e.g. Chennai/Kolkata/Mumbai/New Delhi -> Asia/Kolkata).
timezone_locations = [
    ('Midway Island', 'Pacific/Midway'),  # UTC-11:00
    ('American Samoa', 'Pacific/Pago_Pago'),  # UTC-11:00
    ('Hawaii', 'Pacific/Honolulu'),  # UTC-10:00
    ('Aleutian Islands', 'America/Adak'),  # UTC-10:00 (DST: UTC-09:00)
    ('Marquesas Islands', 'Pacific/Marquesas'),  # UTC-09:30
    ('Alaska', 'America/Anchorage'),  # UTC-09:00 (DST: UTC-08:00)
    ('Gambier Islands', 'Pacific/Gambier'),  # UTC-09:00
    ('Pacific Time (US and Canada)', 'America/Los_Angeles'),  # UTC-08:00 (DST: UTC-07:00)
    ('Baja California', 'America/Tijuana'),  # UTC-08:00 (DST: UTC-07:00)
    ('Mountain Time (US and Canada)', 'America/Denver'),  # UTC-07:00 (DST: UTC-06:00)
    ('Arizona', 'America/Phoenix'),  # UTC-07:00
    ('Chihuahua, Mazatlan', 'America/Chihuahua'),  # UTC-07:00 (DST: UTC-06:00)
    ('Central Time (US and Canada)', 'America/Chicago'),  # UTC-06:00 (DST: UTC-05:00)
    ('Saskatchewan', 'America/Regina'),  # UTC-06:00
    ('Guadalajara, Mexico City, Monterrey', 'America/Mexico_City'),  # UTC-06:00 (DST: UTC-05:00)
    ('Tegucigalpa, Honduras', 'America/Tegucigalpa'),  # UTC-06:00
    ('Costa Rica', 'America/Costa_Rica'),  # UTC-06:00
    ('Eastern Time (US and Canada)', 'America/New_York'),  # UTC-05:00 (DST: UTC-04:00)
    ('Lima', 'America/Lima'),  # UTC-05:00
    ('Bogota', 'America/Bogota'),  # UTC-05:00
    ('Quito', 'America/Guayaquil'),  # UTC-05:00
    ('Chetumal', 'America/Cancun'),  # UTC-05:00 (DST: UTC-04:00)
    # NOTE(review): Venezuela used UTC-04:30 only between 2007 and 2016; the
    # tz database resolves America/Caracas to its current offset, so this
    # "Old Venezuela Time" label will display the same offset as 'Caracas'
    # below — confirm the duplicate entry is intentional.
    ('Caracas (Old Venezuela Time)', 'America/Caracas'),  # UTC-04:30
    ('Atlantic Time (Canada)', 'America/Halifax'),  # UTC-04:00 (DST: UTC-03:00)
    ('Caracas', 'America/Caracas'),  # UTC-04:00
    ('Santiago', 'America/Santiago'),  # UTC-04:00 (DST: UTC-03:00)
    ('La Paz', 'America/La_Paz'),  # UTC-04:00
    ('Manaus', 'America/Manaus'),  # UTC-04:00
    ('Georgetown', 'America/Guyana'),  # UTC-04:00
    ('Bermuda', 'Atlantic/Bermuda'),  # UTC-04:00 (DST: UTC-03:00)
    ('Newfoundland Time (Canada)', 'America/St_Johns'),  # UTC-03:30 (DST: UTC-02:30)
    ('Buenos Aires', 'America/Argentina/Buenos_Aires'),  # UTC-03:00
    ('Brasilia', 'America/Sao_Paulo'),  # UTC-03:00
    ('Greenland', 'America/Godthab'),  # UTC-03:00 (DST: UTC-02:00)
    ('Montevideo', 'America/Montevideo'),  # UTC-03:00
    ('Falkland Islands', 'Atlantic/Stanley'),  # UTC-03:00
    ('South Georgia and the South Sandwich Islands', 'Atlantic/South_Georgia'),  # UTC-02:00
    ('Azores', 'Atlantic/Azores'),  # UTC-01:00 (DST: UTC+00:00)
    ('Cape Verde Islands', 'Atlantic/Cape_Verde'),  # UTC-01:00
    ('Dublin', 'Europe/Dublin'),  # UTC+00:00 (DST: UTC+01:00)
    ('Reykjavik', 'Atlantic/Reykjavik'),  # UTC+00:00
    ('Lisbon', 'Europe/Lisbon'),  # UTC+00:00 (DST: UTC+01:00)
    ('Monrovia', 'Africa/Monrovia'),  # UTC+00:00
    ('Casablanca', 'Africa/Casablanca'),  # UTC+00:00 (DST: UTC+01:00)
    ('Central European Time (Berlin, Rome, Paris)', 'Europe/Paris'),  # UTC+01:00 (DST: UTC+02:00)
    ('West Central Africa', 'Africa/Lagos'),  # UTC+01:00
    ('Algiers', 'Africa/Algiers'),  # UTC+01:00
    ('Lagos', 'Africa/Lagos'),  # UTC+01:00
    ('Tunis', 'Africa/Tunis'),  # UTC+01:00
    ('Eastern European Time (Cairo, Helsinki, Kyiv)', 'Europe/Kiev'),  # UTC+02:00 (DST: UTC+03:00)
    ('Athens', 'Europe/Athens'),  # UTC+02:00 (DST: UTC+03:00)
    ('Jerusalem', 'Asia/Jerusalem'),  # UTC+02:00 (DST: UTC+03:00)
    ('Johannesburg', 'Africa/Johannesburg'),  # UTC+02:00
    ('Harare, Pretoria', 'Africa/Harare'),  # UTC+02:00
    ('Moscow Time', 'Europe/Moscow'),  # UTC+03:00
    ('Baghdad', 'Asia/Baghdad'),  # UTC+03:00
    ('Nairobi', 'Africa/Nairobi'),  # UTC+03:00
    ('Kuwait, Riyadh', 'Asia/Riyadh'),  # UTC+03:00
    ('Tehran', 'Asia/Tehran'),  # UTC+03:30 (DST: UTC+04:30)
    ('Abu Dhabi', 'Asia/Dubai'),  # UTC+04:00
    ('Baku', 'Asia/Baku'),  # UTC+04:00 (DST: UTC+05:00)
    ('Yerevan', 'Asia/Yerevan'),  # UTC+04:00 (DST: UTC+05:00)
    ('Astrakhan', 'Europe/Astrakhan'),  # UTC+04:00
    ('Tbilisi', 'Asia/Tbilisi'),  # UTC+04:00
    ('Mauritius', 'Indian/Mauritius'),  # UTC+04:00
    ('Islamabad', 'Asia/Karachi'),  # UTC+05:00
    ('Karachi', 'Asia/Karachi'),  # UTC+05:00
    ('Tashkent', 'Asia/Tashkent'),  # UTC+05:00
    ('Yekaterinburg', 'Asia/Yekaterinburg'),  # UTC+05:00
    ('Maldives', 'Indian/Maldives'),  # UTC+05:00
    ('Chagos', 'Indian/Chagos'),  # UTC+05:00
    ('Chennai', 'Asia/Kolkata'),  # UTC+05:30
    ('Kolkata', 'Asia/Kolkata'),  # UTC+05:30
    ('Mumbai', 'Asia/Kolkata'),  # UTC+05:30
    ('New Delhi', 'Asia/Kolkata'),  # UTC+05:30
    ('Sri Jayawardenepura', 'Asia/Colombo'),  # UTC+05:30
    ('Kathmandu', 'Asia/Kathmandu'),  # UTC+05:45
    ('Dhaka', 'Asia/Dhaka'),  # UTC+06:00
    ('Almaty', 'Asia/Almaty'),  # UTC+06:00
    ('Bishkek', 'Asia/Bishkek'),  # UTC+06:00
    ('Thimphu', 'Asia/Thimphu'),  # UTC+06:00
    ('Yangon (Rangoon)', 'Asia/Yangon'),  # UTC+06:30
    ('Cocos Islands', 'Indian/Cocos'),  # UTC+06:30
    ('Bangkok', 'Asia/Bangkok'),  # UTC+07:00
    ('Hanoi', 'Asia/Ho_Chi_Minh'),  # UTC+07:00
    ('Jakarta', 'Asia/Jakarta'),  # UTC+07:00
    ('Novosibirsk', 'Asia/Novosibirsk'),  # UTC+07:00
    ('Krasnoyarsk', 'Asia/Krasnoyarsk'),  # UTC+07:00
    ('Beijing', 'Asia/Shanghai'),  # UTC+08:00
    ('Singapore', 'Asia/Singapore'),  # UTC+08:00
    ('Perth', 'Australia/Perth'),  # UTC+08:00
    ('Hong Kong', 'Asia/Hong_Kong'),  # UTC+08:00
    ('Ulaanbaatar', 'Asia/Ulaanbaatar'),  # UTC+08:00
    ('Palau', 'Pacific/Palau'),  # UTC+08:00
    ('Eucla', 'Australia/Eucla'),  # UTC+08:45
    ('Tokyo', 'Asia/Tokyo'),  # UTC+09:00
    ('Seoul', 'Asia/Seoul'),  # UTC+09:00
    ('Yakutsk', 'Asia/Yakutsk'),  # UTC+09:00
    ('Adelaide', 'Australia/Adelaide'),  # UTC+09:30 (DST: UTC+10:30)
    ('Darwin', 'Australia/Darwin'),  # UTC+09:30
    ('Sydney', 'Australia/Sydney'),  # UTC+10:00 (DST: UTC+11:00)
    ('Brisbane', 'Australia/Brisbane'),  # UTC+10:00
    ('Guam', 'Pacific/Guam'),  # UTC+10:00
    ('Vladivostok', 'Asia/Vladivostok'),  # UTC+10:00
    # NOTE(review): Pacific/Tahiti is UTC-10:00 in the tz database, not
    # UTC+10:00 — this entry's identifier or placement looks wrong; verify.
    ('Tahiti', 'Pacific/Tahiti'),  # UTC+10:00
    ('Lord Howe Island', 'Australia/Lord_Howe'),  # UTC+10:30 (DST: UTC+11:00)
    ('Solomon Islands', 'Pacific/Guadalcanal'),  # UTC+11:00
    ('Magadan', 'Asia/Magadan'),  # UTC+11:00
    ('Norfolk Island', 'Pacific/Norfolk'),  # UTC+11:00
    ('Bougainville Island', 'Pacific/Bougainville'),  # UTC+11:00
    # NOTE(review): label says Chokurdakh but the identifier is
    # Asia/Srednekolymsk (Asia/Chokurdakh? exists separately) — confirm intent.
    ('Chokurdakh', 'Asia/Srednekolymsk'),  # UTC+11:00
    ('Auckland', 'Pacific/Auckland'),  # UTC+12:00 (DST: UTC+13:00)
    ('Wellington', 'Pacific/Auckland'),  # UTC+12:00 (DST: UTC+13:00)
    ('Fiji Islands', 'Pacific/Fiji'),  # UTC+12:00 (DST: UTC+13:00)
    ('Anadyr', 'Asia/Anadyr'),  # UTC+12:00
    ('Chatham Islands', 'Pacific/Chatham'),  # UTC+12:45 (DST: UTC+13:45)
    ("Nuku'alofa", 'Pacific/Tongatapu'),  # UTC+13:00
    ('Samoa', 'Pacific/Apia'),  # UTC+13:00 (DST: UTC+14:00)
    ('Kiritimati Island', 'Pacific/Kiritimati')  # UTC+14:00
]
# Build the list of timezone choices from timezone_locations, resolving each
# IANA identifier with pytz and formatting its *current* UTC offset (so DST is
# reflected at request time).
timezone_list = []
now = datetime.now()
# Process each (display label, IANA identifier) pair
for friendly_name, tz_identifier in timezone_locations:
    try:
        tz = pytz.timezone(tz_identifier)
        # strftime("%z") yields e.g. "+0530"/"-0800"; int() of it gives a
        # sortable numeric key (530, -800, ...)
        current_offset = now.astimezone(tz).strftime("%z")
        # Format the current UTC offset as "+HH:MM" / "-HH:MM".
        # Compute hours/minutes from the absolute number of seconds and carry
        # the sign separately: floor-dividing a negative total_seconds would
        # misreport half-hour offsets (e.g. -09:30 would become "-10:30").
        current_utc_offset = now.astimezone(tz).utcoffset()
        total_seconds = int(current_utc_offset.total_seconds())
        total_minutes = abs(total_seconds) // 60
        hours_offset = total_minutes // 60
        minutes_offset = total_minutes % 60
        offset = (
            f"{'+' if total_seconds >= 0 else '-'}"
            f"{hours_offset:02}:{minutes_offset:02}"
        )
        timezone_value = {
            "offset": int(current_offset),
            "utc_offset": f"UTC{offset}",
            "gmt_offset": f"GMT{offset}",
            "value": tz_identifier,
            "label": f"{friendly_name}",
        }
        timezone_list.append(timezone_value)
    except pytz.exceptions.UnknownTimeZoneError:
        # Skip identifiers the installed tz database does not know about
        continue
# Sort by numeric offset first, then alphabetically by label
timezone_list.sort(key=lambda x: (x["offset"], x["label"]))

View file

@ -432,7 +432,7 @@ class IssueViewViewSet(BaseViewSet):
):
return Response(
{"error": "You are not allowed to view this issue"},
status=status.HTTP_400_BAD_REQUEST,
status=status.HTTP_403_FORBIDDEN,
)
serializer = IssueViewSerializer(issue_view)

View file

@ -29,7 +29,7 @@ class WebhookEndpoint(BaseAPIView):
if "already exists" in str(e):
return Response(
{"error": "URL already exists for the workspace"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
raise IntegrityError

View file

@ -119,7 +119,9 @@ class WorkSpaceViewSet(BaseViewSet):
)
# Get total members and role
total_members=WorkspaceMember.objects.filter(workspace_id=serializer.data["id"]).count()
total_members = WorkspaceMember.objects.filter(
workspace_id=serializer.data["id"]
).count()
data = serializer.data
data["total_members"] = total_members
data["role"] = 20
@ -134,7 +136,7 @@ class WorkSpaceViewSet(BaseViewSet):
if "already exists" in str(e):
return Response(
{"slug": "The workspace with the slug already exists"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST], level="WORKSPACE")
@ -167,10 +169,9 @@ class UserWorkSpacesEndpoint(BaseAPIView):
.values("count")
)
role = (
WorkspaceMember.objects.filter(workspace=OuterRef("id"), member=request.user, is_active=True)
.values("role")
)
role = WorkspaceMember.objects.filter(
workspace=OuterRef("id"), member=request.user, is_active=True
).values("role")
workspace = (
Workspace.objects.prefetch_related(

View file

@ -36,7 +36,7 @@ from plane.db.models import (
from .. import BaseViewSet
from plane.bgtasks.issue_activities_task import issue_activity
from plane.utils.issue_filters import issue_filters
from plane.utils.host import base_host
class WorkspaceDraftIssueViewSet(BaseViewSet):
model = DraftIssue
@ -241,7 +241,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
if request.data.get("cycle_id", None):
@ -270,7 +270,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
if request.data.get("module_ids", []):
@ -300,7 +300,7 @@ class WorkspaceDraftIssueViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
origin=base_host(request=request, is_app=True),
)
for module in request.data.get("module_ids", [])
]

View file

@ -7,7 +7,6 @@ import jwt
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.db.models import Count
from django.utils import timezone
# Third party modules
@ -26,7 +25,8 @@ from plane.bgtasks.event_tracking_task import workspace_invite_event
from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.db.models import User, Workspace, WorkspaceMember, WorkspaceMemberInvite
from plane.utils.cache import invalidate_cache, invalidate_cache_directly
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip
from .. import BaseViewSet
@ -122,7 +122,7 @@ class WorkspaceInvitationsViewset(BaseViewSet):
workspace_invitations, batch_size=10, ignore_conflicts=True
)
current_site = request.META.get("HTTP_ORIGIN")
current_site = base_host(request=request, is_app=True)
# Send invitations
for invitation in workspace_invitations:
@ -213,7 +213,7 @@ class WorkspaceJoinEndpoint(BaseAPIView):
user=user.id if user is not None else None,
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
ip=request.META.get("REMOTE_ADDR"),
ip=get_client_ip(request=request),
event_name="MEMBER_ACCEPTED",
accepted_from="EMAIL",
)

View file

@ -68,10 +68,11 @@ class WorkSpaceMemberViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST,
)
if workspace_member.role > int(request.data.get("role")):
_ = ProjectMember.objects.filter(
# If a user is moved to a guest role he can't have any other role in projects
if "role" in request.data and int(request.data.get("role")) == 5:
ProjectMember.objects.filter(
workspace__slug=slug, member_id=workspace_member.member_id
).update(role=int(request.data.get("role")))
).update(role=5)
serializer = WorkSpaceMemberSerializer(
workspace_member, data=request.data, partial=True

View file

@ -15,8 +15,8 @@ from plane.db.models import Profile, User, WorkspaceMemberInvite
from plane.license.utils.instance_value import get_configuration_value
from .error import AuthenticationException, AUTHENTICATION_ERROR_CODES
from plane.bgtasks.user_activation_email_task import user_activation_email
from plane.authentication.utils.host import base_host
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip
class Adapter:
"""Common interface for all auth providers"""
@ -108,7 +108,7 @@ class Adapter:
user.last_login_medium = self.provider
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = self.request.META.get("REMOTE_ADDR")
user.last_login_ip = get_client_ip(request=self.request)
user.last_login_uagent = self.request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
# If user is not active, send the activation email and set the user as active

View file

@ -36,10 +36,12 @@ AUTHENTICATION_ERROR_CODES = {
"OAUTH_NOT_CONFIGURED": 5104,
"GOOGLE_NOT_CONFIGURED": 5105,
"GITHUB_NOT_CONFIGURED": 5110,
"GITHUB_USER_NOT_IN_ORG": 5122,
"GITLAB_NOT_CONFIGURED": 5111,
"GOOGLE_OAUTH_PROVIDER_ERROR": 5115,
"GITHUB_OAUTH_PROVIDER_ERROR": 5120,
"GITLAB_OAUTH_PROVIDER_ERROR": 5121,
# Reset Password
"INVALID_PASSWORD_TOKEN": 5125,
"EXPIRED_PASSWORD_TOKEN": 5130,

View file

@ -18,11 +18,16 @@ from plane.authentication.adapter.error import (
class GitHubOAuthProvider(OauthAdapter):
token_url = "https://github.com/login/oauth/access_token"
userinfo_url = "https://api.github.com/user"
org_membership_url = f"https://api.github.com/orgs"
provider = "github"
scope = "read:user user:email"
organization_scope = "read:org"
def __init__(self, request, code=None, state=None, callback=None):
GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET = get_configuration_value(
GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = get_configuration_value(
[
{
"key": "GITHUB_CLIENT_ID",
@ -32,6 +37,10 @@ class GitHubOAuthProvider(OauthAdapter):
"key": "GITHUB_CLIENT_SECRET",
"default": os.environ.get("GITHUB_CLIENT_SECRET"),
},
{
"key": "GITHUB_ORGANIZATION_ID",
"default": os.environ.get("GITHUB_ORGANIZATION_ID"),
},
]
)
@ -43,6 +52,10 @@ class GitHubOAuthProvider(OauthAdapter):
client_id = GITHUB_CLIENT_ID
client_secret = GITHUB_CLIENT_SECRET
self.organization_id = GITHUB_ORGANIZATION_ID
if self.organization_id:
self.scope += f" {self.organization_scope}"
redirect_uri = f"""{"https" if request.is_secure() else "http"}://{request.get_host()}/auth/github/callback/"""
url_params = {
@ -113,12 +126,26 @@ class GitHubOAuthProvider(OauthAdapter):
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
)
def is_user_in_organization(self, github_username):
headers = {"Authorization": f"Bearer {self.token_data.get('access_token')}"}
response = requests.get(f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}", headers=headers)
return response.status_code == 200 # 200 means the user is a member
def set_user_data(self):
user_info_response = self.get_user_response()
headers = {
"Authorization": f"Bearer {self.token_data.get('access_token')}",
"Accept": "application/json",
}
if self.organization_id:
if not self.is_user_in_organization(user_info_response.get("login")):
raise AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["GITHUB_USER_NOT_IN_ORG"],
error_message="GITHUB_USER_NOT_IN_ORG",
)
email = self.__get_email(headers=headers)
super().set_user_data(
{

View file

@ -1,18 +1,16 @@
# Python imports
from urllib.parse import urlsplit
# Django imports
from django.conf import settings
from django.http import HttpRequest
# Third party imports
from rest_framework.request import Request
# Module imports
from plane.utils.ip_address import get_client_ip
def base_host(request, is_admin=False, is_space=False, is_app=False):
def base_host(request: Request | HttpRequest, is_admin: bool = False, is_space: bool = False, is_app: bool = False) -> str:
"""Utility function to return host / origin from the request"""
# Calculate the base origin from request
base_origin = str(
request.META.get("HTTP_ORIGIN")
or f"{urlsplit(request.META.get('HTTP_REFERER')).scheme}://{urlsplit(request.META.get('HTTP_REFERER')).netloc}"
or f"""{"https" if request.is_secure() else "http"}://{request.get_host()}"""
)
base_origin = settings.WEB_URL or settings.APP_BASE_URL
# Admin redirections
if is_admin:
@ -38,5 +36,5 @@ def base_host(request, is_admin=False, is_space=False, is_app=False):
return base_origin
def user_ip(request):
return str(request.META.get("REMOTE_ADDR"))
def user_ip(request: Request | HttpRequest) -> str:
return get_client_ip(request=request)

View file

@ -3,8 +3,8 @@ from django.contrib.auth import login
from django.conf import settings
# Module imports
from plane.authentication.utils.host import base_host
from plane.utils.host import base_host
from plane.utils.ip_address import get_client_ip
def user_login(request, user, is_app=False, is_admin=False, is_space=False):
login(request=request, user=user)
@ -15,7 +15,7 @@ def user_login(request, user, is_app=False, is_admin=False, is_space=False):
device_info = {
"user_agent": request.META.get("HTTP_USER_AGENT", ""),
"ip_address": request.META.get("REMOTE_ADDR", ""),
"ip_address": get_client_ip(request=request),
"domain": base_host(
request=request, is_app=is_app, is_admin=is_admin, is_space=is_space
),

View file

@ -19,7 +19,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
class SignInAuthEndpoint(View):
def post(self, request):
@ -34,7 +34,7 @@ class SignInAuthEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
# Base URL join
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
@ -58,7 +58,7 @@ class SignInAuthEndpoint(View):
params = exc.get_error_dict()
# Next path
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
)
@ -76,7 +76,7 @@ class SignInAuthEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
)
@ -92,7 +92,7 @@ class SignInAuthEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
)
@ -111,7 +111,7 @@ class SignInAuthEndpoint(View):
user_login(request=request, user=user, is_app=True)
# Get the redirection path
if next_path:
path = str(next_path)
path = str(validate_next_path(next_path))
else:
path = get_redirection_path(user=user)
@ -121,7 +121,7 @@ class SignInAuthEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
)
@ -141,7 +141,7 @@ class SignUpAuthEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -161,7 +161,7 @@ class SignUpAuthEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -179,7 +179,7 @@ class SignUpAuthEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -197,7 +197,7 @@ class SignUpAuthEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -216,7 +216,7 @@ class SignUpAuthEndpoint(View):
user_login(request=request, user=user, is_app=True)
# Get the redirection path
if next_path:
path = next_path
path = str(validate_next_path(next_path))
else:
path = get_redirection_path(user=user)
# redirect to referer path
@ -225,7 +225,7 @@ class SignUpAuthEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)

View file

@ -16,7 +16,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
class GitHubOauthInitiateEndpoint(View):
def get(self, request):
@ -35,7 +35,7 @@ class GitHubOauthInitiateEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -49,7 +49,7 @@ class GitHubOauthInitiateEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -70,7 +70,7 @@ class GitHubCallbackEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
return HttpResponseRedirect(url)
@ -81,7 +81,7 @@ class GitHubCallbackEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
return HttpResponseRedirect(url)
@ -94,7 +94,7 @@ class GitHubCallbackEndpoint(View):
user_login(request=request, user=user, is_app=True)
# Get the redirection path
if next_path:
path = next_path
path = str(validate_next_path(next_path))
else:
path = get_redirection_path(user=user)
# redirect to referer path
@ -103,6 +103,6 @@ class GitHubCallbackEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
return HttpResponseRedirect(url)

View file

@ -16,7 +16,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
class GitLabOauthInitiateEndpoint(View):
def get(self, request):
@ -24,7 +24,7 @@ class GitLabOauthInitiateEndpoint(View):
request.session["host"] = base_host(request=request, is_app=True)
next_path = request.GET.get("next_path")
if next_path:
request.session["next_path"] = str(next_path)
request.session["next_path"] = str(validate_next_path(next_path))
# Check instance configuration
instance = Instance.objects.first()
@ -35,7 +35,7 @@ class GitLabOauthInitiateEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -49,7 +49,7 @@ class GitLabOauthInitiateEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -81,7 +81,7 @@ class GitLabCallbackEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
return HttpResponseRedirect(url)
@ -94,7 +94,7 @@ class GitLabCallbackEndpoint(View):
user_login(request=request, user=user, is_app=True)
# Get the redirection path
if next_path:
path = next_path
path = str(validate_next_path(next_path))
else:
path = get_redirection_path(user=user)
# redirect to referer path
@ -103,6 +103,6 @@ class GitLabCallbackEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
return HttpResponseRedirect(url)

View file

@ -18,7 +18,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
class GoogleOauthInitiateEndpoint(View):
def get(self, request):
@ -36,7 +36,7 @@ class GoogleOauthInitiateEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -51,7 +51,7 @@ class GoogleOauthInitiateEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -72,7 +72,7 @@ class GoogleCallbackEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
return HttpResponseRedirect(url)
if not code:
@ -82,7 +82,7 @@ class GoogleCallbackEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = next_path
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
return HttpResponseRedirect(url)
try:
@ -95,11 +95,11 @@ class GoogleCallbackEndpoint(View):
# Get the redirection path
path = get_redirection_path(user=user)
# redirect to referer path
url = urljoin(base_host, str(next_path) if next_path else path)
url = urljoin(base_host, str(validate_next_path(next_path)) if next_path else path)
return HttpResponseRedirect(url)
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(base_host, "?" + urlencode(params))
return HttpResponseRedirect(url)

View file

@ -26,6 +26,7 @@ from plane.authentication.adapter.error import (
AUTHENTICATION_ERROR_CODES,
)
from plane.authentication.rate_limit import AuthenticationThrottle
from plane.utils.path_validator import validate_next_path
class MagicGenerateEndpoint(APIView):
@ -43,14 +44,13 @@ class MagicGenerateEndpoint(APIView):
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
origin = request.META.get("HTTP_ORIGIN", "/")
email = request.data.get("email", "").strip().lower()
try:
validate_email(email)
adapter = MagicCodeProvider(request=request, key=email)
key, token = adapter.initiate()
# If the smtp is configured send through here
magic_link.delay(email, key, token, origin)
magic_link.delay(email, key, token)
return Response({"key": str(key)}, status=status.HTTP_200_OK)
except AuthenticationException as e:
params = e.get_error_dict()
@ -73,7 +73,7 @@ class MagicSignInEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
)
@ -89,7 +89,7 @@ class MagicSignInEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
)
@ -122,7 +122,7 @@ class MagicSignInEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "sign-in?" + urlencode(params)
)
@ -145,7 +145,7 @@ class MagicSignUpEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -159,7 +159,7 @@ class MagicSignUpEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)
@ -177,7 +177,7 @@ class MagicSignUpEndpoint(View):
user_login(request=request, user=user, is_app=True)
# Get the redirection path
if next_path:
path = str(next_path)
path = str(validate_next_path(next_path))
else:
path = get_redirection_path(user=user)
# redirect to referer path
@ -187,7 +187,7 @@ class MagicSignUpEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = urljoin(
base_host(request=request, is_app=True), "?" + urlencode(params)
)

View file

@ -80,7 +80,7 @@ class ForgotPasswordEndpoint(APIView):
if user:
# Get the reset token for user
uidb64, token = generate_password_token(user=user)
current_site = request.META.get("HTTP_ORIGIN")
current_site = base_host(request=request, is_app=True)
# send the forgot password email
forgot_password.delay(
user.first_name, user.email, uidb64, token, current_site

View file

@ -44,10 +44,21 @@ class ChangePasswordEndpoint(APIView):
def post(self, request):
user = User.objects.get(pk=request.user.id)
old_password = request.data.get("old_password", False)
# If the user password is not autoset then we need to check the old passwords
if not user.is_password_autoset:
old_password = request.data.get("old_password", False)
if not old_password:
exc = AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["MISSING_PASSWORD"],
error_message="MISSING_PASSWORD",
payload={"error": "Old password is missing"},
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
# Get the new password
new_password = request.data.get("new_password", False)
if not old_password or not new_password:
if not new_password:
exc = AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["MISSING_PASSWORD"],
error_message="MISSING_PASSWORD",
@ -55,7 +66,9 @@ class ChangePasswordEndpoint(APIView):
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
if not user.check_password(old_password):
# If the user password is not autoset then we need to check the old passwords
if not user.is_password_autoset and not user.check_password(old_password):
exc = AuthenticationException(
error_code=AUTHENTICATION_ERROR_CODES["INCORRECT_OLD_PASSWORD"],
error_message="INCORRECT_OLD_PASSWORD",

View file

@ -17,6 +17,7 @@ from plane.authentication.adapter.error import (
AUTHENTICATION_ERROR_CODES,
AuthenticationException,
)
from plane.utils.path_validator import validate_next_path
class SignInAuthSpaceEndpoint(View):
@ -32,7 +33,7 @@ class SignInAuthSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -51,7 +52,7 @@ class SignInAuthSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -67,7 +68,7 @@ class SignInAuthSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -82,7 +83,7 @@ class SignInAuthSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -99,7 +100,7 @@ class SignInAuthSpaceEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -117,7 +118,7 @@ class SignUpAuthSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -135,7 +136,7 @@ class SignUpAuthSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
# Validate the email
@ -151,7 +152,7 @@ class SignUpAuthSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -166,7 +167,7 @@ class SignUpAuthSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -183,6 +184,6 @@ class SignUpAuthSpaceEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)

View file

@ -15,6 +15,7 @@ from plane.authentication.adapter.error import (
AUTHENTICATION_ERROR_CODES,
AuthenticationException,
)
from plane.utils.path_validator import validate_next_path
class GitHubOauthInitiateSpaceEndpoint(View):
@ -34,7 +35,7 @@ class GitHubOauthInitiateSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -66,7 +67,7 @@ class GitHubCallbackSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -77,7 +78,7 @@ class GitHubCallbackSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -93,6 +94,6 @@ class GitHubCallbackSpaceEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)

View file

@ -15,6 +15,7 @@ from plane.authentication.adapter.error import (
AUTHENTICATION_ERROR_CODES,
AuthenticationException,
)
from plane.utils.path_validator import validate_next_path
class GitLabOauthInitiateSpaceEndpoint(View):
@ -34,7 +35,7 @@ class GitLabOauthInitiateSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -66,7 +67,7 @@ class GitLabCallbackSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -77,7 +78,7 @@ class GitLabCallbackSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -93,6 +94,6 @@ class GitLabCallbackSpaceEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)

View file

@ -15,6 +15,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
class GoogleOauthInitiateSpaceEndpoint(View):
@ -33,7 +34,7 @@ class GoogleOauthInitiateSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -46,7 +47,7 @@ class GoogleOauthInitiateSpaceEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -65,7 +66,7 @@ class GoogleCallbackSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
if not code:
@ -75,7 +76,7 @@ class GoogleCallbackSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = next_path
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
try:
@ -89,6 +90,6 @@ class GoogleCallbackSpaceEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)

View file

@ -23,7 +23,7 @@ from plane.authentication.adapter.error import (
AuthenticationException,
AUTHENTICATION_ERROR_CODES,
)
from plane.utils.path_validator import validate_next_path
class MagicGenerateSpaceEndpoint(APIView):
permission_classes = [AllowAny]
@ -38,14 +38,14 @@ class MagicGenerateSpaceEndpoint(APIView):
)
return Response(exc.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
origin = base_host(request=request, is_space=True)
email = request.data.get("email", "").strip().lower()
try:
validate_email(email)
adapter = MagicCodeProvider(request=request, key=email)
key, token = adapter.initiate()
# If the smtp is configured send through here
magic_link.delay(email, key, token, origin)
magic_link.delay(email, key, token)
return Response({"key": str(key)}, status=status.HTTP_200_OK)
except AuthenticationException as e:
return Response(e.get_error_dict(), status=status.HTTP_400_BAD_REQUEST)
@ -67,7 +67,7 @@ class MagicSignInSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -80,7 +80,7 @@ class MagicSignInSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -121,7 +121,7 @@ class MagicSignUpSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
# Existing User
@ -134,7 +134,7 @@ class MagicSignUpSpaceEndpoint(View):
)
params = exc.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)
@ -152,6 +152,6 @@ class MagicSignUpSpaceEndpoint(View):
except AuthenticationException as e:
params = e.get_error_dict()
if next_path:
params["next_path"] = str(next_path)
params["next_path"] = str(validate_next_path(next_path))
url = f"{base_host(request=request, is_space=True)}?{urlencode(params)}"
return HttpResponseRedirect(url)

View file

@ -90,7 +90,7 @@ class ForgotPasswordSpaceEndpoint(APIView):
if user:
# Get the reset token for user
uidb64, token = generate_password_token(user=user)
current_site = request.META.get("HTTP_ORIGIN")
current_site = base_host(request=request, is_space=True)
# send the forgot password email
forgot_password.delay(
user.first_name, user.email, uidb64, token, current_site

View file

@ -7,6 +7,7 @@ from django.utils import timezone
# Module imports
from plane.authentication.utils.host import base_host, user_ip
from plane.db.models import User
from plane.utils.path_validator import validate_next_path
class SignOutAuthSpaceEndpoint(View):
@ -21,8 +22,8 @@ class SignOutAuthSpaceEndpoint(View):
user.save()
# Log the user out
logout(request)
url = f"{base_host(request=request, is_space=True)}{next_path}"
url = f"{base_host(request=request, is_space=True)}{str(validate_next_path(next_path)) if next_path else ''}"
return HttpResponseRedirect(url)
except Exception:
url = f"{base_host(request=request, is_space=True)}{next_path}"
url = f"{base_host(request=request, is_space=True)}{str(validate_next_path(next_path)) if next_path else ''}"
return HttpResponseRedirect(url)

View file

@ -32,7 +32,7 @@ from plane.settings.redis import redis_instance
from plane.utils.exception_logger import log_exception
from plane.bgtasks.webhook_task import webhook_activity
from plane.utils.issue_relation_mapper import get_inverse_relation
from plane.utils.valid_uuid import is_valid_uuid
from plane.utils.uuid import is_valid_uuid
# Track Changes in name
@ -307,6 +307,10 @@ def track_labels(
# Set of newly added labels
for added_label in added_labels:
# validate uuids
if not is_valid_uuid(added_label):
continue
label = Label.objects.get(pk=added_label)
issue_activities.append(
IssueActivity(
@ -327,6 +331,10 @@ def track_labels(
# Set of dropped labels
for dropped_label in dropped_labels:
# validate uuids
if not is_valid_uuid(dropped_label):
continue
label = Label.objects.get(pk=dropped_label)
issue_activities.append(
IssueActivity(
@ -373,6 +381,10 @@ def track_assignees(
bulk_subscribers = []
for added_asignee in added_assignees:
# validate uuids
if not is_valid_uuid(added_asignee):
continue
assignee = User.objects.get(pk=added_asignee)
issue_activities.append(
IssueActivity(
@ -406,6 +418,10 @@ def track_assignees(
)
for dropped_assignee in dropped_assginees:
# validate uuids
if not is_valid_uuid(dropped_assignee):
continue
assignee = User.objects.get(pk=dropped_assignee)
issue_activities.append(
IssueActivity(
@ -466,7 +482,7 @@ def track_estimate_points(
),
old_value=old_estimate.value if old_estimate else None,
new_value=new_estimate.value if new_estimate else None,
field="estimate_point",
field="estimate_" + new_estimate.estimate.type,
project_id=project_id,
workspace_id=workspace_id,
comment="updated the estimate point to ",

View file

@ -16,7 +16,7 @@ from plane.utils.exception_logger import log_exception
@shared_task
def magic_link(email, key, token, current_site):
def magic_link(email, key, token):
try:
(
EMAIL_HOST,

View file

@ -1,8 +1,16 @@
# Python imports
import os
import logging
# Third party imports
from celery import Celery
from plane.settings.redis import redis_instance
from pythonjsonlogger.jsonlogger import JsonFormatter
from celery.signals import after_setup_logger, after_setup_task_logger
from celery.schedules import crontab
# Module imports
from plane.settings.redis import redis_instance
# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
@ -47,6 +55,28 @@ app.conf.beat_schedule = {
},
}
# Setup logging
# Attach a JSON stream handler to Celery's root logger so worker logs match
# the structured "json" formatter used by the Django LOGGING settings.
@after_setup_logger.connect
def setup_loggers(logger, *args, **kwargs):
    """Emit Celery worker logs as JSON records on stdout/stderr."""
    # Fix: the original format string began with a stray '"' character,
    # which is absent from the equivalent fmt in settings LOGGING.
    formatter = JsonFormatter(
        "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s"
    )
    handler = logging.StreamHandler()
    handler.setFormatter(fmt=formatter)
    logger.addHandler(handler)
# Same JSON handler for per-task loggers, which Celery configures separately
# from the root worker logger.
@after_setup_task_logger.connect
def setup_task_loggers(logger, *args, **kwargs):
    """Emit Celery task logs as JSON records on stdout/stderr."""
    # Fix: drop the stray leading '"' so the fmt matches settings LOGGING.
    formatter = JsonFormatter(
        "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s"
    )
    handler = logging.StreamHandler()
    handler.setFormatter(fmt=formatter)
    logger.addHandler(handler)
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

View file

@ -0,0 +1,102 @@
# Django imports
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Max
from django.db import connection, transaction
# Module imports
from plane.db.models import Project, Issue, IssueSequence
from plane.utils.uuid import convert_uuid_to_integer
class Command(BaseCommand):
    """Repair duplicate issue sequence numbers within one project.

    Given an identifier such as ``PROJ-42``, finds every issue in project
    ``PROJ`` that shares sequence number 42 and re-numbers all but the first
    past the project's current maximum sequence, keeping the matching
    IssueSequence rows in step.
    """

    help = "Fix duplicate sequences"

    def add_arguments(self, parser):
        # Positional argument, e.g. "PROJ-42"
        parser.add_argument("issue_identifier", type=str, help="Issue Identifier")

    def strict_str_to_int(self, s):
        # Reject anything that is not a plain (optionally negative) integer
        # string; bare int() would also accept surrounding whitespace etc.
        if not s.isdigit() and not (s.startswith("-") and s[1:].isdigit()):
            raise ValueError("Invalid integer string")
        return int(s)

    def handle(self, *args, **options):
        workspace_slug = input("Workspace slug: ")
        if not workspace_slug:
            raise CommandError("Workspace slug is required")

        issue_identifier = options.get("issue_identifier", False)
        # Validate issue_identifier
        if not issue_identifier:
            raise CommandError("Issue identifier is required")

        try:
            # Identifier must be of the form "<PROJECT>-<SEQUENCE>"
            identifier = issue_identifier.split("-")
            if len(identifier) != 2:
                raise ValueError("Invalid issue identifier format")
            project_identifier = identifier[0]
            issue_sequence = self.strict_str_to_int(identifier[1])

            # Fetch the project (case-insensitive identifier match)
            project = Project.objects.get(
                identifier__iexact=project_identifier, workspace__slug=workspace_slug
            )

            # All issues sharing the duplicated sequence number, in a stable
            # order so the issue that keeps its number is deterministic
            # (previously the queryset was unordered).
            issues = Issue.objects.filter(
                project=project, sequence_id=issue_sequence
            ).order_by("created_at")

            # Check if there are duplicate issues
            if not issues.count() > 1:
                raise CommandError(
                    "No duplicate issues found with the given identifier"
                )

            self.stdout.write(
                self.style.SUCCESS(
                    f"{issues.count()} issues found with identifier {issue_identifier}"
                )
            )

            with transaction.atomic():
                # Serialize with Issue.save(): only one transaction per
                # project may allocate sequence numbers at a time.
                lock_key = convert_uuid_to_integer(project.id)
                with connection.cursor() as cursor:
                    # Get an exclusive lock using the project ID as the lock key
                    cursor.execute("SELECT pg_advisory_xact_lock(%s)", [lock_key])

                # Highest sequence currently used in the project; fall back
                # to 0 when there are no IssueSequence rows (the original
                # code would crash on None + int here).
                last_sequence = (
                    IssueSequence.objects.filter(project=project).aggregate(
                        largest=Max("sequence")
                    )["largest"]
                    or 0
                )

                bulk_issues = []
                bulk_issue_sequences = []
                issue_sequence_map = {
                    isq.issue_id: isq
                    for isq in IssueSequence.objects.filter(project=project)
                }

                # Re-number every duplicate except the first one
                for index, issue in enumerate(issues[1:]):
                    updated_sequence_id = last_sequence + index + 1
                    issue.sequence_id = updated_sequence_id
                    bulk_issues.append(issue)
                    # Keep the matching IssueSequence row consistent
                    sequence_identifier = issue_sequence_map.get(issue.id)
                    if sequence_identifier:
                        sequence_identifier.sequence = updated_sequence_id
                        bulk_issue_sequences.append(sequence_identifier)

                Issue.objects.bulk_update(bulk_issues, ["sequence_id"])
                IssueSequence.objects.bulk_update(bulk_issue_sequences, ["sequence"])

            self.stdout.write(self.style.SUCCESS("Sequence IDs updated successfully"))
        except Exception as e:
            raise CommandError(str(e))

View file

@ -0,0 +1,78 @@
import time
from django.core.management.base import BaseCommand
from django.db import transaction
from plane.db.models import Workspace
class Command(BaseCommand):
    """Append the soft-deletion epoch timestamp to a deleted workspace's slug.

    Renaming the slug of a soft-deleted workspace frees the original slug
    for reuse by a new workspace while keeping the deleted row intact.
    """

    help = "Updates the slug of a soft-deleted workspace by appending the epoch timestamp"

    def add_arguments(self, parser):
        parser.add_argument(
            "slug",
            type=str,
            help="The slug of the workspace to update",
        )
        parser.add_argument(
            "--dry-run",
            action="store_true",
            help="Run the command without making any changes",
        )

    def handle(self, *args, **options):
        slug = options["slug"]
        dry_run = options["dry_run"]

        # Look the workspace up through all_objects so soft-deleted rows are
        # included in the search.
        try:
            workspace = Workspace.all_objects.get(slug=slug)
        except Workspace.DoesNotExist:
            self.stdout.write(
                self.style.ERROR(f"Workspace with slug '{slug}' not found.")
            )
            return

        # Only soft-deleted workspaces are eligible
        if workspace.deleted_at is None:
            self.stdout.write(
                self.style.WARNING(
                    f"Workspace '{workspace.name}' (slug: {workspace.slug}) is not deleted."
                )
            )
            return

        # Skip if the slug already carries a "__<epoch>" suffix
        if "__" in workspace.slug and workspace.slug.split("__")[-1].isdigit():
            self.stdout.write(
                self.style.WARNING(
                    f"Workspace '{workspace.name}' (slug: {workspace.slug}) already has a timestamp appended."
                )
            )
            return

        # Build the new slug from the deletion timestamp
        deletion_timestamp = int(workspace.deleted_at.timestamp())
        old_slug = workspace.slug
        new_slug = f"{old_slug}__{deletion_timestamp}"

        if dry_run:
            self.stdout.write(
                f"Would update workspace '{workspace.name}' slug from '{old_slug}' to '{new_slug}'"
            )
        else:
            try:
                with transaction.atomic():
                    workspace.slug = new_slug
                    workspace.save(update_fields=["slug"])
                # Fix: report the slug captured *before* the update — the
                # original message read workspace.slug after it had already
                # been overwritten, printing the new slug twice.
                self.stdout.write(
                    self.style.SUCCESS(
                        f"Updated workspace '{workspace.name}' slug from '{old_slug}' to '{new_slug}'"
                    )
                )
            except Exception as e:
                self.stdout.write(
                    self.style.ERROR(
                        f"Error updating workspace '{workspace.name}': {str(e)}"
                    )
                )

View file

@ -3,7 +3,6 @@
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from sentry_sdk import capture_exception
import uuid
@ -29,7 +28,6 @@ def create_issue_relation(apps, schema_editor):
)
except Exception as e:
print(e)
capture_exception(e)
def update_issue_priority_choice(apps, schema_editor):

View file

@ -0,0 +1,31 @@
# Generated by Django 4.2.17 on 2025-03-04 19:29
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    # Adds page-move tracking fields and a per-version sub-page snapshot.

    dependencies = [
        ("db", "0092_alter_deprecateddashboardwidget_unique_together_and_more"),
    ]

    operations = [
        # Plain UUIDField rather than a ForeignKey: records the target of a
        # page move without a DB-level constraint on the referenced row.
        migrations.AddField(
            model_name="page",
            name="moved_to_page",
            field=models.UUIDField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="page",
            name="moved_to_project",
            field=models.UUIDField(blank=True, null=True),
        ),
        # JSON snapshot stored on each page version; empty dict by default
        migrations.AddField(
            model_name="pageversion",
            name="sub_pages_data",
            field=models.JSONField(blank=True, default=dict),
        ),
    ]

View file

@ -82,4 +82,4 @@ from .label import Label
from .device import Device, DeviceSession
from .sticky import Sticky
from .sticky import Sticky

View file

@ -31,6 +31,10 @@ class Intake(ProjectBaseModel):
ordering = ("name",)
class SourceType(models.TextChoices):
IN_APP = "IN_APP"
class IntakeIssue(ProjectBaseModel):
intake = models.ForeignKey(
"db.Intake", related_name="issue_intake", on_delete=models.CASCADE

View file

@ -6,7 +6,7 @@ from django.conf import settings
from django.contrib.postgres.fields import ArrayField
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models, transaction
from django.db import models, transaction, connection
from django.utils import timezone
from django.db.models import Q
from django import apps
@ -15,8 +15,8 @@ from django import apps
from plane.utils.html_processor import strip_tags
from plane.db.mixins import SoftDeletionManager
from plane.utils.exception_logger import log_exception
from .base import BaseModel
from .project import ProjectBaseModel
from plane.utils.uuid import convert_uuid_to_integer
def get_default_properties():
@ -209,11 +209,18 @@ class Issue(ProjectBaseModel):
if self._state.adding:
with transaction.atomic():
last_sequence = (
IssueSequence.objects.filter(project=self.project)
.select_for_update()
.aggregate(largest=models.Max("sequence"))["largest"]
)
# Create a lock for this specific project using an advisory lock
# This ensures only one transaction per project can execute this code at a time
lock_key = convert_uuid_to_integer(self.project.id)
with connection.cursor() as cursor:
# Get an exclusive lock using the project ID as the lock key
cursor.execute("SELECT pg_advisory_xact_lock(%s)", [lock_key])
# Get the last sequence for the project
last_sequence = IssueSequence.objects.filter(
project=self.project
).aggregate(largest=models.Max("sequence"))["largest"]
self.sequence_id = last_sequence + 1 if last_sequence else 1
# Strip the html tags using html parser
self.description_stripped = (

View file

@ -50,6 +50,8 @@ class Page(BaseModel):
projects = models.ManyToManyField(
"db.Project", related_name="pages", through="db.ProjectPage"
)
moved_to_page = models.UUIDField(null=True, blank=True)
moved_to_project = models.UUIDField(null=True, blank=True)
class Meta:
verbose_name = "Page"
@ -172,6 +174,7 @@ class PageVersion(BaseModel):
description_html = models.TextField(blank=True, default="<p></p>")
description_stripped = models.TextField(blank=True, null=True)
description_json = models.JSONField(default=dict, blank=True)
sub_pages_data = models.JSONField(default=dict, blank=True)
class Meta:
verbose_name = "Page Version"

View file

@ -1,6 +1,7 @@
# Python imports
import pytz
from uuid import uuid4
from enum import Enum
# Django imports
from django.conf import settings
@ -17,6 +18,15 @@ from .base import BaseModel
ROLE_CHOICES = ((20, "Admin"), (15, "Member"), (5, "Guest"))
class ProjectNetwork(Enum):
    """Network/visibility levels for a project."""

    SECRET = 0
    PUBLIC = 2

    @classmethod
    def choices(cls):
        """Return Django-style ``(value, label)`` pairs for form/model choices.

        Derived from the members themselves so the list can never drift out
        of sync with the enum definition (the original hardcoded the pairs).
        """
        return [(member.value, member.name.title()) for member in cls]
def get_default_props():
return {
"filters": {

View file

@ -1,6 +1,9 @@
# Python imports
from django.db.models.functions import Ln
import pytz
import time
from django.utils import timezone
from typing import Optional, Any, Tuple, Dict
# Django imports
from django.conf import settings
@ -149,6 +152,34 @@ class Workspace(BaseModel):
return self.logo
return None
    def delete(
        self,
        using: Optional[str] = None,
        soft: bool = True,
        *args: Any,
        **kwargs: Any
    ):
        """
        Override the delete method to append epoch timestamp to the slug when soft deleting.

        Renaming the slug presumably frees it for reuse by a new workspace
        while the soft-deleted row is retained — confirm the slug uniqueness
        constraint on the model.

        Args:
            using: The database alias to use for the deletion.
            soft: Whether to perform a soft delete (True) or hard delete (False).
            *args: Additional positional arguments.
            **kwargs: Additional keyword arguments.
        """
        # Call the parent class's delete method first; it is expected to set
        # deleted_at on a soft delete (NOTE(review): BaseModel.delete not
        # visible here — confirm).
        result = super().delete(using=using, soft=soft, *args, **kwargs)

        # Only rewrite the slug when this was a soft delete and the row
        # still exists with a deletion timestamp recorded.
        if soft and hasattr(self, 'deleted_at') and self.deleted_at:
            # Use the deleted_at timestamp to update the slug
            deletion_timestamp: int = int(self.deleted_at.timestamp())
            self.slug = f"{self.slug}__{deletion_timestamp}"
            # update_fields limits the UPDATE to the slug column only
            self.save(update_fields=["slug"])

        return result
class Meta:
verbose_name = "Workspace"
verbose_name_plural = "Workspaces"
@ -391,7 +422,7 @@ class WorkspaceHomePreference(BaseModel):
class WorkspaceUserPreference(BaseModel):
"""Preference for the workspace for a user"""
class UserPreferenceKeys(models.TextChoices):
class UserPreferenceKeys(models.TextChoices):
VIEWS = "views", "Views"
ACTIVE_CYCLES = "active_cycles", "Active Cycles"
ANALYTICS = "analytics", "Analytics"

View file

@ -33,6 +33,7 @@ from plane.authentication.adapter.error import (
AUTHENTICATION_ERROR_CODES,
AuthenticationException,
)
from plane.utils.ip_address import get_client_ip
class InstanceAdminEndpoint(BaseAPIView):
@ -217,7 +218,7 @@ class InstanceAdminSignUpEndpoint(View):
user.is_active = True
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_ip = get_client_ip(request=request)
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
user.save()
@ -344,7 +345,7 @@ class InstanceAdminSignInEndpoint(View):
user.is_active = True
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_ip = get_client_ip(request=request)
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.token_updated_at = timezone.now()
user.save()

View file

@ -109,5 +109,5 @@ class InstanceWorkSpaceEndpoint(BaseAPIView):
if "already exists" in str(e):
return Response(
{"slug": "The workspace with the slug already exists"},
status=status.HTTP_410_GONE,
status=status.HTTP_409_CONFLICT,
)

View file

@ -71,6 +71,12 @@ class Command(BaseCommand):
"category": "GITHUB",
"is_encrypted": True,
},
{
"key": "GITHUB_ORGANIZATION_ID",
"value": os.environ.get("GITHUB_ORGANIZATION_ID"),
"category": "GITHUB",
"is_encrypted": False,
},
{
"key": "GITLAB_HOST",
"value": os.environ.get("GITLAB_HOST"),

View file

@ -1,39 +0,0 @@
from plane.db.models import APIActivityLog
class APITokenLogMiddleware:
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
request_body = request.body
response = self.get_response(request)
self.process_request(request, response, request_body)
return response
def process_request(self, request, response, request_body):
api_key_header = "X-Api-Key"
api_key = request.headers.get(api_key_header)
# If the API key is present, log the request
if api_key:
try:
APIActivityLog.objects.create(
token_identifier=api_key,
path=request.path,
method=request.method,
query_params=request.META.get("QUERY_STRING", ""),
headers=str(request.headers),
body=(request_body.decode("utf-8") if request_body else None),
response_body=(
response.content.decode("utf-8") if response.content else None
),
response_code=response.status_code,
ip_address=request.META.get("REMOTE_ADDR", None),
user_agent=request.META.get("HTTP_USER_AGENT", None),
)
except Exception as e:
print(e)
# If the token does not exist, you can decide whether to log this as an invalid attempt
return None

View file

@ -0,0 +1,111 @@
# Python imports
import logging
import time
# Django imports
from django.http import HttpRequest
# Third party imports
from rest_framework.request import Request
# Module imports
from plane.utils.ip_address import get_client_ip
from plane.db.models import APIActivityLog
api_logger = logging.getLogger("plane.api.request")
class RequestLoggerMiddleware:
    """Log one structured record per HTTP request (method, path, status,
    duration, client IP, user agent, user id) via the ``plane.api.request``
    logger."""

    def __init__(self, get_response):
        self.get_response = get_response

    def _should_log_route(self, request: Request | HttpRequest) -> bool:
        """
        Determines whether a route should be logged based on the request and status code.
        """
        # Don't log health checks
        if request.path == "/" and request.method == "GET":
            return False
        return True

    def __call__(self, request):
        # Time the full downstream handling of the request
        start_time = time.time()
        # Get the response
        response = self.get_response(request)
        # calculate the duration
        duration = time.time() - start_time

        # Skip routes excluded from logging (health checks)
        if not self._should_log_route(request=request):
            return response

        # Fix: getattr with an explicit None default. AuthenticationMiddleware
        # may not have attached "user" to the request, and the original
        # two-argument getattr(request, "user") would raise AttributeError
        # in exactly the case the guard was meant to handle.
        user = getattr(request, "user", None)
        user_id = (
            user.id
            if user is not None and getattr(user, "is_authenticated", False)
            else None
        )

        user_agent = request.META.get("HTTP_USER_AGENT", "")

        # "extra" fields are surfaced by the JSON log formatter
        api_logger.info(
            f"{request.method} {request.get_full_path()} {response.status_code}",
            extra={
                "path": request.path,
                "method": request.method,
                "status_code": response.status_code,
                "duration_ms": int(duration * 1000),
                "remote_addr": get_client_ip(request),
                "user_agent": user_agent,
                "user_id": user_id,
            },
        )
        # return the response
        return response
class APITokenLogMiddleware:
    """Persist an APIActivityLog row for every request carrying an API key."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # Read the body before the view consumes the stream. NOTE(review):
        # this caches the full payload in memory for every request, including
        # large uploads — confirm this is acceptable.
        request_body = request.body
        response = self.get_response(request)
        self.process_request(request, response, request_body)
        return response

    def process_request(self, request, response, request_body):
        """Create the activity-log row when an X-Api-Key header is present."""
        api_key_header = "X-Api-Key"
        api_key = request.headers.get(api_key_header)
        # If the API key is present, log the request
        if api_key:
            try:
                APIActivityLog.objects.create(
                    token_identifier=api_key,
                    path=request.path,
                    method=request.method,
                    query_params=request.META.get("QUERY_STRING", ""),
                    headers=str(request.headers),
                    body=(request_body.decode("utf-8") if request_body else None),
                    # NOTE(review): streaming responses expose no .content and
                    # would raise here; the except below swallows that case,
                    # dropping the log entry — confirm intended.
                    response_body=(
                        response.content.decode("utf-8") if response.content else None
                    ),
                    response_code=response.status_code,
                    ip_address=get_client_ip(request=request),
                    user_agent=request.META.get("HTTP_USER_AGENT", None),
                )
            except Exception as e:
                # Best-effort logging: a failure to record activity must not
                # break the response cycle.
                api_logger.exception(e)
        # If the token does not exist, you can decide whether to log this as an invalid attempt
        return None

View file

@ -7,13 +7,9 @@ from urllib.parse import urlparse
# Third party imports
import dj_database_url
import sentry_sdk
# Django imports
from django.core.management.utils import get_random_secret_key
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from corsheaders.defaults import default_headers
@ -26,7 +22,7 @@ SECRET_KEY = os.environ.get("SECRET_KEY", get_random_secret_key())
DEBUG = int(os.environ.get("DEBUG", "0"))
# Allowed Hosts
ALLOWED_HOSTS = ["*"]
ALLOWED_HOSTS = os.environ.get("ALLOWED_HOSTS", "*").split(",")
# Application definition
INSTALLED_APPS = [
@ -62,7 +58,8 @@ MIDDLEWARE = [
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"crum.CurrentRequestUserMiddleware",
"django.middleware.gzip.GZipMiddleware",
"plane.middleware.api_log_middleware.APITokenLogMiddleware",
"plane.middleware.logger.APITokenLogMiddleware",
"plane.middleware.logger.RequestLoggerMiddleware",
]
# Rest Framework settings
@ -267,25 +264,6 @@ CELERY_IMPORTS = (
"plane.bgtasks.issue_description_version_sync",
)
# Sentry Settings
# Enable Sentry Settings
if bool(os.environ.get("SENTRY_DSN", False)) and os.environ.get(
"SENTRY_DSN"
).startswith("https://"):
sentry_sdk.init(
dsn=os.environ.get("SENTRY_DSN", ""),
integrations=[
DjangoIntegration(),
RedisIntegration(),
CeleryIntegration(monitor_beat_tasks=True),
],
traces_sample_rate=1,
send_default_pii=True,
environment=os.environ.get("SENTRY_ENVIRONMENT", "development"),
profiles_sample_rate=float(os.environ.get("SENTRY_PROFILE_SAMPLE_RATE", 0)),
)
FILE_SIZE_LIMIT = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
# Unsplash Access key
@ -337,7 +315,7 @@ ADMIN_BASE_URL = os.environ.get("ADMIN_BASE_URL", None)
SPACE_BASE_URL = os.environ.get("SPACE_BASE_URL", None)
APP_BASE_URL = os.environ.get("APP_BASE_URL")
LIVE_BASE_URL = os.environ.get("LIVE_BASE_URL")
WEB_URL = os.environ.get("WEB_URL")
HARD_DELETE_AFTER_DAYS = int(os.environ.get("HARD_DELETE_AFTER_DAYS", 60))
@ -413,4 +391,8 @@ ATTACHMENT_MIME_TYPES = [
"text/xml",
"text/csv",
"application/xml",
# SQL
"application/x-sql",
# Gzip
"application/x-gzip",
]

View file

@ -37,26 +37,41 @@ if not os.path.exists(LOG_DIR):
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"disable_existing_loggers": True,
"formatters": {
"verbose": {
"format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
"style": "{",
}
},
"json": {
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
"fmt": "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s",
},
},
"handlers": {
"console": {
"level": "DEBUG",
"class": "logging.StreamHandler",
"formatter": "verbose",
"formatter": "json",
}
},
"loggers": {
"django.request": {
"plane.api.request": {
"level": "INFO",
"handlers": ["console"],
"propagate": False,
},
"plane.api": {"level": "INFO", "handlers": ["console"], "propagate": False},
"plane.worker": {"level": "INFO", "handlers": ["console"], "propagate": False},
"plane.exception": {
"level": "ERROR",
"handlers": ["console"],
"propagate": False,
},
"plane.external": {
"level": "INFO",
"handlers": ["console"],
"level": "DEBUG",
"propagate": False,
},
"plane": {"handlers": ["console"], "level": "DEBUG", "propagate": False},
},
}

View file

@ -26,11 +26,10 @@ if not os.path.exists(LOG_DIR):
# Logging configuration
LOGGING = {
"version": 1,
"disable_existing_loggers": False,
"disable_existing_loggers": True,
"formatters": {
"verbose": {
"format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
"style": "{",
"format": "%(asctime)s [%(process)d] %(levelname)s %(name)s: %(message)s"
},
"json": {
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
@ -40,7 +39,7 @@ LOGGING = {
"handlers": {
"console": {
"class": "logging.StreamHandler",
"formatter": "verbose",
"formatter": "json",
"level": "INFO",
},
"file": {
@ -59,16 +58,30 @@ LOGGING = {
},
},
"loggers": {
"django": {"handlers": ["console", "file"], "level": "INFO", "propagate": True},
"django.request": {
"handlers": ["console", "file"],
"level": "INFO",
"plane.api.request": {
"level": "DEBUG" if DEBUG else "INFO",
"handlers": ["console"],
"propagate": False,
},
"plane": {
"plane.api": {
"level": "DEBUG" if DEBUG else "INFO",
"handlers": ["console"],
"propagate": False,
},
"plane.worker": {
"level": "DEBUG" if DEBUG else "INFO",
"handlers": ["console"],
"propagate": False,
},
"plane.exception": {
"level": "DEBUG" if DEBUG else "ERROR",
"handlers": ["console", "file"],
"propagate": False,
},
"plane.external": {
"level": "INFO",
"handlers": ["console"],
"propagate": False,
},
},
}

View file

@ -3,6 +3,9 @@ from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value, F, Case, When, JSONField, CharField
from django.db.models.functions import Coalesce, JSONObject, Concat
from django.db.models import QuerySet
from typing import List, Optional, Dict, Any, Union
# Module imports
from plane.db.models import (
@ -17,13 +20,25 @@ from plane.db.models import (
)
def issue_queryset_grouper(queryset, group_by, sub_group_by):
def issue_queryset_grouper(
queryset: QuerySet[Issue], group_by: Optional[str], sub_group_by: Optional[str]
) -> QuerySet[Issue]:
FIELD_MAPPER = {
"label_ids": "labels__id",
"assignee_ids": "assignees__id",
"module_ids": "issue_module__module_id",
}
GROUP_FILTER_MAPPER = {
"assignees__id": Q(issue_assignee__deleted_at__isnull=True),
"labels__id": Q(label_issue__deleted_at__isnull=True),
"issue_module__module_id": Q(issue_module__deleted_at__isnull=True),
}
for group_key in [group_by, sub_group_by]:
if group_key in GROUP_FILTER_MAPPER:
queryset = queryset.filter(GROUP_FILTER_MAPPER[group_key])
annotations_map = {
"assignee_ids": (
"assignees__id",
@ -50,7 +65,9 @@ def issue_queryset_grouper(queryset, group_by, sub_group_by):
return queryset.annotate(**default_annotations)
def issue_on_results(issues, group_by, sub_group_by):
def issue_on_results(
issues: QuerySet[Issue], group_by: Optional[str], sub_group_by: Optional[str]
) -> List[Dict[str, Any]]:
FIELD_MAPPER = {
"labels__id": "label_ids",
"assignees__id": "assignee_ids",
@ -160,7 +177,12 @@ def issue_on_results(issues, group_by, sub_group_by):
return issues
def issue_group_values(field, slug, project_id=None, filters=dict):
def issue_group_values(
field: str,
slug: str,
project_id: Optional[str] = None,
filters: Dict[str, Any] = {},
) -> List[Union[str, Any]]:
if field == "state_id":
queryset = State.objects.filter(
is_triage=False, workspace__slug=slug

View file

@ -96,7 +96,7 @@ class EntityAssetEndpoint(BaseAPIView):
if type not in allowed_types:
return Response(
{
"error": "Invalid file type. Only JPEG and PNG files are allowed.",
"error": "Invalid file type. Only JPEG, PNG, WebP, JPG and GIF files are allowed.",
"status": False,
},
status=status.HTTP_400_BAD_REQUEST,

View file

@ -41,4 +41,4 @@ class WorkSpaceCreateReadUpdateDelete(AuthenticatedAPITest):
response = self.client.post(
url, {"name": "Plane", "slug": "pla-ne"}, format="json"
)
self.assertEqual(response.status_code, status.HTTP_410_GONE)
self.assertEqual(response.status_code, status.HTTP_409_CONFLICT)

View file

@ -5,19 +5,14 @@ import traceback
# Django imports
from django.conf import settings
# Third party imports
from sentry_sdk import capture_exception
def log_exception(e):
# Log the error
logger = logging.getLogger("plane")
logger.error(e)
logger = logging.getLogger("plane.exception")
logger.exception(e)
if settings.DEBUG:
# Print the traceback if in debug mode
print(traceback.format_exc())
# Capture in sentry if configured
capture_exception(e)
return

View file

@ -1,7 +1,7 @@
# Django imports
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value
from django.db.models import Q, UUIDField, Value, QuerySet
from django.db.models.functions import Coalesce
# Module imports
@ -15,16 +15,31 @@ from plane.db.models import (
State,
WorkspaceMember,
)
from typing import Optional, Dict, Tuple, Any, Union, List
def issue_queryset_grouper(queryset, group_by, sub_group_by):
FIELD_MAPPER = {
def issue_queryset_grouper(
queryset: QuerySet[Issue],
group_by: Optional[str],
sub_group_by: Optional[str],
) -> QuerySet[Issue]:
FIELD_MAPPER: Dict[str, str] = {
"label_ids": "labels__id",
"assignee_ids": "assignees__id",
"module_ids": "issue_module__module_id",
}
annotations_map = {
GROUP_FILTER_MAPPER: Dict[str, Q] = {
"assignees__id": Q(issue_assignee__deleted_at__isnull=True),
"labels__id": Q(label_issue__deleted_at__isnull=True),
"issue_module__module_id": Q(issue_module__deleted_at__isnull=True),
}
for group_key in [group_by, sub_group_by]:
if group_key in GROUP_FILTER_MAPPER:
queryset = queryset.filter(GROUP_FILTER_MAPPER[group_key])
annotations_map: Dict[str, Tuple[str, Q]] = {
"assignee_ids": (
"assignees__id",
~Q(assignees__id__isnull=True) & Q(issue_assignee__deleted_at__isnull=True),
@ -42,7 +57,8 @@ def issue_queryset_grouper(queryset, group_by, sub_group_by):
),
),
}
default_annotations = {
default_annotations: Dict[str, Any] = {
key: Coalesce(
ArrayAgg(field, distinct=True, filter=condition),
Value([], output_field=ArrayField(UUIDField())),
@ -54,16 +70,20 @@ def issue_queryset_grouper(queryset, group_by, sub_group_by):
return queryset.annotate(**default_annotations)
def issue_on_results(issues, group_by, sub_group_by):
FIELD_MAPPER = {
def issue_on_results(
issues: QuerySet[Issue],
group_by: Optional[str],
sub_group_by: Optional[str],
) -> List[Dict[str, Any]]:
FIELD_MAPPER: Dict[str, str] = {
"labels__id": "label_ids",
"assignees__id": "assignee_ids",
"issue_module__module_id": "module_ids",
}
original_list = ["assignee_ids", "label_ids", "module_ids"]
original_list: List[str] = ["assignee_ids", "label_ids", "module_ids"]
required_fields = [
required_fields: List[str] = [
"id",
"name",
"state_id",
@ -98,62 +118,72 @@ def issue_on_results(issues, group_by, sub_group_by):
original_list.append(sub_group_by)
required_fields.extend(original_list)
return issues.values(*required_fields)
return list(issues.values(*required_fields))
def issue_group_values(field, slug, project_id=None, filters=dict):
def issue_group_values(
field: str,
slug: str,
project_id: Optional[str] = None,
filters: Dict[str, Any] = {},
) -> List[Union[str, Any]]:
if field == "state_id":
queryset = State.objects.filter(
is_triage=False, workspace__slug=slug
).values_list("id", flat=True)
if project_id:
return list(queryset.filter(project_id=project_id))
else:
return list(queryset)
return list(queryset)
if field == "labels__id":
queryset = Label.objects.filter(workspace__slug=slug).values_list(
"id", flat=True
)
if project_id:
return list(queryset.filter(project_id=project_id)) + ["None"]
else:
return list(queryset) + ["None"]
return list(queryset) + ["None"]
if field == "assignees__id":
if project_id:
return ProjectMember.objects.filter(
workspace__slug=slug, project_id=project_id, is_active=True
).values_list("member_id", flat=True)
else:
return list(
WorkspaceMember.objects.filter(
workspace__slug=slug, is_active=True
ProjectMember.objects.filter(
workspace__slug=slug, project_id=project_id, is_active=True
).values_list("member_id", flat=True)
)
return list(
WorkspaceMember.objects.filter(
workspace__slug=slug, is_active=True
).values_list("member_id", flat=True)
)
if field == "issue_module__module_id":
queryset = Module.objects.filter(workspace__slug=slug).values_list(
"id", flat=True
)
if project_id:
return list(queryset.filter(project_id=project_id)) + ["None"]
else:
return list(queryset) + ["None"]
return list(queryset) + ["None"]
if field == "cycle_id":
queryset = Cycle.objects.filter(workspace__slug=slug).values_list(
"id", flat=True
)
if project_id:
return list(queryset.filter(project_id=project_id)) + ["None"]
else:
return list(queryset) + ["None"]
return list(queryset) + ["None"]
if field == "project_id":
queryset = Project.objects.filter(workspace__slug=slug).values_list(
"id", flat=True
)
return list(queryset)
if field == "priority":
return ["low", "medium", "high", "urgent", "none"]
if field == "state__group":
return ["backlog", "unstarted", "started", "completed", "cancelled"]
if field == "target_date":
queryset = (
Issue.issue_objects.filter(workspace__slug=slug)
@ -163,8 +193,8 @@ def issue_group_values(field, slug, project_id=None, filters=dict):
)
if project_id:
return list(queryset.filter(project_id=project_id))
else:
return list(queryset)
return list(queryset)
if field == "start_date":
queryset = (
Issue.issue_objects.filter(workspace__slug=slug)
@ -174,8 +204,7 @@ def issue_group_values(field, slug, project_id=None, filters=dict):
)
if project_id:
return list(queryset.filter(project_id=project_id))
else:
return list(queryset)
return list(queryset)
if field == "created_by":
queryset = (
@ -186,7 +215,6 @@ def issue_group_values(field, slug, project_id=None, filters=dict):
)
if project_id:
return list(queryset.filter(project_id=project_id))
else:
return list(queryset)
return list(queryset)
return []

View file

@ -1,18 +1,21 @@
# Python imports
from urllib.parse import urlsplit
# Django imports
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest
# Third party imports
from rest_framework.request import Request
def base_host(request, is_admin=False, is_space=False, is_app=False):
# Module imports
from plane.utils.ip_address import get_client_ip
def base_host(request: Request | HttpRequest, is_admin: bool = False, is_space: bool = False, is_app: bool = False) -> str:
"""Utility function to return host / origin from the request"""
# Calculate the base origin from request
base_origin = str(
request.META.get("HTTP_ORIGIN")
or f"{urlsplit(request.META.get('HTTP_REFERER')).scheme}://{urlsplit(request.META.get('HTTP_REFERER')).netloc}"
or f"""{"https" if request.is_secure() else "http"}://{request.get_host()}"""
)
base_origin = settings.WEB_URL or settings.APP_BASE_URL
if not base_origin:
raise ImproperlyConfigured("APP_BASE_URL or WEB_URL is not set")
# Admin redirections
if is_admin:
@ -38,5 +41,5 @@ def base_host(request, is_admin=False, is_space=False, is_app=False):
return base_origin
def user_ip(request):
return str(request.META.get("REMOTE_ADDR"))
def user_ip(request: Request | HttpRequest) -> str:
return get_client_ip(request=request)

View file

@ -0,0 +1,21 @@
# Python imports
from urllib.parse import urlparse
def validate_next_path(next_path: str) -> str:
    """Validate ``next_path`` as a safe, site-relative redirect target.

    Returns only the path component of the input, or ``""`` when the value
    cannot be used safely.
    """
    parsed_url = urlparse(next_path)

    # Ensure next_path is not an absolute URL: keep only its path component.
    if parsed_url.scheme or parsed_url.netloc:
        next_path = parsed_url.path  # Extract only the path component

    # Ensure it starts with a forward slash (indicating a valid relative path)
    if not next_path.startswith("/"):
        return ""

    # Ensure it does not contain dangerous path traversal sequences
    if ".." in next_path:
        return ""

    # Reject backslashes: browsers normalise "\" to "/", so "/\evil.com"
    # would be interpreted as the protocol-relative URL "//evil.com".
    if "\\" in next_path:
        return ""

    # Reject protocol-relative targets and header-splitting characters.
    if next_path.startswith("//") or "\r" in next_path or "\n" in next_path:
        return ""

    return next_path

View file

@ -0,0 +1,22 @@
# Python imports
import uuid
import hashlib
def is_valid_uuid(uuid_str):
    """Return True when ``uuid_str`` is a valid version-4 UUID string.

    Also returns False (instead of raising TypeError/AttributeError, as the
    previous implementation did) when the input is not a string at all,
    e.g. ``None`` or an int.
    """
    try:
        return uuid.UUID(uuid_str).version == 4
    except (ValueError, TypeError, AttributeError):
        # Unparsable string, or a non-string input.
        return False
def convert_uuid_to_integer(uuid_val: uuid.UUID) -> int:
"""Convert a UUID to a 64-bit signed integer"""
# Ensure UUID is a string
uuid_value: str = str(uuid_val)
# Hash to 64-bit signed int
h: bytes = hashlib.sha256(uuid_value.encode()).digest()
bigint: int = int.from_bytes(h[:8], byteorder="big", signed=True)
return bigint

View file

@ -1,8 +0,0 @@
import uuid
def is_valid_uuid(uuid_str):
    """Return True only when ``uuid_str`` parses as a version-4 UUID.

    Bug fix: ``uuid.UUID(uuid_str, version=4)`` does NOT validate the
    version — the ``version`` argument *overwrites* the version/variant
    bits, so the old code returned True for any syntactically valid UUID
    (v1, v3, v5, ...). Check the parsed version explicitly instead.
    """
    try:
        return uuid.UUID(uuid_str).version == 4
    except ValueError:
        return False

View file

@ -26,8 +26,6 @@ faker==25.0.0
django-filter==24.2
# json model
jsonmodels==2.7.0
# sentry
sentry-sdk==2.8.0
# storage
django-storages==1.14.2
# user management
@ -45,7 +43,7 @@ scout-apm==3.1.0
# xlsx generation
openpyxl==3.1.2
# logging
python-json-logger==2.0.7
python-json-logger==3.3.0
# html parser
beautifulsoup4==4.12.3
# analytics

Some files were not shown because too many files have changed in this diff Show more