[WEB-5044] fix: ruff lint and format errors (#7868)

* fix: lint errors

* fix: file formatting

* fix: code refactor
Author: sriram veeraghanta (committed by GitHub)
Date: 2025-09-29 19:15:32 +05:30
Parent: 1fb22bd252
Commit: 9237f568dd
261 changed files with 2199 additions and 6378 deletions
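
The changes are mechanical: ruff's lint rules flag issues such as unused imports (rule F401), and ruff format joins short wrapped calls onto a single line when they fit within the configured line length. A minimal sketch of that reflow, using hypothetical stand-in names rather than code from this commit:

# Hypothetical stand-ins, not code from this commit: a sketch of the reflow
# that ruff format applies when a wrapped call fits on one line.

def get_issue(workspace_slug: str, project_id: int, pk: int) -> dict:
    """Stand-in for a queryset lookup such as Issue.issue_objects.get(...)."""
    return {"workspace_slug": workspace_slug, "project_id": project_id, "pk": pk}


# Before: arguments wrapped across several lines.
issue = get_issue(
    workspace_slug="my-workspace",
    project_id=1,
    pk=42,
)

# After formatting: the call is joined onto one line because it fits within the
# configured line length (this repository appears to use a limit near 120).
issue = get_issue(workspace_slug="my-workspace", project_id=1, pk=42)

print(issue)

The excerpt below shows one of the affected files, the issue archive views (IssueArchiveViewSet and BulkArchiveIssuesEndpoint).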


@@ -4,7 +4,7 @@ import json
 # Django imports
 from django.core.serializers.json import DjangoJSONEncoder
-from django.db.models import F, Func, OuterRef, Q, Prefetch, Exists, Subquery, Count
+from django.db.models import OuterRef, Q, Prefetch, Exists, Subquery, Count
 from django.utils import timezone
 from django.utils.decorators import method_decorator
 from django.views.decorators.gzip import gzip_page
@@ -57,9 +57,7 @@ class IssueArchiveViewSet(BaseViewSet):
         return (
             issues.annotate(
                 cycle_id=Subquery(
-                    CycleIssue.objects.filter(
-                        issue=OuterRef("id"), deleted_at__isnull=True
-                    ).values("cycle_id")[:1]
+                    CycleIssue.objects.filter(issue=OuterRef("id"), deleted_at__isnull=True).values("cycle_id")[:1]
                 )
             )
             .annotate(
@@ -110,11 +108,7 @@ class IssueArchiveViewSet(BaseViewSet):
         issue_queryset = self.get_queryset()
-        issue_queryset = (
-            issue_queryset
-            if show_sub_issues == "true"
-            else issue_queryset.filter(parent__isnull=True)
-        )
+        issue_queryset = issue_queryset if show_sub_issues == "true" else issue_queryset.filter(parent__isnull=True)

         # Apply filtering from filterset
         issue_queryset = self.filter_queryset(issue_queryset)
@@ -137,18 +131,14 @@ class IssueArchiveViewSet(BaseViewSet):
         sub_group_by = request.GET.get("sub_group_by", False)

         # issue queryset
-        issue_queryset = issue_queryset_grouper(
-            queryset=issue_queryset, group_by=group_by, sub_group_by=sub_group_by
-        )
+        issue_queryset = issue_queryset_grouper(queryset=issue_queryset, group_by=group_by, sub_group_by=sub_group_by)

         if group_by:
             # Check group and sub group value paginate
             if sub_group_by:
                 if group_by == sub_group_by:
                     return Response(
-                        {
-                            "error": "Group by and sub group by cannot have same parameters"
-                        },
+                        {"error": "Group by and sub group by cannot have same parameters"},
                         status=status.HTTP_400_BAD_REQUEST,
                     )
                 else:
@@ -220,9 +210,7 @@ class IssueArchiveViewSet(BaseViewSet):
             request=request,
             queryset=issue_queryset,
             total_count_queryset=total_issue_queryset,
-            on_results=lambda issues: issue_on_results(
-                group_by=group_by, issues=issues, sub_group_by=sub_group_by
-            ),
+            on_results=lambda issues: issue_on_results(group_by=group_by, issues=issues, sub_group_by=sub_group_by),
         )

     @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
@@ -263,9 +251,7 @@ class IssueArchiveViewSet(BaseViewSet):
     @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
     def archive(self, request, slug, project_id, pk=None):
-        issue = Issue.issue_objects.get(
-            workspace__slug=slug, project_id=project_id, pk=pk
-        )
+        issue = Issue.issue_objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
         if issue.state.group not in ["completed", "cancelled"]:
             return Response(
                 {"error": "Can only archive completed or cancelled state group issue"},
@@ -273,15 +259,11 @@ class IssueArchiveViewSet(BaseViewSet):
             )
         issue_activity.delay(
             type="issue.activity.updated",
-            requested_data=json.dumps(
-                {"archived_at": str(timezone.now().date()), "automation": False}
-            ),
+            requested_data=json.dumps({"archived_at": str(timezone.now().date()), "automation": False}),
             actor_id=str(request.user.id),
             issue_id=str(issue.id),
             project_id=str(project_id),
-            current_instance=json.dumps(
-                IssueSerializer(issue).data, cls=DjangoJSONEncoder
-            ),
+            current_instance=json.dumps(IssueSerializer(issue).data, cls=DjangoJSONEncoder),
             epoch=int(timezone.now().timestamp()),
             notification=True,
             origin=base_host(request=request, is_app=True),
@@ -289,9 +271,7 @@ class IssueArchiveViewSet(BaseViewSet):
         issue.archived_at = timezone.now().date()
         issue.save()
-        return Response(
-            {"archived_at": str(issue.archived_at)}, status=status.HTTP_200_OK
-        )
+        return Response({"archived_at": str(issue.archived_at)}, status=status.HTTP_200_OK)

     @allow_permission([ROLE.ADMIN, ROLE.MEMBER])
     def unarchive(self, request, slug, project_id, pk=None):
@@ -307,9 +287,7 @@ class IssueArchiveViewSet(BaseViewSet):
             actor_id=str(request.user.id),
             issue_id=str(issue.id),
             project_id=str(project_id),
-            current_instance=json.dumps(
-                IssueSerializer(issue).data, cls=DjangoJSONEncoder
-            ),
+            current_instance=json.dumps(IssueSerializer(issue).data, cls=DjangoJSONEncoder),
             epoch=int(timezone.now().timestamp()),
             notification=True,
             origin=base_host(request=request, is_app=True),
@@ -328,13 +306,11 @@ class BulkArchiveIssuesEndpoint(BaseAPIView):
         issue_ids = request.data.get("issue_ids", [])
         if not len(issue_ids):
-            return Response(
-                {"error": "Issue IDs are required"}, status=status.HTTP_400_BAD_REQUEST
-            )
+            return Response({"error": "Issue IDs are required"}, status=status.HTTP_400_BAD_REQUEST)

-        issues = Issue.objects.filter(
-            workspace__slug=slug, project_id=project_id, pk__in=issue_ids
-        ).select_related("state")
+        issues = Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk__in=issue_ids).select_related(
+            "state"
+        )

         bulk_archive_issues = []
         for issue in issues:
             if issue.state.group not in ["completed", "cancelled"]:
@@ -347,15 +323,11 @@ class BulkArchiveIssuesEndpoint(BaseAPIView):
                 )
             issue_activity.delay(
                 type="issue.activity.updated",
-                requested_data=json.dumps(
-                    {"archived_at": str(timezone.now().date()), "automation": False}
-                ),
+                requested_data=json.dumps({"archived_at": str(timezone.now().date()), "automation": False}),
                 actor_id=str(request.user.id),
                 issue_id=str(issue.id),
                 project_id=str(project_id),
-                current_instance=json.dumps(
-                    IssueSerializer(issue).data, cls=DjangoJSONEncoder
-                ),
+                current_instance=json.dumps(IssueSerializer(issue).data, cls=DjangoJSONEncoder),
                 epoch=int(timezone.now().timestamp()),
                 notification=True,
                 origin=base_host(request=request, is_app=True),
@@ -364,6 +336,4 @@ class BulkArchiveIssuesEndpoint(BaseAPIView):
             bulk_archive_issues.append(issue)

         Issue.objects.bulk_update(bulk_archive_issues, ["archived_at"])
-        return Response(
-            {"archived_at": str(timezone.now().date())}, status=status.HTTP_200_OK
-        )
+        return Response({"archived_at": str(timezone.now().date())}, status=status.HTTP_200_OK)