[WEB-4951] [WEB-4884] feat: work item filters revamp (#7810)
This commit is contained in:
parent
e6a7ca4c72
commit
9aef5d4aa9
160 changed files with 5879 additions and 4881 deletions
10
apps/api/plane/utils/filters/__init__.py
Normal file
10
apps/api/plane/utils/filters/__init__.py
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
# Filters module for handling complex filtering operations

# Import all utilities from base modules so consumers can import directly
# from `plane.utils.filters` instead of the individual submodules.
from .filter_backend import ComplexFilterBackend
from .converters import LegacyToRichFiltersConverter
from .filterset import IssueFilterSet


# Public API exports
__all__ = ["ComplexFilterBackend", "LegacyToRichFiltersConverter", "IssueFilterSet"]
|
||||
438
apps/api/plane/utils/filters/converters.py
Normal file
438
apps/api/plane/utils/filters/converters.py
Normal file
|
|
@ -0,0 +1,438 @@
|
|||
import re
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Union
|
||||
|
||||
from dateutil.parser import parse as dateutil_parse
|
||||
|
||||
|
||||
class LegacyToRichFiltersConverter:
    """Convert legacy flat filter dictionaries into the rich filter format.

    Legacy filters are flat mappings such as ``{"priority": ["high"]}``.
    The rich format uses ``field__lookup`` keys (e.g. ``priority__in``),
    wrapping multiple conditions in an ``{"and": [...]}`` node (see
    ``_format_as_rich_filter``). Invalid values are silently skipped by
    default; with ``strict=True`` they are collected and raised as a
    single ``ValueError`` at the end of ``convert``.
    """

    # Default mapping from legacy filter names to new rich filter field names
    DEFAULT_FIELD_MAPPINGS = {
        "state": "state_id",
        "labels": "label_id",
        "cycle": "cycle_id",
        "module": "module_id",
        "assignees": "assignee_id",
        "mentions": "mention_id",
        "created_by": "created_by_id",
        "state_group": "state_group",
        "priority": "priority",
        "project": "project_id",
        "start_date": "start_date",
        "target_date": "target_date",
    }

    # Default fields that expect UUID values
    DEFAULT_UUID_FIELDS = {
        "state_id",
        "label_id",
        "cycle_id",
        "module_id",
        "assignee_id",
        "mention_id",
        "created_by_id",
        "project_id",
    }

    # Default valid choices for choice fields
    DEFAULT_VALID_CHOICES = {
        "state_group": ["backlog", "unstarted", "started", "completed", "cancelled"],
        "priority": ["urgent", "high", "medium", "low", "none"],
    }

    # Default date fields
    DEFAULT_DATE_FIELDS = {"start_date", "target_date"}

    # Pattern for relative date strings like "2_weeks" or "3_months"
    DATE_PATTERN = re.compile(r"(\d+)_(weeks|months)$")

    def __init__(
        self,
        field_mappings: Dict[str, str] = None,
        uuid_fields: set = None,
        valid_choices: Dict[str, List[str]] = None,
        date_fields: set = None,
        extend_defaults: bool = True,
    ):
        """
        Initialize the converter with optional custom configurations.

        Args:
            field_mappings: Custom field mappings (legacy_key -> rich_field_name)
            uuid_fields: Set of field names that should be validated as UUIDs
            valid_choices: Dict of valid choices for choice fields
            date_fields: Set of field names that should be treated as dates
            extend_defaults: If True, merge with defaults; if False, replace defaults

        Examples:
            # Use defaults
            converter = LegacyToRichFiltersConverter()

            # Add custom field mapping
            converter = LegacyToRichFiltersConverter(
                field_mappings={"custom_field": "custom_field_id"}
            )

            # Override priority choices
            converter = LegacyToRichFiltersConverter(
                valid_choices={"priority": ["critical", "high", "medium", "low"]}
            )

            # Complete replacement (not extending defaults)
            converter = LegacyToRichFiltersConverter(
                field_mappings={"state": "status_id"},
                extend_defaults=False
            )
        """
        if extend_defaults:
            # Merge with defaults. Instance-level copies are created so that
            # the class-level default dicts/sets are never mutated.
            self.FIELD_MAPPINGS = {**self.DEFAULT_FIELD_MAPPINGS}
            if field_mappings:
                self.FIELD_MAPPINGS.update(field_mappings)

            self.UUID_FIELDS = {*self.DEFAULT_UUID_FIELDS}
            if uuid_fields:
                self.UUID_FIELDS.update(uuid_fields)

            self.VALID_CHOICES = {**self.DEFAULT_VALID_CHOICES}
            if valid_choices:
                self.VALID_CHOICES.update(valid_choices)

            self.DATE_FIELDS = {*self.DEFAULT_DATE_FIELDS}
            if date_fields:
                self.DATE_FIELDS.update(date_fields)
        else:
            # Replace defaults entirely
            self.FIELD_MAPPINGS = field_mappings or {}
            self.UUID_FIELDS = uuid_fields or set()
            self.VALID_CHOICES = valid_choices or {}
            self.DATE_FIELDS = date_fields or set()

    def add_field_mapping(self, legacy_key: str, rich_field_name: str) -> None:
        """Add or update a single field mapping."""
        self.FIELD_MAPPINGS[legacy_key] = rich_field_name

    def add_uuid_field(self, field_name: str) -> None:
        """Add a field that should be validated as UUID."""
        self.UUID_FIELDS.add(field_name)

    def add_choice_field(self, field_name: str, choices: List[str]) -> None:
        """Add or update valid choices for a choice field."""
        self.VALID_CHOICES[field_name] = choices

    def add_date_field(self, field_name: str) -> None:
        """Add a field that should be treated as a date field."""
        self.DATE_FIELDS.add(field_name)

    def update_mappings(
        self,
        field_mappings: Dict[str, str] = None,
        uuid_fields: set = None,
        valid_choices: Dict[str, List[str]] = None,
        date_fields: set = None,
    ) -> None:
        """
        Update multiple configurations at once.

        Args:
            field_mappings: Additional field mappings to add/update
            uuid_fields: Additional UUID fields to add
            valid_choices: Additional choice fields to add/update
            date_fields: Additional date fields to add
        """
        if field_mappings:
            self.FIELD_MAPPINGS.update(field_mappings)
        if uuid_fields:
            self.UUID_FIELDS.update(uuid_fields)
        if valid_choices:
            self.VALID_CHOICES.update(valid_choices)
        if date_fields:
            self.DATE_FIELDS.update(date_fields)

    def _validate_uuid(self, value: str) -> bool:
        """Validate if a string is a valid UUID."""
        try:
            uuid.UUID(str(value))
            return True
        except (ValueError, TypeError):
            return False

    def _validate_choice(self, field_name: str, value: str) -> bool:
        """Validate if a value is valid for a choice field."""
        if field_name not in self.VALID_CHOICES:
            return True  # No validation needed for this field
        return value in self.VALID_CHOICES[field_name]

    def _validate_date(self, value: Union[str, datetime]) -> bool:
        """Validate if a value is a valid date using dateutil parser."""
        if isinstance(value, datetime):
            return True
        if isinstance(value, str):
            try:
                # Use dateutil for flexible date parsing
                dateutil_parse(value)
                return True
            except (ValueError, TypeError):
                return False
        return False

    def _validate_value(self, rich_field_name: str, value: Any) -> bool:
        """Validate a single value based on field type (UUID/choice/date)."""
        if rich_field_name in self.UUID_FIELDS:
            return self._validate_uuid(value)
        elif rich_field_name in self.VALID_CHOICES:
            return self._validate_choice(rich_field_name, value)
        elif rich_field_name in self.DATE_FIELDS:
            return self._validate_date(value)
        return True  # No specific validation needed

    def _filter_valid_values(
        self, rich_field_name: str, values: List[Any]
    ) -> List[Any]:
        """Filter out invalid values from a list and return only valid ones."""
        valid_values = []
        for value in values:
            if self._validate_value(rich_field_name, value):
                valid_values.append(value)
        return valid_values

    def _add_validation_error(
        self, strict: bool, validation_errors: List[str], message: str
    ) -> None:
        """Add validation error if in strict mode (no-op otherwise)."""
        if strict:
            validation_errors.append(message)

    def _add_rich_filter(
        self, rich_filters: Dict[str, Any], field_name: str, operator: str, value: Any
    ) -> None:
        """Add a rich filter entry keyed as ``<field>__<operator>``."""
        # Convert lists to comma-separated strings for 'in' and 'range' operations
        if operator in ("in", "range") and isinstance(value, list):
            value = ",".join(str(v) for v in value)
        rich_filters[f"{field_name}__{operator}"] = value

    def _handle_value_error(
        self, e: ValueError, strict: bool, validation_errors: List[str]
    ) -> None:
        """Handle ValueError with consistent strict/non-strict behavior."""
        if strict:
            validation_errors.append(str(e))
        # In non-strict mode, we just skip (no action needed)

    def _process_date_field(
        self,
        rich_field_name: str,
        values: List[str],
        strict: bool,
        validation_errors: List[str],
        rich_filters: Dict[str, Any],
    ) -> bool:
        """Process date field with basic functionality (exact, range).

        Returns True when the field was a date field (and thus consumed),
        False when the caller should fall through to non-date handling.
        """
        if rich_field_name not in self.DATE_FIELDS:
            return False

        try:
            date_filter_result = self._convert_date_value(
                rich_field_name, values, strict
            )
            if date_filter_result:
                rich_filters.update(date_filter_result)
            return True
        except ValueError as e:
            self._handle_value_error(e, strict, validation_errors)
            return True

    def _convert_date_value(
        self, field_name: str, values: List[str], strict: bool = False
    ) -> Dict[str, Any]:
        """
        Convert legacy date values to rich filter format - basic implementation.

        Supports:
        - Simple dates: "2023-01-01" -> __exact
        - Basic ranges: ["2023-01-01;after", "2023-12-31;before"] -> __range
        - Skips complex or relative date patterns

        Args:
            field_name: Name of the rich filter field
            values: List of legacy date values
            strict: If True, raise errors for validation failures

        Raises:
            ValueError: For malformed date patterns (strict mode)
        """
        # Check for relative dates and skip the entire field if found
        for value in values:
            if ";" in value:
                parts = value.split(";")
                if len(parts) > 0 and self.DATE_PATTERN.match(parts[0]):
                    # Skip relative date patterns entirely
                    return {}

        # Skip complex conditions (more than 2 values)
        if len(values) > 2:
            return {}

        # Process each date value
        exact_dates = []
        after_dates = []
        before_dates = []

        for value in values:
            if ";" not in value:
                # Simple date string
                if not self._validate_date(value):
                    if strict:
                        raise ValueError(f"Invalid date format: {value}")
                    continue
                exact_dates.append(value)
            else:
                # Directional date - only handle basic after/before
                parts = value.split(";")
                if len(parts) < 2:
                    if strict:
                        raise ValueError(f"Invalid date format: {value}")
                    continue

                date_part = parts[0]
                direction = parts[1]

                if not self._validate_date(date_part):
                    if strict:
                        raise ValueError(f"Invalid date format: {date_part}")
                    continue

                if direction == "after":
                    after_dates.append(date_part)
                elif direction == "before":
                    before_dates.append(date_part)
                # Skip unsupported directions

        # Determine return format
        result = {}
        if len(after_dates) == 1 and len(before_dates) == 1 and len(exact_dates) == 0:
            # Simple range: one after and one before
            # NOTE(review): min/max compare the date strings lexically, which
            # is chronological only for ISO-8601 dates — confirm input format.
            start_date = min(after_dates[0], before_dates[0])
            end_date = max(after_dates[0], before_dates[0])
            self._add_rich_filter(result, field_name, "range", [start_date, end_date])
        elif len(exact_dates) == 1 and len(after_dates) == 0 and len(before_dates) == 0:
            # Single exact date
            self._add_rich_filter(result, field_name, "exact", exact_dates[0])
        # Skip all other combinations

        return result

    def convert(self, legacy_filters: dict, strict: bool = False) -> Dict[str, Any]:
        """
        Convert legacy filters to rich filters format with validation

        Args:
            legacy_filters: Dictionary of legacy filters
            strict: If True, raise exception on validation errors.
                If False, skip invalid values (default behavior)

        Returns:
            Dictionary of rich filters

        Raises:
            ValueError: If strict=True and validation fails
        """
        rich_filters = {}
        validation_errors = []

        for legacy_key, value in legacy_filters.items():
            # Skip if value is None or empty
            if value is None or (isinstance(value, list) and len(value) == 0):
                continue

            # Skip if legacy key is not in our mappings (not supported in filterset)
            if legacy_key not in self.FIELD_MAPPINGS:
                self._add_validation_error(
                    strict, validation_errors, f"Unsupported filter key: {legacy_key}"
                )
                continue

            # Get the new field name
            rich_field_name = self.FIELD_MAPPINGS[legacy_key]

            # Handle list values
            if isinstance(value, list):
                # Process date fields with helper method
                if self._process_date_field(
                    rich_field_name, value, strict, validation_errors, rich_filters
                ):
                    continue

                # Regular non-date field processing
                # Filter out invalid values
                valid_values = self._filter_valid_values(rich_field_name, value)

                if not valid_values:
                    self._add_validation_error(
                        strict,
                        validation_errors,
                        f"No valid values found for {legacy_key}: {value}",
                    )
                    continue

                # Check for invalid values if in strict mode
                if strict and len(valid_values) != len(value):
                    invalid_values = [v for v in value if v not in valid_values]
                    self._add_validation_error(
                        strict,
                        validation_errors,
                        f"Invalid values for {legacy_key}: {invalid_values}",
                    )

                # For list values, always use __in operator for non-date fields
                self._add_rich_filter(rich_filters, rich_field_name, "in", valid_values)

            else:
                # Handle single values
                # Process date fields with helper method
                if self._process_date_field(
                    rich_field_name, [value], strict, validation_errors, rich_filters
                ):
                    continue

                # For non-list values, use __exact operator for non-date fields
                if self._validate_value(rich_field_name, value):
                    self._add_rich_filter(rich_filters, rich_field_name, "exact", value)
                else:
                    error_msg = f"Invalid value for {legacy_key}: {value}"
                    self._add_validation_error(strict, validation_errors, error_msg)

        # Raise validation errors if in strict mode
        if strict and validation_errors:
            error_message = f"Filter validation errors: {'; '.join(validation_errors)}"
            raise ValueError(error_message)

        # Convert flat dict to rich filter format
        return self._format_as_rich_filter(rich_filters)

    def _format_as_rich_filter(self, flat_filters: Dict[str, Any]) -> Dict[str, Any]:
        """
        Convert a flat dictionary of filters to the proper rich filter format.

        Args:
            flat_filters: Dictionary with field__lookup keys and values

        Returns:
            Rich filter format using logical operators (and/or/not)
        """
        if not flat_filters:
            return {}

        # If only one filter, return as leaf node
        if len(flat_filters) == 1:
            key, value = next(iter(flat_filters.items()))
            return {key: value}

        # Multiple filters: wrap in 'and' operator
        filter_conditions = []
        for key, value in flat_filters.items():
            filter_conditions.append({key: value})

        return {"and": filter_conditions}
|
||||
380
apps/api/plane/utils/filters/filter_backend.py
Normal file
380
apps/api/plane/utils/filters/filter_backend.py
Normal file
|
|
@ -0,0 +1,380 @@
|
|||
import json
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.http import QueryDict
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from rest_framework import filters
|
||||
|
||||
|
||||
class ComplexFilterBackend(filters.BaseFilterBackend):
    """
    Filter backend that supports complex JSON filtering.

    For full, up-to-date examples and usage, see the package README
    at `plane/utils/filters/README.md`.
    """

    # Query parameter read by filter_queryset when no explicit filter_data
    # is supplied.
    filter_param = "filters"
    # Fallback nesting-depth limit; a view may override it with
    # `complex_filter_max_depth`.
    default_max_depth = 5

    def filter_queryset(self, request, queryset, view, filter_data=None):
        """Normalize filter input and apply JSON-based filtering.

        Accepts explicit `filter_data` (dict or JSON string) or reads the
        query parameter named by `filter_param`. Enforces JSON-only
        filtering.

        Raises:
            ValidationError: for invalid filter input; unexpected errors
                are converted to ValidationError to keep responses
                consistent.
        """
        try:
            if filter_data is not None:
                normalized = self._normalize_filter_data(filter_data, "filter_data")
                return self._apply_json_filter(queryset, normalized, view)

            filter_string = request.query_params.get(self.filter_param, None)
            if not filter_string:
                return queryset

            normalized = self._normalize_filter_data(filter_string, "filter")
            return self._apply_json_filter(queryset, normalized, view)
        except ValidationError:
            # Propagate validation errors unchanged
            raise
        except Exception as e:
            # BUG FIX: a stray bare `raise` previously made this conversion
            # unreachable. Convert unexpected errors to ValidationError to
            # keep the response format consistent; chain the original cause.
            raise ValidationError(f"Filter error: {str(e)}") from e

    def _normalize_filter_data(self, raw_filter, source_label):
        """Return a dict from raw filter input or raise a ValidationError.

        - raw_filter may be a dict or a JSON string
        - source_label is used in error messages (e.g., 'filter_data' or 'filter')
        """
        try:
            if isinstance(raw_filter, str):
                return json.loads(raw_filter)
            if isinstance(raw_filter, dict):
                return raw_filter
            raise ValidationError(f"'{source_label}' must be a dict or a JSON string.")
        except json.JSONDecodeError:
            raise ValidationError(
                f"Invalid JSON for '{source_label}'. Expected a valid JSON object."
            )

    def _apply_json_filter(self, queryset, filter_data, view):
        """Process a JSON filter structure using OR/AND/NOT set operations."""
        if not filter_data:
            return queryset

        # Validate structure and depth before field allowlist checks
        max_depth = self._get_max_depth(view)
        self._validate_structure(filter_data, max_depth=max_depth, current_depth=1)

        # Validate against the view's FilterSet (only declared filters are allowed)
        self._validate_fields(filter_data, view)

        # Build combined queryset using FilterSet-driven leaf evaluation
        combined_qs = self._evaluate_node(filter_data, queryset, view)
        if combined_qs is None:
            # Nothing evaluable in the filter tree: fall back to the base set
            return queryset
        return combined_qs

    def _validate_fields(self, filter_data, view):
        """Validate that filtered fields are defined in the view's FilterSet.

        Raises ValidationError when the view has no filterset_class or when
        any referenced field is not declared in it.
        """
        filterset_class = getattr(view, "filterset_class", None)
        allowed_fields = (
            set(filterset_class.base_filters.keys()) if filterset_class else None
        )
        if not allowed_fields:
            # If no FilterSet is configured, reject filtering to avoid unintended exposure # noqa: E501
            raise ValidationError(
                "Filtering is not enabled for this endpoint (missing filterset_class)"
            )

        # Extract field names from the filter data
        fields = self._extract_field_names(filter_data)

        # Check if all fields are allowed
        for field in fields:
            # Field keys must match FilterSet filter names (including any lookups)
            # Example: 'sequence_id__gte' should be declared in base_filters
            # Special-case __range: require the '<base>__range' filter itself
            if field not in allowed_fields:
                raise ValidationError(f"Filtering on field '{field}' is not allowed")

    def _extract_field_names(self, filter_data):
        """Extract all field names from a nested filter structure."""
        if isinstance(filter_data, dict):
            fields = []
            for key, value in filter_data.items():
                if key.lower() in ("or", "and", "not"):
                    # This is a logical operator, process its children
                    if key.lower() == "not":
                        # 'not' has a dict as its value, not a list
                        if isinstance(value, dict):
                            fields.extend(self._extract_field_names(value))
                    else:
                        # 'or' and 'and' have lists as their values
                        for item in value:
                            fields.extend(self._extract_field_names(item))
                else:
                    # This is a field name
                    fields.append(key)
            return fields
        return []

    def _evaluate_node(self, node, base_queryset, view):
        """
        Recursively evaluate a JSON node into a combined queryset using branch-based filtering.

        Rules:
        - leaf dict → evaluated through DjangoFilterBackend as a mini-querystring
        - {"or": [...]} → union (|) of children
        - {"and": [...]} → collect field conditions per branch and apply together
        - {"not": {...}} → exclude child's rows from the base queryset
          (complement within base scope)
        """
        if not isinstance(node, dict):
            return None

        # 'or' combination - requires set operations between children
        if "or" in node:
            children = node["or"]
            if not isinstance(children, list) or not children:
                return None
            combined = None
            for child in children:
                child_qs = self._evaluate_node(child, base_queryset, view)
                if child_qs is None:
                    continue
                combined = child_qs if combined is None else (combined | child_qs)
            return combined

        # 'and' combination - collect field conditions per branch
        if "and" in node:
            children = node["and"]
            if not isinstance(children, list) or not children:
                return None
            return self._evaluate_and_branch(children, base_queryset, view)

        # 'not' negation
        if "not" in node:
            child = node["not"]
            if not isinstance(child, dict):
                return None
            child_qs = self._evaluate_node(child, base_queryset, view)
            if child_qs is None:
                return None
            # values("pk") keeps this lazy, so the DB sees a subquery rather
            # than a materialized IN list
            return base_queryset.exclude(pk__in=child_qs.values("pk"))

        # Leaf dict: evaluate via DjangoFilterBackend using FilterSet
        return self._filter_leaf_via_backend(node, base_queryset, view)

    def _evaluate_and_branch(self, children, base_queryset, view):
        """
        Evaluate an AND branch by collecting field conditions and applying them together.

        This approach is more efficient than individual leaf evaluation because:
        - Field conditions within the same AND branch are collected and applied together
        - Only logical operation children require separate evaluation and set intersection
        - Reduces the number of intermediate querysets and database queries
        """
        collected_conditions = {}
        logical_querysets = []

        # Separate field conditions from logical operations
        for child in children:
            if not isinstance(child, dict):
                continue

            # Check if this child contains logical operators
            has_logical = any(
                k.lower() in ("or", "and", "not")
                for k in child.keys()
                if isinstance(k, str)
            )

            if has_logical:
                # This child has logical operators, evaluate separately
                child_qs = self._evaluate_node(child, base_queryset, view)
                if child_qs is not None:
                    logical_querysets.append(child_qs)
            else:
                # This is a leaf with field conditions, collect them
                collected_conditions.update(child)

        # Start with base queryset
        result_qs = base_queryset

        # Apply collected field conditions together if any exist
        if collected_conditions:
            result_qs = self._filter_leaf_via_backend(
                collected_conditions, result_qs, view
            )
            if result_qs is None:
                return None

        # Intersect with any logical operation results
        for logical_qs in logical_querysets:
            result_qs = result_qs & logical_qs

        return result_qs

    def _filter_leaf_via_backend(self, leaf_conditions, base_queryset, view):
        """Evaluate a leaf dict by delegating to DjangoFilterBackend once.

        We serialize the leaf dict into a mini querystring and let the view's
        filterset_class perform validation, conversion, and filtering. This returns
        a lazy queryset suitable for set-operations with siblings.
        """
        if not leaf_conditions:
            return None

        # Build a QueryDict from the leaf conditions
        qd = QueryDict(mutable=True)
        for key, value in leaf_conditions.items():
            # Default serialization to string; QueryDict expects strings
            if isinstance(value, list):
                # Repeat key for list values (e.g., __in)
                qd.setlist(key, [str(v) for v in value])
            else:
                qd[key] = "" if value is None else str(value)

        qd = qd.copy()
        qd._mutable = False

        # Temporarily patch request.GET and delegate to DjangoFilterBackend,
        # restoring the original GET even if filtering raises.
        backend = DjangoFilterBackend()
        request = view.request
        original_get = request._request.GET if hasattr(request, "_request") else None
        try:
            if hasattr(request, "_request"):
                request._request.GET = qd
            return backend.filter_queryset(request, base_queryset, view)
        finally:
            if hasattr(request, "_request") and original_get is not None:
                request._request.GET = original_get

    def _get_max_depth(self, view):
        """Return the maximum allowed nesting depth for complex filters.

        Falls back to class default if the view does not specify it or has
        an invalid value.
        """
        value = getattr(view, "complex_filter_max_depth", self.default_max_depth)
        try:
            value_int = int(value)
            if value_int <= 0:
                return self.default_max_depth
            return value_int
        except Exception:
            return self.default_max_depth

    def _validate_structure(self, node, max_depth, current_depth):
        """Validate JSON structure and enforce nesting depth.

        Rules:
        - Each object may contain only one logical operator:
          or/and/not (case-insensitive)
        - Logical operator objects cannot contain field keys alongside the
          operator
        - or/and values must be non-empty lists of dicts
        - not value must be a dict
        - Leaf objects must only contain field keys and acceptable values
        - Depth must not exceed max_depth
        """
        if current_depth > max_depth:
            raise ValidationError(
                f"Filter nesting is too deep (max {max_depth}); found depth"
                f" {current_depth}"
            )

        if not isinstance(node, dict):
            raise ValidationError("Each filter node must be a JSON object")

        if not node:
            raise ValidationError("Filter objects must not be empty")

        logical_keys = [
            k
            for k in node.keys()
            if isinstance(k, str) and k.lower() in ("or", "and", "not")
        ]

        if len(logical_keys) > 1:
            raise ValidationError(
                "A filter object cannot contain multiple logical operators at"
                " the same level"
            )

        if len(logical_keys) == 1:
            op_key = logical_keys[0]
            # must not mix operator with other keys
            if len(node) != 1:
                raise ValidationError(
                    f"Cannot mix logical operator '{op_key}' with field keys at"
                    f" the same level"
                )

            op = op_key.lower()
            value = node[op_key]

            if op in ("or", "and"):
                if not isinstance(value, list) or len(value) == 0:
                    raise ValidationError(
                        f"'{op}' must be a non-empty list of filter objects"
                    )
                for child in value:
                    if not isinstance(child, dict):
                        raise ValidationError(
                            f"All children of '{op}' must be JSON objects"
                        )
                    self._validate_structure(
                        child,
                        max_depth=max_depth,
                        current_depth=current_depth + 1,
                    )
                return

            if op == "not":
                if not isinstance(value, dict):
                    raise ValidationError("'not' must be a single JSON object")
                self._validate_structure(
                    value, max_depth=max_depth, current_depth=current_depth + 1
                )
                return

        # Leaf node: validate fields and values
        self._validate_leaf(node)

    def _validate_leaf(self, leaf):
        """Validate a leaf dict containing field lookups and values."""
        if not isinstance(leaf, dict) or not leaf:
            raise ValidationError("Leaf filter must be a non-empty JSON object")

        for key, value in leaf.items():
            if isinstance(key, str) and key.lower() in ("or", "and", "not"):
                raise ValidationError(
                    "Logical operators cannot appear in a leaf filter object"
                )

            # Lists/Tuples must contain only scalar values
            if isinstance(value, (list, tuple)):
                if len(value) == 0:
                    raise ValidationError(f"List value for '{key}' must not be empty")
                for item in value:
                    if not self._is_scalar(item):
                        raise ValidationError(
                            f"List value for '{key}' must contain only scalar items"
                        )
                continue

            # Scalars and None are allowed
            if not self._is_scalar(value):
                raise ValidationError(
                    f"Value for '{key}' must be a scalar, null, or list/tuple of"
                    f" scalars"
                )

    def _is_scalar(self, value):
        """Return True for JSON scalar values accepted in leaf filters."""
        return value is None or isinstance(value, (str, int, float, bool))
|
||||
146
apps/api/plane/utils/filters/filter_migrations.py
Normal file
146
apps/api/plane/utils/filters/filter_migrations.py
Normal file
|
|
@ -0,0 +1,146 @@
|
|||
"""
|
||||
Utilities for migrating legacy filters to rich filters format.
|
||||
|
||||
This module contains helper functions for data migrations that convert
|
||||
filters fields to rich_filters fields using the LegacyToRichFiltersConverter.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, Tuple
|
||||
|
||||
from .converters import LegacyToRichFiltersConverter
|
||||
|
||||
|
||||
logger = logging.getLogger("plane.api.filters.migration")
|
||||
|
||||
|
||||
def migrate_single_model_filters(
    model_class, model_name: str, converter: LegacyToRichFiltersConverter
) -> Tuple[int, int]:
    """
    Migrate legacy ``filters`` to ``rich_filters`` for a single model.

    Args:
        model_class: Django model class exposing ``filters`` and
            ``rich_filters`` JSON fields.
        model_name: Human-readable name used in log messages.
        converter: Instance of LegacyToRichFiltersConverter.

    Returns:
        Tuple of (updated_count, error_count).
    """
    # Find records that need migration - have filters but empty rich_filters
    records_to_migrate = model_class.objects.exclude(filters={}).filter(rich_filters={})

    # Evaluate the count once instead of issuing two identical COUNT queries.
    total = records_to_migrate.count()
    if total == 0:
        logger.info("No %s records need migration", model_name)
        return 0, 0

    logger.info("Found %s %s records to migrate", total, model_name)

    updated_records = []
    conversion_errors = 0

    for record in records_to_migrate:
        try:
            if record.filters:  # Double check that filters is not empty
                # strict=False: best-effort conversion; unknown legacy keys
                # are tolerated rather than raising.
                record.rich_filters = converter.convert(record.filters, strict=False)
                updated_records.append(record)
        except Exception as e:
            # Best-effort migration: log the failing record and keep going.
            logger.warning(
                "Failed to convert filters for %s ID %s: %s", model_name, record.id, e
            )
            conversion_errors += 1

    # Bulk update all successfully converted records in batches.
    if updated_records:
        model_class.objects.bulk_update(
            updated_records, ["rich_filters"], batch_size=1000
        )
        logger.info(
            "Successfully updated %s %s records", len(updated_records), model_name
        )

    return len(updated_records), conversion_errors
|
||||
|
||||
|
||||
def migrate_models_filters_to_rich_filters(
    models_to_migrate: Dict[str, Any],
    converter: LegacyToRichFiltersConverter,
) -> Dict[str, Tuple[int, int]]:
    """
    Migrate legacy filters to rich_filters format for provided models.

    Args:
        models_to_migrate: Dict mapping model names to model classes
        converter: Instance of LegacyToRichFiltersConverter used to convert
            each model's legacy filters

    Returns:
        Dictionary mapping model names to (updated_count, error_count) tuples
    """
    logger.info("Starting filters to rich_filters migration for all models")

    results = {}
    total_updated = 0
    total_errors = 0

    for model_name, model_class in models_to_migrate.items():
        try:
            updated_count, error_count = migrate_single_model_filters(
                model_class, model_name, converter
            )
        except Exception as e:
            # One failing model must not abort migration of the remaining ones.
            logger.error("Failed to migrate %s: %s", model_name, e)
            results[model_name] = (0, 1)
            total_errors += 1
        else:
            results[model_name] = (updated_count, error_count)
            total_updated += updated_count
            total_errors += error_count

    # Log final summary
    logger.info(
        "Migration completed for all models. Total updated: %s, Total errors: %s",
        total_updated,
        total_errors,
    )

    return results
|
||||
|
||||
|
||||
def clear_models_rich_filters(models_to_clear: Dict[str, Any]) -> Dict[str, int]:
    """
    Clear the rich_filters field on the given models (reverse migration).

    Args:
        models_to_clear: Dictionary mapping model names to model classes

    Returns:
        Dictionary mapping model names to count of cleared records
    """
    logger.info("Starting reverse migration - clearing rich_filters for all models")

    results = {}
    total_cleared = 0

    for model_name, model_class in models_to_clear.items():
        try:
            # Reset rich_filters on every record that currently has a value.
            updated_count = model_class.objects.exclude(rich_filters={}).update(
                rich_filters={}
            )
        except Exception as e:
            # Record the failure and move on to the next model.
            logger.error(f"Failed to clear rich_filters for {model_name}: {str(e)}")
            results[model_name] = 0
        else:
            results[model_name] = updated_count
            total_cleared += updated_count
            logger.info(
                f"Cleared rich_filters for {updated_count} {model_name} records"
            )

    logger.info(f"Reverse migration completed. Total cleared: {total_cleared}")
    return results
|
||||
180
apps/api/plane/utils/filters/filterset.py
Normal file
180
apps/api/plane/utils/filters/filterset.py
Normal file
|
|
@ -0,0 +1,180 @@
|
|||
import copy
|
||||
|
||||
from django_filters import FilterSet, filters
|
||||
|
||||
from plane.db.models import Issue
|
||||
|
||||
|
||||
class UUIDInFilter(filters.BaseInFilter, filters.UUIDFilter):
    """UUID filter accepting a comma-separated list of values for ``in`` lookups."""

    pass
|
||||
|
||||
|
||||
class CharInFilter(filters.BaseInFilter, filters.CharFilter):
    """Char filter accepting a comma-separated list of values for ``in`` lookups."""

    pass
|
||||
|
||||
|
||||
class BaseFilterSet(FilterSet):
    """FilterSet base that mirrors every ``exact`` filter under an explicit
    ``<name>__exact`` alias so clients may use either spelling."""

    @classmethod
    def get_filters(cls):
        """
        Get all filters for the filterset, including dynamically created
        ``__exact`` aliases for every filter whose lookup is ``exact``.
        """
        # Use a distinct local name so we do not shadow the
        # ``filters`` module imported from django_filters at file scope.
        base_filters = super().get_filters()

        # Add __exact versions for filters that have 'exact' lookup.
        exact_aliases = {}
        for filter_name, filter_obj in base_filters.items():
            if hasattr(filter_obj, "lookup_expr") and filter_obj.lookup_expr == "exact":
                exact_field_name = f"{filter_name}__exact"
                if exact_field_name not in base_filters:
                    # Deep-copy so the alias is an independent filter instance.
                    exact_aliases[exact_field_name] = copy.deepcopy(filter_obj)

        # Merge the aliases into the main filters dict.
        base_filters.update(exact_aliases)
        return base_filters
|
||||
|
||||
|
||||
class IssueFilterSet(BaseFilterSet):
    """FilterSet for the Issue model.

    Relation-backed fields (assignees, cycles, modules, mentions, labels,
    subscribers) are filtered through custom methods so that soft-deleted
    join rows (``deleted_at`` set) never produce matches. Direct column
    lookups (state, project, created_by, dates, priority) use plain filters.
    """

    # Relation filters: routed through methods to exclude soft-deleted rows.
    assignee_id = filters.UUIDFilter(method="filter_assignee_id")
    assignee_id__in = UUIDInFilter(method="filter_assignee_id_in", lookup_expr="in")

    cycle_id = filters.UUIDFilter(method="filter_cycle_id")
    cycle_id__in = UUIDInFilter(method="filter_cycle_id_in", lookup_expr="in")

    module_id = filters.UUIDFilter(method="filter_module_id")
    module_id__in = UUIDInFilter(method="filter_module_id_in", lookup_expr="in")

    mention_id = filters.UUIDFilter(method="filter_mention_id")
    mention_id__in = UUIDInFilter(method="filter_mention_id_in", lookup_expr="in")

    label_id = filters.UUIDFilter(method="filter_label_id")
    label_id__in = UUIDInFilter(method="filter_label_id_in", lookup_expr="in")

    # Direct column lookups.
    created_by_id = filters.UUIDFilter(field_name="created_by_id")
    created_by_id__in = UUIDInFilter(field_name="created_by_id", lookup_expr="in")

    is_archived = filters.BooleanFilter(method="filter_is_archived")

    state_group = filters.CharFilter(field_name="state__group")
    state_group__in = CharInFilter(field_name="state__group", lookup_expr="in")

    state_id = filters.UUIDFilter(field_name="state_id")
    state_id__in = UUIDInFilter(field_name="state_id", lookup_expr="in")

    project_id = filters.UUIDFilter(field_name="project_id")
    project_id__in = UUIDInFilter(field_name="project_id", lookup_expr="in")

    subscriber_id = filters.UUIDFilter(method="filter_subscriber_id")
    subscriber_id__in = UUIDInFilter(method="filter_subscriber_id_in", lookup_expr="in")

    class Meta:
        model = Issue
        fields = {
            "start_date": ["exact", "range"],
            "target_date": ["exact", "range"],
            "created_at": ["exact", "range"],
            "is_draft": ["exact"],
            "priority": ["exact", "in"],
        }

    @staticmethod
    def _active_relation(queryset, relation, **lookups):
        """Apply ``lookups`` through ``relation`` while skipping join rows
        whose ``deleted_at`` is set (i.e. soft-deleted)."""
        criteria = {f"{relation}__{name}": value for name, value in lookups.items()}
        criteria[f"{relation}__deleted_at__isnull"] = True
        return queryset.filter(**criteria)

    def filter_is_archived(self, queryset, name, value):
        """Convenience filter: true -> archived_at is set, false -> archived_at is null."""
        if value in (True, "true", "True", 1, "1"):
            return queryset.filter(archived_at__isnull=False)
        if value in (False, "false", "False", 0, "0"):
            return queryset.filter(archived_at__isnull=True)
        return queryset

    # Relation filter methods; each delegates to _active_relation so soft
    # deleted join rows are excluded uniformly.

    def filter_assignee_id(self, queryset, name, value):
        """Single assignee, ignoring soft-deleted assignments."""
        return self._active_relation(queryset, "issue_assignee", assignee_id=value)

    def filter_assignee_id_in(self, queryset, name, value):
        """Multiple assignees, ignoring soft-deleted assignments."""
        return self._active_relation(queryset, "issue_assignee", assignee_id__in=value)

    def filter_cycle_id(self, queryset, name, value):
        """Single cycle, ignoring soft-deleted cycle links."""
        return self._active_relation(queryset, "issue_cycle", cycle_id=value)

    def filter_cycle_id_in(self, queryset, name, value):
        """Multiple cycles, ignoring soft-deleted cycle links."""
        return self._active_relation(queryset, "issue_cycle", cycle_id__in=value)

    def filter_module_id(self, queryset, name, value):
        """Single module, ignoring soft-deleted module links."""
        return self._active_relation(queryset, "issue_module", module_id=value)

    def filter_module_id_in(self, queryset, name, value):
        """Multiple modules, ignoring soft-deleted module links."""
        return self._active_relation(queryset, "issue_module", module_id__in=value)

    def filter_mention_id(self, queryset, name, value):
        """Single mention, ignoring soft-deleted mentions."""
        return self._active_relation(queryset, "issue_mention", mention_id=value)

    def filter_mention_id_in(self, queryset, name, value):
        """Multiple mentions, ignoring soft-deleted mentions."""
        return self._active_relation(queryset, "issue_mention", mention_id__in=value)

    def filter_label_id(self, queryset, name, value):
        """Single label, ignoring soft-deleted label links."""
        return self._active_relation(queryset, "label_issue", label_id=value)

    def filter_label_id_in(self, queryset, name, value):
        """Multiple labels, ignoring soft-deleted label links."""
        return self._active_relation(queryset, "label_issue", label_id__in=value)

    def filter_subscriber_id(self, queryset, name, value):
        """Single subscriber, ignoring soft-deleted subscriptions."""
        return self._active_relation(queryset, "issue_subscribers", subscriber_id=value)

    def filter_subscriber_id_in(self, queryset, name, value):
        """Multiple subscribers, ignoring soft-deleted subscriptions."""
        return self._active_relation(
            queryset, "issue_subscribers", subscriber_id__in=value
        )
|
||||
|
|
@ -148,6 +148,7 @@ def issue_group_values(
|
|||
slug: str,
|
||||
project_id: Optional[str] = None,
|
||||
filters: Dict[str, Any] = {},
|
||||
queryset: Optional[QuerySet] = None,
|
||||
) -> List[Union[str, Any]]:
|
||||
if field == "state_id":
|
||||
queryset = State.objects.filter(
|
||||
|
|
@ -207,36 +208,24 @@ def issue_group_values(
|
|||
return ["backlog", "unstarted", "started", "completed", "cancelled"]
|
||||
|
||||
if field == "target_date":
|
||||
queryset = (
|
||||
Issue.issue_objects.filter(workspace__slug=slug)
|
||||
.filter(**filters)
|
||||
.values_list("target_date", flat=True)
|
||||
.distinct()
|
||||
)
|
||||
queryset = queryset.values_list("target_date", flat=True).distinct()
|
||||
if project_id:
|
||||
return list(queryset.filter(project_id=project_id))
|
||||
return list(queryset)
|
||||
else:
|
||||
return list(queryset)
|
||||
|
||||
if field == "start_date":
|
||||
queryset = (
|
||||
Issue.issue_objects.filter(workspace__slug=slug)
|
||||
.filter(**filters)
|
||||
.values_list("start_date", flat=True)
|
||||
.distinct()
|
||||
)
|
||||
queryset = queryset.values_list("start_date", flat=True).distinct()
|
||||
if project_id:
|
||||
return list(queryset.filter(project_id=project_id))
|
||||
return list(queryset)
|
||||
else:
|
||||
return list(queryset)
|
||||
|
||||
if field == "created_by":
|
||||
queryset = (
|
||||
Issue.issue_objects.filter(workspace__slug=slug)
|
||||
.filter(**filters)
|
||||
.values_list("created_by", flat=True)
|
||||
.distinct()
|
||||
)
|
||||
queryset = queryset.values_list("created_by", flat=True).distinct()
|
||||
if project_id:
|
||||
return list(queryset.filter(project_id=project_id))
|
||||
return list(queryset)
|
||||
else:
|
||||
return list(queryset)
|
||||
|
||||
return []
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue