chore: formatting all python files using black formatter (#3366)

sriram veeraghanta 2024-01-13 19:05:06 +05:30
parent 4b0d48b290
commit 11f84a986c
235 changed files with 12967 additions and 4168 deletions
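
Every hunk below is a mechanical rewrite produced by the black formatter: lines are wrapped to black's default 88-column limit, single-quoted strings are normalized to double quotes, and exploded calls and literals gain trailing commas; no behavior changes. The commit does not record the exact invocation, so the following is only a sketch of how black's Python API would reproduce rewrites like these, with the sample snippet and option values assumed rather than taken from the repository:

import black

# Invented sample input; it mirrors two patterns seen throughout this
# diff: quote normalization and wrapping of an over-long call.
snippet = (
    "name = 'plane.bgtasks'\n"
    'html_content = render_to_string("emails/auth/forgot_password.html", some_longer_context_name)\n'
)

# black.format_str() formats a source string; black.Mode() carries the
# same defaults the CLI uses (line_length=88, double quotes preferred).
print(black.format_str(snippet, mode=black.Mode(line_length=88)))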


@@ -101,7 +101,9 @@ def get_assignee_details(slug, filters):
def get_label_details(slug, filters):
"""Fetch label details if required"""
return (
Issue.objects.filter(workspace__slug=slug, **filters, labels__id__isnull=False)
Issue.objects.filter(
workspace__slug=slug, **filters, labels__id__isnull=False
)
.distinct("labels__id")
.order_by("labels__id")
.values("labels__id", "labels__color", "labels__name")
@@ -174,7 +176,9 @@ def generate_segmented_rows(
):
segment_zero = list(
set(
item.get("segment") for sublist in distribution.values() for item in sublist
item.get("segment")
for sublist in distribution.values()
for item in sublist
)
)
@@ -193,7 +197,9 @@ def generate_segmented_rows(
]
for segment in segment_zero:
value = next((x.get(key) for x in data if x.get("segment") == segment), "0")
value = next(
(x.get(key) for x in data if x.get("segment") == segment), "0"
)
generated_row.append(value)
if x_axis == ASSIGNEE_ID:
@@ -212,7 +218,11 @@ def generate_segmented_rows(
if x_axis == LABEL_ID:
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
(
lab
for lab in label_details
if str(lab[LABEL_ID]) == str(item)
),
None,
)
@@ -221,7 +231,11 @@ def generate_segmented_rows(
if x_axis == STATE_ID:
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
(
sta
for sta in state_details
if str(sta[STATE_ID]) == str(item)
),
None,
)
@@ -230,7 +244,11 @@ def generate_segmented_rows(
if x_axis == CYCLE_ID:
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
(
cyc
for cyc in cycle_details
if str(cyc[CYCLE_ID]) == str(item)
),
None,
)
@@ -239,7 +257,11 @@ def generate_segmented_rows(
if x_axis == MODULE_ID:
module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
(
mod
for mod in module_details
if str(mod[MODULE_ID]) == str(item)
),
None,
)
@@ -266,7 +288,11 @@ def generate_segmented_rows(
if segmented == LABEL_ID:
for index, segm in enumerate(row_zero[2:]):
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(segm)),
(
lab
for lab in label_details
if str(lab[LABEL_ID]) == str(segm)
),
None,
)
if label:
@@ -275,7 +301,11 @@ def generate_segmented_rows(
if segmented == STATE_ID:
for index, segm in enumerate(row_zero[2:]):
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(segm)),
(
sta
for sta in state_details
if str(sta[STATE_ID]) == str(segm)
),
None,
)
if state:
@@ -284,7 +314,11 @@ def generate_segmented_rows(
if segmented == MODULE_ID:
for index, segm in enumerate(row_zero[2:]):
module = next(
(mod for mod in label_details if str(mod[MODULE_ID]) == str(segm)),
(
mod
for mod in label_details
if str(mod[MODULE_ID]) == str(segm)
),
None,
)
if module:
@@ -293,7 +327,11 @@ def generate_segmented_rows(
if segmented == CYCLE_ID:
for index, segm in enumerate(row_zero[2:]):
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(segm)),
(
cyc
for cyc in cycle_details
if str(cyc[CYCLE_ID]) == str(segm)
),
None,
)
if cycle:
@@ -315,7 +353,10 @@ def generate_non_segmented_rows(
):
rows = []
for item, data in distribution.items():
row = [item, data[0].get("count" if y_axis == "issue_count" else "estimate")]
row = [
item,
data[0].get("count" if y_axis == "issue_count" else "estimate"),
]
if x_axis == ASSIGNEE_ID:
assignee = next(
@@ -333,7 +374,11 @@ def generate_non_segmented_rows(
if x_axis == LABEL_ID:
label = next(
(lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
(
lab
for lab in label_details
if str(lab[LABEL_ID]) == str(item)
),
None,
)
@@ -342,7 +387,11 @@ def generate_non_segmented_rows(
if x_axis == STATE_ID:
state = next(
(sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
(
sta
for sta in state_details
if str(sta[STATE_ID]) == str(item)
),
None,
)
@@ -351,7 +400,11 @@ def generate_non_segmented_rows(
if x_axis == CYCLE_ID:
cycle = next(
(cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
(
cyc
for cyc in cycle_details
if str(cyc[CYCLE_ID]) == str(item)
),
None,
)
@@ -360,7 +413,11 @@ def generate_non_segmented_rows(
if x_axis == MODULE_ID:
module = next(
(mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
(
mod
for mod in module_details
if str(mod[MODULE_ID]) == str(item)
),
None,
)
@@ -369,7 +426,10 @@ def generate_non_segmented_rows(
rows.append(tuple(row))
row_zero = [row_mapping.get(x_axis, "X-Axis"), row_mapping.get(y_axis, "Y-Axis")]
row_zero = [
row_mapping.get(x_axis, "X-Axis"),
row_mapping.get(y_axis, "Y-Axis"),
]
return [tuple(row_zero)] + rows


@@ -2,4 +2,4 @@ from django.apps import AppConfig
class BgtasksConfig(AppConfig):
name = 'plane.bgtasks'
name = "plane.bgtasks"


@@ -40,22 +40,24 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
email,
event=event_name,
properties={
"event_id": uuid.uuid4().hex,
"user": {"email": email, "id": str(user)},
"device_ctx": {
"ip": ip,
"user_agent": user_agent,
},
"medium": medium,
"first_time": first_time
}
"event_id": uuid.uuid4().hex,
"user": {"email": email, "id": str(user)},
"device_ctx": {
"ip": ip,
"user_agent": user_agent,
},
"medium": medium,
"first_time": first_time,
},
)
except Exception as e:
capture_exception(e)
@shared_task
def workspace_invite_event(user, email, user_agent, ip, event_name, accepted_from):
def workspace_invite_event(
user, email, user_agent, ip, event_name, accepted_from
):
try:
POSTHOG_API_KEY, POSTHOG_HOST = posthogConfiguration()
@@ -65,14 +67,14 @@ def workspace_invite_event(user, email, user_agent, ip, event_name, accepted_fro
email,
event=event_name,
properties={
"event_id": uuid.uuid4().hex,
"user": {"email": email, "id": str(user)},
"device_ctx": {
"ip": ip,
"user_agent": user_agent,
},
"accepted_from": accepted_from
}
"event_id": uuid.uuid4().hex,
"user": {"email": email, "id": str(user)},
"device_ctx": {
"ip": ip,
"user_agent": user_agent,
},
"accepted_from": accepted_from,
},
)
except Exception as e:
capture_exception(e)
capture_exception(e)


@@ -68,7 +68,9 @@ def create_zip_file(files):
def upload_to_s3(zip_file, workspace_id, token_id, slug):
file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip"
file_name = (
f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip"
)
expires_in = 7 * 24 * 60 * 60
if settings.USE_MINIO:
@@ -87,7 +89,10 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
)
presigned_url = s3.generate_presigned_url(
"get_object",
Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name},
Params={
"Bucket": settings.AWS_STORAGE_BUCKET_NAME,
"Key": file_name,
},
ExpiresIn=expires_in,
)
# Create the new url with updated domain and protocol
@@ -112,7 +117,10 @@ def upload_to_s3(zip_file, workspace_id, token_id, slug):
presigned_url = s3.generate_presigned_url(
"get_object",
Params={"Bucket": settings.AWS_STORAGE_BUCKET_NAME, "Key": file_name},
Params={
"Bucket": settings.AWS_STORAGE_BUCKET_NAME,
"Key": file_name,
},
ExpiresIn=expires_in,
)
@@ -172,11 +180,17 @@ def generate_json_row(issue):
else "",
"Labels": issue["labels__name"],
"Cycle Name": issue["issue_cycle__cycle__name"],
"Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]),
"Cycle Start Date": dateConverter(
issue["issue_cycle__cycle__start_date"]
),
"Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]),
"Module Name": issue["issue_module__module__name"],
"Module Start Date": dateConverter(issue["issue_module__module__start_date"]),
"Module Target Date": dateConverter(issue["issue_module__module__target_date"]),
"Module Start Date": dateConverter(
issue["issue_module__module__start_date"]
),
"Module Target Date": dateConverter(
issue["issue_module__module__target_date"]
),
"Created At": dateTimeConverter(issue["created_at"]),
"Updated At": dateTimeConverter(issue["updated_at"]),
"Completed At": dateTimeConverter(issue["completed_at"]),
@@ -211,7 +225,11 @@ def update_json_row(rows, row):
def update_table_row(rows, row):
matched_index = next(
(index for index, existing_row in enumerate(rows) if existing_row[0] == row[0]),
(
index
for index, existing_row in enumerate(rows)
if existing_row[0] == row[0]
),
None,
)
@@ -260,7 +278,9 @@ def generate_xlsx(header, project_id, issues, files):
@shared_task
def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug):
def issue_export_task(
provider, workspace_id, project_ids, token_id, multiple, slug
):
try:
exporter_instance = ExporterHistory.objects.get(token=token_id)
exporter_instance.status = "processing"
@@ -273,9 +293,14 @@ def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, s
project_id__in=project_ids,
project__project_projectmember__member=exporter_instance.initiated_by_id,
)
.select_related("project", "workspace", "state", "parent", "created_by")
.select_related(
"project", "workspace", "state", "parent", "created_by"
)
.prefetch_related(
"assignees", "labels", "issue_cycle__cycle", "issue_module__module"
"assignees",
"labels",
"issue_cycle__cycle",
"issue_module__module",
)
.values(
"id",


@@ -19,7 +19,8 @@ from plane.db.models import ExporterHistory
def delete_old_s3_link():
# Get a list of keys and IDs to process
expired_exporter_history = ExporterHistory.objects.filter(
Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8))
Q(url__isnull=False)
& Q(created_at__lte=timezone.now() - timedelta(days=8))
).values_list("key", "id")
if settings.USE_MINIO:
s3 = boto3.client(
@@ -42,8 +43,12 @@ def delete_old_s3_link():
# Delete object from S3
if file_name:
if settings.USE_MINIO:
s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name)
s3.delete_object(
Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name
)
else:
s3.delete_object(Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name)
s3.delete_object(
Bucket=settings.AWS_STORAGE_BUCKET_NAME, Key=file_name
)
ExporterHistory.objects.filter(id=exporter_id).update(url=None)


@@ -14,10 +14,10 @@ from plane.db.models import FileAsset
@shared_task
def delete_file_asset():
# file assets to delete
file_assets_to_delete = FileAsset.objects.filter(
Q(is_deleted=True) & Q(updated_at__lte=timezone.now() - timedelta(days=7))
Q(is_deleted=True)
& Q(updated_at__lte=timezone.now() - timedelta(days=7))
)
# Delete the file from storage and the file object from the database
@@ -26,4 +26,3 @@ def delete_file_asset():
file_asset.asset.delete(save=False)
# Delete the file object
file_asset.delete()


@@ -42,7 +42,9 @@ def forgot_password(first_name, email, uidb64, token, current_site):
"email": email,
}
html_content = render_to_string("emails/auth/forgot_password.html", context)
html_content = render_to_string(
"emails/auth/forgot_password.html", context
)
text_content = strip_tags(html_content)


@@ -130,12 +130,17 @@ def service_importer(service, importer_id):
repository_id = importer.metadata.get("repository_id", False)
workspace_integration = WorkspaceIntegration.objects.get(
workspace_id=importer.workspace_id, integration__provider="github"
workspace_id=importer.workspace_id,
integration__provider="github",
)
# Delete the old repository object
GithubRepositorySync.objects.filter(project_id=importer.project_id).delete()
GithubRepository.objects.filter(project_id=importer.project_id).delete()
GithubRepositorySync.objects.filter(
project_id=importer.project_id
).delete()
GithubRepository.objects.filter(
project_id=importer.project_id
).delete()
# Create a Label for github
label = Label.objects.filter(


@@ -138,8 +138,12 @@ def track_parent(
project_id=project_id,
workspace_id=workspace_id,
comment=f"updated the parent issue to",
old_identifier=old_parent.id if old_parent is not None else None,
new_identifier=new_parent.id if new_parent is not None else None,
old_identifier=old_parent.id
if old_parent is not None
else None,
new_identifier=new_parent.id
if new_parent is not None
else None,
epoch=epoch,
)
)
@@ -217,7 +221,9 @@ def track_target_date(
issue_activities,
epoch,
):
if current_instance.get("target_date") != requested_data.get("target_date"):
if current_instance.get("target_date") != requested_data.get(
"target_date"
):
issue_activities.append(
IssueActivity(
issue_id=issue_id,
@@ -281,8 +287,12 @@ def track_labels(
issue_activities,
epoch,
):
requested_labels = set([str(lab) for lab in requested_data.get("labels", [])])
current_labels = set([str(lab) for lab in current_instance.get("labels", [])])
requested_labels = set(
[str(lab) for lab in requested_data.get("labels", [])]
)
current_labels = set(
[str(lab) for lab in current_instance.get("labels", [])]
)
added_labels = requested_labels - current_labels
dropped_labels = current_labels - requested_labels
@@ -339,8 +349,12 @@ def track_assignees(
issue_activities,
epoch,
):
requested_assignees = set([str(asg) for asg in requested_data.get("assignees", [])])
current_assignees = set([str(asg) for asg in current_instance.get("assignees", [])])
requested_assignees = set(
[str(asg) for asg in requested_data.get("assignees", [])]
)
current_assignees = set(
[str(asg) for asg in current_instance.get("assignees", [])]
)
added_assignees = requested_assignees - current_assignees
dropped_assginees = current_assignees - requested_assignees
@@ -392,7 +406,9 @@ def track_estimate_points(
issue_activities,
epoch,
):
if current_instance.get("estimate_point") != requested_data.get("estimate_point"):
if current_instance.get("estimate_point") != requested_data.get(
"estimate_point"
):
issue_activities.append(
IssueActivity(
issue_id=issue_id,
@@ -423,7 +439,9 @@ def track_archive_at(
issue_activities,
epoch,
):
if current_instance.get("archived_at") != requested_data.get("archived_at"):
if current_instance.get("archived_at") != requested_data.get(
"archived_at"
):
if requested_data.get("archived_at") is None:
issue_activities.append(
IssueActivity(
@@ -536,7 +554,9 @@ def update_issue_activity(
"closed_to": track_closed_to,
}
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
@@ -589,7 +609,9 @@ def create_comment_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
@@ -621,12 +643,16 @@ def update_comment_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
if current_instance.get("comment_html") != requested_data.get("comment_html"):
if current_instance.get("comment_html") != requested_data.get(
"comment_html"
):
issue_activities.append(
IssueActivity(
issue_id=issue_id,
@@ -680,14 +706,18 @@ def create_cycle_issue_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
# Updated Records:
updated_records = current_instance.get("updated_cycle_issues", [])
created_records = json.loads(current_instance.get("created_cycle_issues", []))
created_records = json.loads(
current_instance.get("created_cycle_issues", [])
)
for updated_record in updated_records:
old_cycle = Cycle.objects.filter(
@@ -756,7 +786,9 @@ def delete_cycle_issue_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
@@ -798,14 +830,18 @@ def create_module_issue_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
# Updated Records:
updated_records = current_instance.get("updated_module_issues", [])
created_records = json.loads(current_instance.get("created_module_issues", []))
created_records = json.loads(
current_instance.get("created_module_issues", [])
)
for updated_record in updated_records:
old_module = Module.objects.filter(
@@ -873,7 +909,9 @@ def delete_module_issue_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
@@ -915,7 +953,9 @@ def create_link_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
@@ -946,7 +986,9 @@ def update_link_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
@@ -1010,7 +1052,9 @@ def create_attachment_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
@@ -1065,7 +1109,9 @@ def create_issue_reaction_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
if requested_data and requested_data.get("reaction") is not None:
issue_reaction = (
IssueReaction.objects.filter(
@@ -1137,7 +1183,9 @@ def create_comment_reaction_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
if requested_data and requested_data.get("reaction") is not None:
comment_reaction_id, comment_id = (
CommentReaction.objects.filter(
@@ -1148,7 +1196,9 @@ def create_comment_reaction_activity(
.values_list("id", "comment__id")
.first()
)
comment = IssueComment.objects.get(pk=comment_id, project_id=project_id)
comment = IssueComment.objects.get(
pk=comment_id, project_id=project_id
)
if (
comment is not None
and comment_reaction_id is not None
@@ -1222,7 +1272,9 @@ def create_issue_vote_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
if requested_data and requested_data.get("vote") is not None:
issue_activities.append(
IssueActivity(
@@ -1284,12 +1336,14 @@ def create_issue_relation_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
if current_instance is None and requested_data.get("issues") is not None:
for related_issue in requested_data.get("issues"):
for related_issue in requested_data.get("issues"):
issue = Issue.objects.get(pk=related_issue)
issue_activities.append(
IssueActivity(
@@ -1339,7 +1393,9 @@ def delete_issue_relation_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
@@ -1382,6 +1438,7 @@ def delete_issue_relation_activity(
)
)
def create_draft_issue_activity(
requested_data,
current_instance,
@@ -1416,7 +1473,9 @@ def update_draft_issue_activity(
issue_activities,
epoch,
):
requested_data = json.loads(requested_data) if requested_data is not None else None
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
@@ -1543,7 +1602,9 @@ def issue_activity(
)
# Save all the values to database
issue_activities_created = IssueActivity.objects.bulk_create(issue_activities)
issue_activities_created = IssueActivity.objects.bulk_create(
issue_activities
)
# Post the updates to segway for integrations and webhooks
if len(issue_activities_created):
# Don't send activities if the actor is a bot
@@ -1570,7 +1631,9 @@ def issue_activity(
project_id=project_id,
subscriber=subscriber,
issue_activities_created=json.dumps(
IssueActivitySerializer(issue_activities_created, many=True).data,
IssueActivitySerializer(
issue_activities_created, many=True
).data,
cls=DjangoJSONEncoder,
),
requested_data=requested_data,


@@ -36,7 +36,9 @@ def archive_old_issues():
Q(
project=project_id,
archived_at__isnull=True,
updated_at__lte=(timezone.now() - timedelta(days=archive_in * 30)),
updated_at__lte=(
timezone.now() - timedelta(days=archive_in * 30)
),
state__group__in=["completed", "cancelled"],
),
Q(issue_cycle__isnull=True)
@@ -46,7 +48,9 @@ def archive_old_issues():
),
Q(issue_module__isnull=True)
| (
Q(issue_module__module__target_date__lt=timezone.now().date())
Q(
issue_module__module__target_date__lt=timezone.now().date()
)
& Q(issue_module__isnull=False)
),
).filter(
@@ -74,7 +78,9 @@ def archive_old_issues():
_ = [
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"archived_at": str(archive_at)}),
requested_data=json.dumps(
{"archived_at": str(archive_at)}
),
actor_id=str(project.created_by_id),
issue_id=issue.id,
project_id=project_id,
@@ -108,7 +114,9 @@ def close_old_issues():
Q(
project=project_id,
archived_at__isnull=True,
updated_at__lte=(timezone.now() - timedelta(days=close_in * 30)),
updated_at__lte=(
timezone.now() - timedelta(days=close_in * 30)
),
state__group__in=["backlog", "unstarted", "started"],
),
Q(issue_cycle__isnull=True)
@@ -118,7 +126,9 @@ def close_old_issues():
),
Q(issue_module__isnull=True)
| (
Q(issue_module__module__target_date__lt=timezone.now().date())
Q(
issue_module__module__target_date__lt=timezone.now().date()
)
& Q(issue_module__isnull=False)
),
).filter(
@@ -131,7 +141,9 @@ def close_old_issues():
# Check if Issues
if issues:
if project.default_state is None:
close_state = State.objects.filter(group="cancelled").first()
close_state = State.objects.filter(
group="cancelled"
).first()
else:
close_state = project.default_state
@@ -165,4 +177,4 @@ def close_old_issues():
if settings.DEBUG:
print(e)
capture_exception(e)
return
return


@@ -33,7 +33,9 @@ def magic_link(email, key, token, current_site):
subject = f"Your unique Plane login code is {token}"
context = {"code": token, "email": email}
html_content = render_to_string("emails/auth/magic_signin.html", context)
html_content = render_to_string(
"emails/auth/magic_signin.html", context
)
text_content = strip_tags(html_content)
connection = get_connection(


@@ -12,7 +12,7 @@ from plane.db.models import (
Issue,
Notification,
IssueComment,
IssueActivity
IssueActivity,
)
# Third Party imports
@@ -20,9 +20,9 @@ from celery import shared_task
from bs4 import BeautifulSoup
# =========== Issue Description Html Parsing and Notification Functions ======================
def update_mentions_for_issue(issue, project, new_mentions, removed_mention):
aggregated_issue_mentions = []
@@ -32,14 +32,14 @@ def update_mentions_for_issue(issue, project, new_mentions, removed_mention):
mention_id=mention_id,
issue=issue,
project=project,
workspace_id=project.workspace_id
workspace_id=project.workspace_id,
)
)
IssueMention.objects.bulk_create(
aggregated_issue_mentions, batch_size=100)
IssueMention.objects.bulk_create(aggregated_issue_mentions, batch_size=100)
IssueMention.objects.filter(
issue=issue, mention__in=removed_mention).delete()
issue=issue, mention__in=removed_mention
).delete()
def get_new_mentions(requested_instance, current_instance):
@@ -48,15 +48,17 @@ def get_new_mentions(requested_instance, current_instance):
# extract mentions from both the instance of data
mentions_older = extract_mentions(current_instance)
mentions_newer = extract_mentions(requested_instance)
# Getting Set Difference from mentions_newer
new_mentions = [
mention for mention in mentions_newer if mention not in mentions_older]
mention for mention in mentions_newer if mention not in mentions_older
]
return new_mentions
# Get Removed Mention
@@ -70,10 +72,12 @@ def get_removed_mentions(requested_instance, current_instance):
# Getting Set Difference from mentions_newer
removed_mentions = [
mention for mention in mentions_older if mention not in mentions_newer]
mention for mention in mentions_older if mention not in mentions_newer
]
return removed_mentions
# Adds mentions as subscribers
@@ -84,27 +88,34 @@ def extract_mentions_as_subscribers(project_id, issue_id, mentions):
for mention_id in mentions:
# If the particular mention has not already been subscribed to the issue, he must be sent the mentioned notification
if not IssueSubscriber.objects.filter(
issue_id=issue_id,
subscriber_id=mention_id,
project_id=project_id,
).exists() and not IssueAssignee.objects.filter(
project_id=project_id, issue_id=issue_id,
assignee_id=mention_id
).exists() and not Issue.objects.filter(
project_id=project_id, pk=issue_id, created_by_id=mention_id
).exists():
project = Project.objects.get(pk=project_id)
bulk_mention_subscribers.append(IssueSubscriber(
workspace_id=project.workspace_id,
project_id=project_id,
if (
not IssueSubscriber.objects.filter(
issue_id=issue_id,
subscriber_id=mention_id,
))
project_id=project_id,
).exists()
and not IssueAssignee.objects.filter(
project_id=project_id,
issue_id=issue_id,
assignee_id=mention_id,
).exists()
and not Issue.objects.filter(
project_id=project_id, pk=issue_id, created_by_id=mention_id
).exists()
):
project = Project.objects.get(pk=project_id)
bulk_mention_subscribers.append(
IssueSubscriber(
workspace_id=project.workspace_id,
project_id=project_id,
issue_id=issue_id,
subscriber_id=mention_id,
)
)
return bulk_mention_subscribers
# Parse Issue Description & extracts mentions
def extract_mentions(issue_instance):
try:
@@ -113,46 +124,56 @@ def extract_mentions(issue_instance):
# Convert string to dictionary
data = json.loads(issue_instance)
html = data.get("description_html")
soup = BeautifulSoup(html, 'html.parser')
soup = BeautifulSoup(html, "html.parser")
mention_tags = soup.find_all(
'mention-component', attrs={'target': 'users'})
"mention-component", attrs={"target": "users"}
)
mentions = [mention_tag['id'] for mention_tag in mention_tags]
mentions = [mention_tag["id"] for mention_tag in mention_tags]
return list(set(mentions))
except Exception as e:
return []
# =========== Comment Parsing and Notification Functions ======================
def extract_comment_mentions(comment_value):
try:
mentions = []
soup = BeautifulSoup(comment_value, 'html.parser')
soup = BeautifulSoup(comment_value, "html.parser")
mentions_tags = soup.find_all(
'mention-component', attrs={'target': 'users'}
"mention-component", attrs={"target": "users"}
)
for mention_tag in mentions_tags:
mentions.append(mention_tag['id'])
mentions.append(mention_tag["id"])
return list(set(mentions))
except Exception as e:
return []
def get_new_comment_mentions(new_value, old_value):
mentions_newer = extract_comment_mentions(new_value)
if old_value is None:
return mentions_newer
mentions_older = extract_comment_mentions(old_value)
# Getting Set Difference from mentions_newer
new_mentions = [
mention for mention in mentions_newer if mention not in mentions_older]
mention for mention in mentions_newer if mention not in mentions_older
]
return new_mentions
def createMentionNotification(project, notification_comment, issue, actor_id, mention_id, issue_id, activity):
def createMentionNotification(
project,
notification_comment,
issue,
actor_id,
mention_id,
issue_id,
activity,
):
return Notification(
workspace=project.workspace,
sender="in_app:issue_activities:mentioned",
@@ -178,16 +199,26 @@ def createMentionNotification(project, notification_comment, issue, actor_id, me
"actor": str(activity.get("actor_id")),
"new_value": str(activity.get("new_value")),
"old_value": str(activity.get("old_value")),
}
},
},
)
@shared_task
def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created, requested_data, current_instance):
def notifications(
type,
issue_id,
project_id,
actor_id,
subscriber,
issue_activities_created,
requested_data,
current_instance,
):
issue_activities_created = (
json.loads(
issue_activities_created) if issue_activities_created is not None else None
json.loads(issue_activities_created)
if issue_activities_created is not None
else None
)
if type not in [
"issue.activity.deleted",
@@ -216,76 +247,110 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
# Get new mentions from the newer instance
new_mentions = get_new_mentions(
requested_instance=requested_data, current_instance=current_instance)
requested_instance=requested_data,
current_instance=current_instance,
)
removed_mention = get_removed_mentions(
requested_instance=requested_data, current_instance=current_instance)
requested_instance=requested_data,
current_instance=current_instance,
)
comment_mentions = []
all_comment_mentions = []
# Get New Subscribers from the mentions of the newer instance
requested_mentions = extract_mentions(
issue_instance=requested_data)
requested_mentions = extract_mentions(issue_instance=requested_data)
mention_subscribers = extract_mentions_as_subscribers(
project_id=project_id, issue_id=issue_id, mentions=requested_mentions)
project_id=project_id,
issue_id=issue_id,
mentions=requested_mentions,
)
for issue_activity in issue_activities_created:
issue_comment = issue_activity.get("issue_comment")
issue_comment_new_value = issue_activity.get("new_value")
issue_comment_old_value = issue_activity.get("old_value")
if issue_comment is not None:
# TODO: Maybe save the comment mentions, so that in future, we can filter out the issues based on comment mentions as well.
all_comment_mentions = all_comment_mentions + extract_comment_mentions(issue_comment_new_value)
new_comment_mentions = get_new_comment_mentions(old_value=issue_comment_old_value, new_value=issue_comment_new_value)
all_comment_mentions = (
all_comment_mentions
+ extract_comment_mentions(issue_comment_new_value)
)
new_comment_mentions = get_new_comment_mentions(
old_value=issue_comment_old_value,
new_value=issue_comment_new_value,
)
comment_mentions = comment_mentions + new_comment_mentions
comment_mention_subscribers = extract_mentions_as_subscribers( project_id=project_id, issue_id=issue_id, mentions=all_comment_mentions)
comment_mention_subscribers = extract_mentions_as_subscribers(
project_id=project_id,
issue_id=issue_id,
mentions=all_comment_mentions,
)
"""
We will not send subscription activity notification to the below mentioned user sets
- Those who have been newly mentioned in the issue description, we will send mention notification to them.
- When the activity is a comment_created and there exist a mention in the comment, then we have to send the "mention_in_comment" notification
- When the activity is a comment_updated and there exist a mention change, then also we have to send the "mention_in_comment" notification
"""
issue_assignees = list(
IssueAssignee.objects.filter(
project_id=project_id, issue_id=issue_id)
project_id=project_id, issue_id=issue_id
)
.exclude(assignee_id__in=list(new_mentions + comment_mentions))
.values_list("assignee", flat=True)
)
issue_subscribers = list(
IssueSubscriber.objects.filter(
project_id=project_id, issue_id=issue_id)
.exclude(subscriber_id__in=list(new_mentions + comment_mentions + [actor_id]))
project_id=project_id, issue_id=issue_id
)
.exclude(
subscriber_id__in=list(
new_mentions + comment_mentions + [actor_id]
)
)
.values_list("subscriber", flat=True)
)
issue = Issue.objects.filter(pk=issue_id).first()
if (issue.created_by_id is not None and str(issue.created_by_id) != str(actor_id)):
if issue.created_by_id is not None and str(issue.created_by_id) != str(
actor_id
):
issue_subscribers = issue_subscribers + [issue.created_by_id]
if subscriber:
# add the user to issue subscriber
try:
if str(issue.created_by_id) != str(actor_id) and uuid.UUID(actor_id) not in issue_assignees:
if (
str(issue.created_by_id) != str(actor_id)
and uuid.UUID(actor_id) not in issue_assignees
):
_ = IssueSubscriber.objects.get_or_create(
project_id=project_id, issue_id=issue_id, subscriber_id=actor_id
project_id=project_id,
issue_id=issue_id,
subscriber_id=actor_id,
)
except Exception as e:
pass
project = Project.objects.get(pk=project_id)
issue_subscribers = list(set(issue_subscribers + issue_assignees) - {uuid.UUID(actor_id)})
issue_subscribers = list(
set(issue_subscribers + issue_assignees) - {uuid.UUID(actor_id)}
)
for subscriber in issue_subscribers:
if subscriber in issue_subscribers:
sender = "in_app:issue_activities:subscribed"
if issue.created_by_id is not None and subscriber == issue.created_by_id:
if (
issue.created_by_id is not None
and subscriber == issue.created_by_id
):
sender = "in_app:issue_activities:created"
if subscriber in issue_assignees:
sender = "in_app:issue_activities:assigned"
@@ -293,12 +358,16 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
for issue_activity in issue_activities_created:
# Do not send notification for description update
if issue_activity.get("field") == "description":
continue;
continue
issue_comment = issue_activity.get("issue_comment")
if issue_comment is not None:
issue_comment = IssueComment.objects.get(
id=issue_comment, issue_id=issue_id, project_id=project_id, workspace_id=project.workspace_id)
id=issue_comment,
issue_id=issue_id,
project_id=project_id,
workspace_id=project.workspace_id,
)
bulk_notifications.append(
Notification(
workspace=project.workspace,
@@ -323,11 +392,16 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
"verb": str(issue_activity.get("verb")),
"field": str(issue_activity.get("field")),
"actor": str(issue_activity.get("actor_id")),
"new_value": str(issue_activity.get("new_value")),
"old_value": str(issue_activity.get("old_value")),
"new_value": str(
issue_activity.get("new_value")
),
"old_value": str(
issue_activity.get("old_value")
),
"issue_comment": str(
issue_comment.comment_stripped
if issue_activity.get("issue_comment") is not None
if issue_activity.get("issue_comment")
is not None
else ""
),
},
@@ -337,7 +411,8 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
# Add Mentioned as Issue Subscribers
IssueSubscriber.objects.bulk_create(
mention_subscribers + comment_mention_subscribers, batch_size=100)
mention_subscribers + comment_mention_subscribers, batch_size=100
)
last_activity = (
IssueActivity.objects.filter(issue_id=issue_id)
@@ -346,9 +421,9 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
)
actor = User.objects.get(pk=actor_id)
for mention_id in comment_mentions:
if (mention_id != actor_id):
if mention_id != actor_id:
for issue_activity in issue_activities_created:
notification = createMentionNotification(
project=project,
@@ -357,21 +432,20 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
actor_id=actor_id,
mention_id=mention_id,
issue_id=issue_id,
activity=issue_activity
activity=issue_activity,
)
bulk_notifications.append(notification)
for mention_id in new_mentions:
if (mention_id != actor_id):
if mention_id != actor_id:
if (
last_activity is not None
and last_activity.field == "description"
and actor_id == str(last_activity.actor_id)
):
bulk_notifications.append(
Notification(
workspace=project.workspace,
Notification(
workspace=project.workspace,
sender="in_app:issue_activities:mentioned",
triggered_by_id=actor_id,
receiver_id=mention_id,
@@ -383,22 +457,24 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
"issue": {
"id": str(issue_id),
"name": str(issue.name),
"identifier": str(issue.project.identifier),
"identifier": str(
issue.project.identifier
),
"sequence_id": issue.sequence_id,
"state_name": issue.state.name,
"state_group": issue.state.group,
},
"issue_activity": {
"id": str(last_activity.id),
"verb": str(last_activity.verb),
"field": str(last_activity.field),
"actor": str(last_activity.actor_id),
"new_value": str(last_activity.new_value),
"old_value": str(last_activity.old_value),
},
},
)
)
"state_group": issue.state.group,
},
"issue_activity": {
"id": str(last_activity.id),
"verb": str(last_activity.verb),
"field": str(last_activity.field),
"actor": str(last_activity.actor_id),
"new_value": str(last_activity.new_value),
"old_value": str(last_activity.old_value),
},
},
)
)
else:
for issue_activity in issue_activities_created:
notification = createMentionNotification(
@@ -408,15 +484,17 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
actor_id=actor_id,
mention_id=mention_id,
issue_id=issue_id,
activity=issue_activity
activity=issue_activity,
)
bulk_notifications.append(notification)
# save new mentions for the particular issue and remove the mentions that has been deleted from the description
update_mentions_for_issue(issue=issue, project=project, new_mentions=new_mentions,
removed_mention=removed_mention)
update_mentions_for_issue(
issue=issue,
project=project,
new_mentions=new_mentions,
removed_mention=removed_mention,
)
# Bulk create notifications
Notification.objects.bulk_create(bulk_notifications, batch_size=100)
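
The notification hunks above also show how black handles an over-long if statement: it wraps the condition in parentheses and breaks before each boolean operator. A rough sketch, again with invented names:

import black

# Invented names; running this shows black inserting parentheses to
# split a condition that exceeds the 88-column default, as in the
# notifications() hunks above.
src = (
    "if issue.created_by_id is not None and "
    "str(issue.created_by_id) != str(actor_id) and mention_id != actor_id:\n"
    "    pass\n"
)
print(black.format_str(src, mode=black.Mode()))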


@@ -15,6 +15,7 @@ from sentry_sdk import capture_exception
from plane.db.models import Project, User, ProjectMemberInvite
from plane.license.utils.instance_value import get_email_configuration
@shared_task
def project_invitation(email, project_id, token, current_site, invitor):
try:


@@ -189,7 +189,8 @@ def send_webhook(event, payload, kw, action, slug, bulk):
pk__in=[
str(event.get("issue")) for event in payload
]
).prefetch_related("issue_cycle", "issue_module"), many=True
).prefetch_related("issue_cycle", "issue_module"),
many=True,
).data
event = "issue"
action = "PATCH"
@@ -197,7 +198,9 @@ def send_webhook(event, payload, kw, action, slug, bulk):
event_data = [
get_model_data(
event=event,
event_id=payload.get("id") if isinstance(payload, dict) else None,
event_id=payload.get("id")
if isinstance(payload, dict)
else None,
many=False,
)
]


@@ -36,7 +36,6 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
# The complete url including the domain
abs_url = str(current_site) + relative_link
(
EMAIL_HOST,
EMAIL_HOST_USER,