* chore: migrations and backmigration to move attachments to file asset * chore: move attachments to file assets * chore: update migration file to include created by and updated by and size * chore: remove unused import errors * chore: make size as float field * fix: file asset uploads * chore: asset uploads migration changes * chore: v2 assets endpoint * chore: remove unused imports * chore: issue attachments * chore: issue attachments * chore: workspace logo endpoints * chore: private bucket changes * chore: user asset endpoint * chore: add logo_url validation * chore: cover image url * chore: change asset max length * chore: pages endpoint * chore: store the storage_metadata only when none * chore: attachment asset apis * chore: update create private bucket * chore: make bucket private * chore: fix response of user uploads * fix: response of user uploads * fix: job to fix file asset uploads * fix: user asset endpoints * chore: avatar for user profile * chore: external apis user url endpoint * chore: upload workspace and user asset actions updated * chore: analytics endpoint * fix: analytics export * chore: avatar urls * chore: update user avatar instances * chore: avatar urls for assignees and creators * chore: bucket permission script * fix: all user avatar instances in the web app * chore: update project cover image logic * fix: issue attachment endpoint * chore: patch endpoint for issue attachment * chore: attachments * chore: change attachment storage class * chore: update issue attachment endpoints * fix: issue attachment * chore: update issue attachment implementation * chore: page asset endpoints * fix: web build errors * chore: attachments * chore: page asset urls * chore: comment and issue asset endpoints * chore: asset endpoints * chore: attachment endpoints * chore: bulk asset endpoint * chore: restore endpoint * chore: project assets endpoints * chore: asset url * chore: add delete asset endpoints * chore: fix asset upload endpoint * chore: update patch 
endpoints * chore: update patch endpoint * chore: update editor image handling * chore: asset restore endpoints * chore: avatar url for space assets * chore: space app assets migration * fix: space app urls * chore: space endpoints * fix: old editor images rendering logic * fix: issue archive and attachment activity * chore: asset deletes * chore: attachment delete * fix: issue attachment * fix: issue attachment get * chore: cover image url for projects * chore: remove duplicate py file * fix: url check function * chore: project cover asset delete * fix: migrations * chore: delete migration files * chore: update bucket * fix: build errors * chore: add asset url in intake attachment * chore: project cover fix * chore: update next.config * chore: delete old workspace logos * chore: workspace assets * chore: asset get for space * chore: update project modal * chore: remove unused imports * fix: space app editor helper * chore: update rich-text read-only editor * chore: create multiple column for entity identifiers * chore: update migrations * chore: remove entity identifier * fix: issue assets * chore: update maximum file size logic * chore: update editor max file size logic * fix: close modal after removing workspace logo * chore: update uploaded assets' status post issue creation * chore: added file size limit to the space app * dev: add file size limit restriction on all endpoints * fix: remove old workspace logo and user avatar --------- Co-authored-by: pablohashescobar <nikhilschacko@gmail.com>
513 lines
14 KiB
Python
513 lines
14 KiB
Python
# Python imports
|
|
import csv
|
|
import io
|
|
import logging
|
|
|
|
# Third party imports
|
|
from celery import shared_task
|
|
|
|
# Django imports
|
|
from django.core.mail import EmailMultiAlternatives, get_connection
|
|
from django.template.loader import render_to_string
|
|
from django.utils.html import strip_tags
|
|
|
|
# Module imports
|
|
from plane.db.models import Issue
|
|
from plane.license.utils.instance_value import get_email_configuration
|
|
from plane.utils.analytics_plot import build_graph_plot
|
|
from plane.utils.exception_logger import log_exception
|
|
from plane.utils.issue_filters import issue_filters
|
|
|
|
# Maps internal queryset field names to the human-readable column headers
# used in the exported CSV.
row_mapping = {
    "state__name": "State",
    "state__group": "State Group",
    "labels__id": "Label",
    "assignees__id": "Assignee Name",
    "start_date": "Start Date",
    "target_date": "Due Date",
    "completed_at": "Completed At",
    "created_at": "Created At",
    "issue_count": "Issue Count",
    "priority": "Priority",
    "estimate": "Estimate",
    "issue_cycle__cycle_id": "Cycle",
    "issue_module__module_id": "Module",
}

# Field-name constants compared against the requested x_axis / segment
# throughout this module.
ASSIGNEE_ID = "assignees__id"
LABEL_ID = "labels__id"
STATE_ID = "state_id"
CYCLE_ID = "issue_cycle__cycle_id"
MODULE_ID = "issue_module__module_id"
|
def send_export_email(email, slug, csv_buffer, rows):
    """Email the generated analytics CSV to the requesting user.

    ``rows`` is accepted for interface compatibility but is not used here;
    the attachment content comes entirely from ``csv_buffer``.
    """
    subject = "Your Export is ready"
    html_content = render_to_string("emails/exports/analytics.html", {})
    text_content = strip_tags(html_content)

    # Rewind so the full CSV content is attached.
    csv_buffer.seek(0)

    (
        EMAIL_HOST,
        EMAIL_HOST_USER,
        EMAIL_HOST_PASSWORD,
        EMAIL_PORT,
        EMAIL_USE_TLS,
        EMAIL_USE_SSL,
        EMAIL_FROM,
    ) = get_email_configuration()

    # Build a mail connection from the instance configuration; the TLS/SSL
    # flags arrive as "0"/"1" strings, hence the string comparison.
    mail_connection = get_connection(
        host=EMAIL_HOST,
        port=int(EMAIL_PORT),
        username=EMAIL_HOST_USER,
        password=EMAIL_HOST_PASSWORD,
        use_tls=EMAIL_USE_TLS == "1",
        use_ssl=EMAIL_USE_SSL == "1",
    )

    message = EmailMultiAlternatives(
        subject=subject,
        body=text_content,
        from_email=EMAIL_FROM,
        to=[email],
        connection=mail_connection,
    )
    message.attach(f"{slug}-analytics.csv", csv_buffer.getvalue())
    message.send(fail_silently=False)
|
|
|
|
|
|
def get_assignee_details(slug, filters):
    """Return one row per distinct assignee (id, names, avatar URL) among
    the filtered issues of the workspace."""
    # NOTE(review): the filter checks ``assignees__avatar`` while the
    # selected column is ``assignees__avatar_url`` — confirm this pairing
    # is intentional post avatar-URL migration.
    issues = Issue.issue_objects.filter(
        workspace__slug=slug, **filters, assignees__avatar__isnull=False
    )
    return (
        issues.distinct("assignees__id")
        .order_by("assignees__id")
        .values(
            "assignees__avatar_url",
            "assignees__display_name",
            "assignees__first_name",
            "assignees__last_name",
            "assignees__id",
        )
    )
|
|
|
|
|
|
def get_label_details(slug, filters):
    """Return one row per distinct label (id, color, name) attached to the
    filtered issues of the workspace."""
    # NOTE(review): uses ``Issue.objects`` while the sibling helpers use
    # ``Issue.issue_objects`` — confirm whether that difference is intended.
    issues = Issue.objects.filter(
        workspace__slug=slug, **filters, labels__id__isnull=False
    )
    return (
        issues.distinct("labels__id")
        .order_by("labels__id")
        .values("labels__id", "labels__color", "labels__name")
    )
|
|
|
|
|
|
def get_state_details(slug, filters):
    """Return one row per distinct state (id, name, color) among the
    filtered issues of the workspace."""
    issues = Issue.issue_objects.filter(workspace__slug=slug, **filters)
    return (
        issues.distinct("state_id")
        .order_by("state_id")
        .values("state_id", "state__name", "state__color")
    )
|
|
|
|
|
|
def get_module_details(slug, filters):
    """Return one row per distinct module (id, name) linked to the
    filtered issues of the workspace."""
    issues = Issue.issue_objects.filter(
        workspace__slug=slug,
        **filters,
        issue_module__module_id__isnull=False,
    )
    return (
        issues.distinct("issue_module__module_id")
        .order_by("issue_module__module_id")
        .values("issue_module__module_id", "issue_module__module__name")
    )
|
|
|
|
|
|
def get_cycle_details(slug, filters):
    """Return one row per distinct cycle (id, name) linked to the
    filtered issues of the workspace."""
    issues = Issue.issue_objects.filter(
        workspace__slug=slug,
        **filters,
        issue_cycle__cycle_id__isnull=False,
    )
    return (
        issues.distinct("issue_cycle__cycle_id")
        .order_by("issue_cycle__cycle_id")
        .values("issue_cycle__cycle_id", "issue_cycle__cycle__name")
    )
|
|
|
|
|
|
def generate_csv_from_rows(rows):
    """Serialize ``rows`` (iterable of row tuples) into an in-memory CSV.

    Returns the ``io.StringIO`` buffer; all fields are quoted
    (``QUOTE_ALL``) to keep names containing commas intact.
    """
    csv_buffer = io.StringIO()
    writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
    # writerows instead of a side-effect list comprehension.
    writer.writerows(rows)
    return csv_buffer
|
|
|
|
|
|
def _find_detail(details, id_key, value):
    """Return the first detail dict whose ``id_key`` equals ``value``
    (compared as strings), or None when no match exists."""
    return next((d for d in details if str(d[id_key]) == str(value)), None)


def generate_segmented_rows(
    distribution,
    x_axis,
    y_axis,
    segment,
    key,
    assignee_details,
    label_details,
    state_details,
    cycle_details,
    module_details,
):
    """Build CSV rows (header row first) for a segmented analytics export.

    Each data row is ``(x value, total, *per-segment values)``. Raw ids in
    the first column and in the header's segment columns are replaced with
    display names from the corresponding ``*_details`` querysets.

    Bug fix: the MODULE_ID header branch previously searched
    ``label_details`` instead of ``module_details``, so module segment
    headers were never resolved to module names.
    """
    # Distinct segment values across the whole distribution; set iteration
    # order is arbitrary but consistent within this call.
    segment_zero = list(
        {item.get("segment") for sublist in distribution.values() for item in sublist}
    )

    row_zero = [
        row_mapping.get(x_axis, "X-Axis"),
        row_mapping.get(y_axis, "Y-Axis"),
    ] + segment_zero

    rows = []
    for item, data in distribution.items():
        # First two cells: the x value and the total across all segments.
        generated_row = [
            item,
            sum(obj.get(key) for obj in data if obj.get(key) is not None),
        ]

        # One cell per segment value; "0" when the segment is absent.
        for segment_value in segment_zero:
            generated_row.append(
                next(
                    (x.get(key) for x in data if x.get("segment") == segment_value),
                    "0",
                )
            )

        # Replace the raw id in the first column with a display name.
        if x_axis == ASSIGNEE_ID:
            assignee = _find_detail(assignee_details, ASSIGNEE_ID, item)
            if assignee:
                generated_row[0] = (
                    f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
                )

        if x_axis == LABEL_ID:
            label = _find_detail(label_details, LABEL_ID, item)
            if label:
                generated_row[0] = f"{label['labels__name']}"

        if x_axis == STATE_ID:
            state = _find_detail(state_details, STATE_ID, item)
            if state:
                generated_row[0] = f"{state['state__name']}"

        if x_axis == CYCLE_ID:
            cycle = _find_detail(cycle_details, CYCLE_ID, item)
            if cycle:
                generated_row[0] = f"{cycle['issue_cycle__cycle__name']}"

        if x_axis == MODULE_ID:
            module = _find_detail(module_details, MODULE_ID, item)
            if module:
                generated_row[0] = f"{module['issue_module__module__name']}"

        rows.append(tuple(generated_row))

    # Replace raw ids in the header's segment columns with display names.
    if segment == ASSIGNEE_ID:
        for index, segm in enumerate(row_zero[2:]):
            assignee = _find_detail(assignee_details, ASSIGNEE_ID, segm)
            if assignee:
                row_zero[index + 2] = (
                    f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
                )

    if segment == LABEL_ID:
        for index, segm in enumerate(row_zero[2:]):
            label = _find_detail(label_details, LABEL_ID, segm)
            if label:
                row_zero[index + 2] = label["labels__name"]

    if segment == STATE_ID:
        for index, segm in enumerate(row_zero[2:]):
            state = _find_detail(state_details, STATE_ID, segm)
            if state:
                row_zero[index + 2] = state["state__name"]

    if segment == MODULE_ID:
        for index, segm in enumerate(row_zero[2:]):
            # BUG FIX: was searching label_details for module ids.
            module = _find_detail(module_details, MODULE_ID, segm)
            if module:
                row_zero[index + 2] = module["issue_module__module__name"]

    if segment == CYCLE_ID:
        for index, segm in enumerate(row_zero[2:]):
            cycle = _find_detail(cycle_details, CYCLE_ID, segm)
            if cycle:
                row_zero[index + 2] = cycle["issue_cycle__cycle__name"]

    return [tuple(row_zero)] + rows
|
|
|
|
|
|
def generate_non_segmented_rows(
    distribution,
    x_axis,
    y_axis,
    key,
    assignee_details,
    label_details,
    state_details,
    cycle_details,
    module_details,
):
    """Build CSV rows (header row first) for a non-segmented export.

    Each data row is ``(x value, y value)``; raw ids in the first column
    are replaced with display names from the ``*_details`` querysets.
    """
    rows = []
    for item, data in distribution.items():
        # ``key`` is already "count" or "estimate" (derived from y_axis by
        # the caller), so use it directly instead of re-deriving it —
        # consistent with generate_segmented_rows.
        row = [item, data[0].get(key)]

        if x_axis == ASSIGNEE_ID:
            assignee = next(
                (
                    user
                    for user in assignee_details
                    if str(user[ASSIGNEE_ID]) == str(item)
                ),
                None,
            )
            if assignee:
                row[0] = (
                    f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
                )

        if x_axis == LABEL_ID:
            label = next(
                (lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
                None,
            )
            if label:
                row[0] = f"{label['labels__name']}"

        if x_axis == STATE_ID:
            state = next(
                (sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
                None,
            )
            if state:
                row[0] = f"{state['state__name']}"

        if x_axis == CYCLE_ID:
            cycle = next(
                (cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
                None,
            )
            if cycle:
                row[0] = f"{cycle['issue_cycle__cycle__name']}"

        if x_axis == MODULE_ID:
            module = next(
                (mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
                None,
            )
            if module:
                row[0] = f"{module['issue_module__module__name']}"

        rows.append(tuple(row))

    row_zero = [
        row_mapping.get(x_axis, "X-Axis"),
        row_mapping.get(y_axis, "Y-Axis"),
    ]
    return [tuple(row_zero)] + rows
|
|
|
|
|
|
@shared_task
def analytic_export_task(email, data, slug):
    """Build an analytics CSV for workspace ``slug`` and email it to ``email``.

    ``data`` carries the requested ``x_axis``/``y_axis``/``segment`` plus
    issue filters. Any failure is logged via ``log_exception`` and
    swallowed (the task returns None either way).
    """
    try:
        filters = issue_filters(data, "POST")
        queryset = Issue.issue_objects.filter(**filters, workspace__slug=slug)

        x_axis = data.get("x_axis", False)
        y_axis = data.get("y_axis", False)
        segment = data.get("segment", False)

        distribution = build_graph_plot(
            queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
        )
        # Column of the distribution rows that holds the y value.
        key = "count" if y_axis == "issue_count" else "estimate"

        # Fetch display details only for the dimensions actually in use;
        # the generators accept any iterable, so {} works as "no details".
        assignee_details = (
            get_assignee_details(slug, filters)
            if x_axis == ASSIGNEE_ID or segment == ASSIGNEE_ID
            else {}
        )
        label_details = (
            get_label_details(slug, filters)
            if x_axis == LABEL_ID or segment == LABEL_ID
            else {}
        )
        state_details = (
            get_state_details(slug, filters)
            if x_axis == STATE_ID or segment == STATE_ID
            else {}
        )
        cycle_details = (
            get_cycle_details(slug, filters)
            if x_axis == CYCLE_ID or segment == CYCLE_ID
            else {}
        )
        module_details = (
            get_module_details(slug, filters)
            if x_axis == MODULE_ID or segment == MODULE_ID
            else {}
        )

        if segment:
            rows = generate_segmented_rows(
                distribution,
                x_axis,
                y_axis,
                segment,
                key,
                assignee_details,
                label_details,
                state_details,
                cycle_details,
                module_details,
            )
        else:
            rows = generate_non_segmented_rows(
                distribution,
                x_axis,
                y_axis,
                key,
                assignee_details,
                label_details,
                state_details,
                cycle_details,
                module_details,
            )

        csv_buffer = generate_csv_from_rows(rows)
        send_export_email(email, slug, csv_buffer, rows)
        # Typo fix: "succesfully" -> "successfully".
        logging.getLogger("plane").info("Email sent successfully.")
        return
    except Exception as e:
        log_exception(e)
        return
|