[WEB-4346] chore: refactor copy_s3_object bg task (#7298)

Sangeetha 2025-07-17 13:10:34 +05:30 committed by GitHub
parent 6bb79df0eb
commit e313aee3df
3 changed files with 239 additions and 46 deletions
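
The change extracts the per-asset duplication loop into a reusable copy_assets helper and renames the Celery task from copy_s3_objects to copy_s3_objects_of_description_and_assets. Below is a minimal sketch of how a caller might enqueue the renamed task; the import path, the keyword-style arguments, and all concrete values are assumptions for illustration, not taken from this commit.

# Minimal sketch, assuming the task lives in plane.bgtasks.copy_s3_object and
# is enqueued with Celery's .delay() (provided by @shared_task). Every value
# below is a placeholder assumption, not a real identifier from this commit.
from plane.bgtasks.copy_s3_object import copy_s3_objects_of_description_and_assets

copy_s3_objects_of_description_and_assets.delay(
    entity_name="PAGE",                                        # assumed entity type
    entity_identifier="00000000-0000-0000-0000-000000000001",  # entity whose description_html is duplicated
    project_id="00000000-0000-0000-0000-000000000002",
    slug="my-workspace",                                        # assumed workspace slug
    user_id="00000000-0000-0000-0000-000000000003",
)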


@@ -83,8 +83,52 @@ def sync_with_external_service(entity_name, description_html):
     return {}
+def copy_assets(entity, entity_identifier, project_id, asset_ids, user_id):
+    duplicated_assets = []
+    workspace = entity.workspace
+    storage = S3Storage()
+    original_assets = FileAsset.objects.filter(
+        workspace=workspace, project_id=project_id, id__in=asset_ids
+    )
+    for original_asset in original_assets:
+        destination_key = (
+            f"{workspace.id}/{uuid.uuid4().hex}-{original_asset.attributes.get('name')}"
+        )
+        duplicated_asset = FileAsset.objects.create(
+            attributes={
+                "name": original_asset.attributes.get("name"),
+                "type": original_asset.attributes.get("type"),
+                "size": original_asset.attributes.get("size"),
+            },
+            asset=destination_key,
+            size=original_asset.size,
+            workspace=workspace,
+            created_by_id=user_id,
+            entity_type=original_asset.entity_type,
+            project_id=project_id,
+            storage_metadata=original_asset.storage_metadata,
+            **get_entity_id_field(original_asset.entity_type, entity_identifier),
+        )
+        storage.copy_object(original_asset.asset, destination_key)
+        duplicated_assets.append(
+            {
+                "new_asset_id": str(duplicated_asset.id),
+                "old_asset_id": str(original_asset.id),
+            }
+        )
+    if duplicated_assets:
+        FileAsset.objects.filter(
+            pk__in=[item["new_asset_id"] for item in duplicated_assets]
+        ).update(is_uploaded=True)
+    return duplicated_assets
 @shared_task
-def copy_s3_objects(entity_name, entity_identifier, project_id, slug, user_id):
+def copy_s3_objects_of_description_and_assets(
+    entity_name, entity_identifier, project_id, slug, user_id
+):
     """
     Step 1: Extract asset ids from the description_html of the entity
     Step 2: Duplicate the assets
@@ -100,53 +144,20 @@ def copy_s3_objects(entity_name, entity_identifier, project_id, slug, user_id):
         entity = model_class.objects.get(id=entity_identifier)
         asset_ids = extract_asset_ids(entity.description_html, "image-component")
-        duplicated_assets = []
-        workspace = entity.workspace
-        storage = S3Storage()
-        original_assets = FileAsset.objects.filter(
-            workspace=workspace, project_id=project_id, id__in=asset_ids
+        duplicated_assets = copy_assets(
+            entity, entity_identifier, project_id, asset_ids, user_id
         )
-        for original_asset in original_assets:
-            destination_key = f"{workspace.id}/{uuid.uuid4().hex}-{original_asset.attributes.get('name')}"
-            duplicated_asset = FileAsset.objects.create(
-                attributes={
-                    "name": original_asset.attributes.get("name"),
-                    "type": original_asset.attributes.get("type"),
-                    "size": original_asset.attributes.get("size"),
-                },
-                asset=destination_key,
-                size=original_asset.size,
-                workspace=workspace,
-                created_by_id=user_id,
-                entity_type=original_asset.entity_type,
-                project_id=project_id,
-                storage_metadata=original_asset.storage_metadata,
-                **get_entity_id_field(original_asset.entity_type, entity_identifier),
-            )
-            storage.copy_object(original_asset.asset, destination_key)
-            duplicated_assets.append(
-                {
-                    "new_asset_id": str(duplicated_asset.id),
-                    "old_asset_id": str(original_asset.id),
-                }
-            )
-        updated_html = update_description(entity, duplicated_assets, "image-component")
         if duplicated_assets:
-            FileAsset.objects.filter(
-                pk__in=[item["new_asset_id"] for item in duplicated_assets]
-            ).update(is_uploaded=True)
+            updated_html = update_description(
+                entity, duplicated_assets, "image-component"
+            )
-        external_data = sync_with_external_service(entity_name, updated_html)
+            external_data = sync_with_external_service(entity_name, updated_html)
-        if external_data:
-            entity.description = external_data.get("description")
-            entity.description_binary = base64.b64decode(
-                external_data.get("description_binary")
-            )
-            entity.save()
+            if external_data:
+                entity.description = external_data.get("description")
+                entity.description_binary = base64.b64decode(
+                    external_data.get("description_binary")
+                )
+                entity.save()
         return
     except Exception as e: