* chore: migrations and backmigration to move attachments to file asset * chore: move attachments to file assets * chore: update migration file to include created by and updated by and size * chore: remove uninmport errors * chore: make size as float field * fix: file asset uploads * chore: asset uploads migration changes * chore: v2 assets endpoint * chore: remove unused imports * chore: issue attachments * chore: issue attachments * chore: workspace logo endpoints * chore: private bucket changes * chore: user asset endpoint * chore: add logo_url validation * chore: cover image urlk * chore: change asset max length * chore: pages endpoint * chore: store the storage_metadata only when none * chore: attachment asset apis * chore: update create private bucket * chore: make bucket private * chore: fix response of user uploads * fix: response of user uploads * fix: job to fix file asset uploads * fix: user asset endpoints * chore: avatar for user profile * chore: external apis user url endpoint * chore: upload workspace and user asset actions updated * chore: analytics endpoint * fix: analytics export * chore: avatar urls * chore: update user avatar instances * chore: avatar urls for assignees and creators * chore: bucket permission script * fix: all user avatr instances in the web app * chore: update project cover image logic * fix: issue attachment endpoint * chore: patch endpoint for issue attachment * chore: attachments * chore: change attachment storage class * chore: update issue attachment endpoints * fix: issue attachment * chore: update issue attachment implementation * chore: page asset endpoints * fix: web build errors * chore: attachments * chore: page asset urls * chore: comment and issue asset endpoints * chore: asset endpoints * chore: attachment endpoints * chore: bulk asset endpoint * chore: restore endpoint * chore: project assets endpoints * chore: asset url * chore: add delete asset endpoints * chore: fix asset upload endpoint * chore: update patch 
endpoints * chore: update patch endpoint * chore: update editor image handling * chore: asset restore endpoints * chore: avatar url for space assets * chore: space app assets migration * fix: space app urls * chore: space endpoints * fix: old editor images rendering logic * fix: issue archive and attachment activity * chore: asset deletes * chore: attachment delete * fix: issue attachment * fix: issue attachment get * chore: cover image url for projects * chore: remove duplicate py file * fix: url check function * chore: project cover asset delete * fix: migrations * chore: delete migration files * chore: update bucket * fix: build errors * chore: add asset url in intake attachment * chore: project cover fix * chore: update next.config * chore: delete old workspace logos * chore: workspace assets * chore: asset get for space * chore: update project modal * chore: remove unused imports * fix: space app editor helper * chore: update rich-text read-only editor * chore: create multiple column for entity identifiers * chore: update migrations * chore: remove entity identifier * fix: issue assets * chore: update maximum file size logic * chore: update editor max file size logic * fix: close modal after removing workspace logo * chore: update uploaded assets' status post issue creation * chore: added file size limit to the space app * dev: add file size limit restriction on all endpoints * fix: remove old workspace logo and user avatar --------- Co-authored-by: pablohashescobar <nikhilschacko@gmail.com>
154 lines
5.4 KiB
Python
154 lines
5.4 KiB
Python
# Python imports
|
|
import os
|
|
|
|
# Third party imports
|
|
import boto3
|
|
from botocore.exceptions import ClientError
|
|
from urllib.parse import quote
|
|
|
|
# Module imports
|
|
from plane.utils.exception_logger import log_exception
|
|
from storages.backends.s3boto3 import S3Boto3Storage
|
|
|
|
|
|
class S3Storage(S3Boto3Storage):
    """S3 storage class to generate presigned URLs for S3 objects."""
    # NOTE: the docstring above was previously placed after url(), making it
    # a stray expression statement instead of the class __doc__.

    def url(self, name, parameters=None, expire=None, http_method=None):
        """Return the raw object key instead of a signed URL.

        Presigned access is produced explicitly via generate_presigned_url();
        the default storage URL is deliberately just the stored key.
        """
        return name

    def __init__(self, request=None):
        """Build a boto3 S3 (or MinIO) client from environment configuration.

        Args:
            request: Optional HTTP request. When USE_MINIO is enabled its
                scheme/host are used as the S3 endpoint so presigned URLs
                resolve through the same host the browser used.
        """
        # S3Boto3Storage.__init__ is intentionally not called; this class
        # only relies on the boto3 client it builds below.
        # Get the AWS credentials and bucket name from the environment
        self.aws_access_key_id = os.environ.get("AWS_ACCESS_KEY_ID")
        # Use the AWS_SECRET_ACCESS_KEY environment variable for the secret key
        self.aws_secret_access_key = os.environ.get("AWS_SECRET_ACCESS_KEY")
        # Use the AWS_S3_BUCKET_NAME environment variable for the bucket name
        self.aws_storage_bucket_name = os.environ.get("AWS_S3_BUCKET_NAME")
        # Use the AWS_REGION environment variable for the region
        self.aws_region = os.environ.get("AWS_REGION")
        # Prefer the explicit S3 endpoint; fall back to the MinIO endpoint
        self.aws_s3_endpoint_url = os.environ.get(
            "AWS_S3_ENDPOINT_URL"
        ) or os.environ.get("MINIO_ENDPOINT_URL")

        if os.environ.get("USE_MINIO") == "1":
            # Route through the request's host when available so presigned
            # URLs are reachable from the client; otherwise fall back to the
            # configured endpoint (previously this crashed when request=None).
            minio_endpoint = (
                f"{request.scheme}://{request.get_host()}"
                if request is not None
                else self.aws_s3_endpoint_url
            )
            # Create an S3 client for MinIO
            self.s3_client = boto3.client(
                "s3",
                aws_access_key_id=self.aws_access_key_id,
                aws_secret_access_key=self.aws_secret_access_key,
                region_name=self.aws_region,
                endpoint_url=minio_endpoint,
                config=boto3.session.Config(signature_version="s3v4"),
            )
        else:
            # Create an S3 client against the configured endpoint
            self.s3_client = boto3.client(
                "s3",
                aws_access_key_id=self.aws_access_key_id,
                aws_secret_access_key=self.aws_secret_access_key,
                region_name=self.aws_region,
                endpoint_url=self.aws_s3_endpoint_url,
                config=boto3.session.Config(signature_version="s3v4"),
            )

    def generate_presigned_post(
        self, object_name, file_type, file_size, expiration=3600
    ):
        """Generate a presigned POST payload to upload an S3 object.

        Args:
            object_name: Target key. May end with the S3 "${filename}"
                placeholder to let the uploader choose the final name.
            file_type: Required Content-Type of the upload.
            file_size: Maximum allowed upload size in bytes (policy-enforced).
            expiration: Lifetime of the presigned policy, in seconds.

        Returns:
            The boto3 presigned-post dict (url + fields), or None on error.
        """
        fields = {
            "Content-Type": file_type,
        }

        conditions = [
            {"bucket": self.aws_storage_bucket_name},
            # Enforce a non-empty upload no larger than file_size bytes
            ["content-length-range", 1, file_size],
            {"Content-Type": file_type},
        ]

        # Add condition for the object name (key).
        # BUGFIX: the placeholder check previously used startswith() while
        # stripping the token from the END of the key; with endswith() the
        # "starts-with $key" policy now matches the real key prefix, per the
        # S3 POST-policy ${filename} convention.
        if object_name.endswith("${filename}"):
            conditions.append(
                ["starts-with", "$key", object_name[: -len("${filename}")]]
            )
        else:
            fields["key"] = object_name
            conditions.append({"key": object_name})

        # Generate the presigned POST URL
        try:
            # Generate a presigned URL for the S3 object
            response = self.s3_client.generate_presigned_post(
                Bucket=self.aws_storage_bucket_name,
                Key=object_name,
                Fields=fields,
                Conditions=conditions,
                ExpiresIn=expiration,
            )
        # Handle errors
        except ClientError as e:
            # Log through the shared logger (was print), consistent with the
            # other methods of this class.
            log_exception(e)
            return None

        return response

    def _get_content_disposition(self, disposition, filename=None):
        """Helper method to generate a Content-Disposition header value."""
        if filename:
            # Percent-encode the filename (RFC 5987 filename* form) so
            # special/non-ASCII characters survive the header.
            encoded_filename = quote(filename)
            return f"{disposition}; filename*=UTF-8''{encoded_filename}"
        return disposition

    def generate_presigned_url(
        self,
        object_name,
        expiration=3600,
        http_method="GET",
        disposition="inline",
        filename=None,
    ):
        """Generate a presigned URL to share an S3 object.

        Args:
            object_name: Key of the object to share.
            expiration: URL lifetime in seconds.
            http_method: HTTP method the URL is signed for.
            disposition: "inline" or "attachment" for Content-Disposition.
            filename: Optional download filename for Content-Disposition.

        Returns:
            The presigned URL string, or None on error.
        """
        # NOTE: the docstring previously appeared after this statement,
        # making it a stray string expression instead of the __doc__.
        content_disposition = self._get_content_disposition(
            disposition, filename
        )
        try:
            response = self.s3_client.generate_presigned_url(
                "get_object",
                Params={
                    "Bucket": self.aws_storage_bucket_name,
                    "Key": str(object_name),
                    "ResponseContentDisposition": content_disposition,
                },
                ExpiresIn=expiration,
                HttpMethod=http_method,
            )
        except ClientError as e:
            log_exception(e)
            return None

        # The response contains the presigned URL
        return response

    def get_object_metadata(self, object_name):
        """Get the metadata for an S3 object.

        Returns:
            A dict with ContentType, ContentLength, LastModified (ISO-8601
            string or None), ETag and Metadata, or None on error.
        """
        try:
            response = self.s3_client.head_object(
                Bucket=self.aws_storage_bucket_name, Key=object_name
            )
        except ClientError as e:
            log_exception(e)
            return None

        return {
            "ContentType": response.get("ContentType"),
            "ContentLength": response.get("ContentLength"),
            "LastModified": (
                response.get("LastModified").isoformat()
                if response.get("LastModified")
                else None
            ),
            "ETag": response.get("ETag"),
            "Metadata": response.get("Metadata", {}),
        }
|