diff --git a/apps/api/plane/settings/storage.py b/apps/api/plane/settings/storage.py
index e4a978bd2..1087cad0f 100644
--- a/apps/api/plane/settings/storage.py
+++ b/apps/api/plane/settings/storage.py
@@ -63,40 +63,50 @@ class S3Storage(S3Boto3Storage):
         )
 
     def generate_presigned_post(self, object_name, file_type, file_size, expiration=None):
-        """Generate a presigned URL to upload an S3 object"""
+        """Generate a presigned URL for browser-direct upload.
+
+        BB-PATCH (binarybeachio fork): method name preserved for caller
+        compat, but this now mints a presigned PUT URL — not POST.
+
+        Why: Cloudflare R2 and Backblaze B2 — the two most common self-host
+        S3-compatible backends — do NOT implement S3 PostObject. Both return
+        HTTP 501 NotImplemented for the bucket-form POST verb that vanilla
+        Plane uses. Confirmed empirically against both backends 2026-04-30.
+        Rolling our own backend support isn't tractable; PUT is universally
+        supported (R2, B2, AWS S3, MinIO, Wasabi, etc).
+
+        Tradeoff: we lose signed enforcement of `content-length-range`. Size
+        is still validated server-side at presign time via the `file_size`
+        param (see callers: 413 raised before we get here), so a determined
+        client could only over-upload by misreporting the size — they'd be
+        capped by the bucket's max-file-size at worst.
+
+        See docs/features/storage-upload-flow.md in the binarybeachio repo
+        for the full decision record + rollback procedure (`git revert` this
+        commit and rebuild the images).
+        """
         if expiration is None:
             expiration = self.signed_url_expiration
 
-        fields = {"Content-Type": file_type}
-
-        conditions = [
-            {"bucket": self.aws_storage_bucket_name},
-            ["content-length-range", 1, file_size],
-            {"Content-Type": file_type},
-        ]
-
-        # Add condition for the object name (key)
-        if object_name.startswith("${filename}"):
-            conditions.append(["starts-with", "$key", object_name[: -len("${filename}")]])
-        else:
-            fields["key"] = object_name
-            conditions.append({"key": object_name})
-
-        # Generate the presigned POST URL
         try:
-            # Generate a presigned URL for the S3 object
-            response = self.s3_client.generate_presigned_post(
-                Bucket=self.aws_storage_bucket_name,
-                Key=object_name,
-                Fields=fields,
-                Conditions=conditions,
+            url = self.s3_client.generate_presigned_url(
+                "put_object",
+                Params={
+                    "Bucket": self.aws_storage_bucket_name,
+                    "Key": object_name,
+                    "ContentType": file_type,
+                },
                 ExpiresIn=expiration,
+                HttpMethod="PUT",
             )
-        # Handle errors
         except ClientError as e:
-            print(f"Error generating presigned POST URL: {e}")
+            print(f"Error generating presigned PUT URL: {e}")
             return None
-        return response
+        return {
+            "url": url,
+            "method": "PUT",
+            "fields": {"Content-Type": file_type, "key": object_name},
+        }
 
     def _get_content_disposition(self, disposition, filename=None):
         """Helper method to generate Content-Disposition header value"""
diff --git a/apps/api/plane/tests/unit/settings/test_storage.py b/apps/api/plane/tests/unit/settings/test_storage.py
index 00856aeec..8cca3c2a8 100644
--- a/apps/api/plane/tests/unit/settings/test_storage.py
+++ b/apps/api/plane/tests/unit/settings/test_storage.py
@@ -63,13 +63,15 @@ class TestS3StorageSignedURLExpiration:
     )
     @patch("plane.settings.storage.boto3")
     def test_generate_presigned_post_uses_default_expiration(self, mock_boto3):
-        """Test that generate_presigned_post uses the configured default expiration"""
+        """Test that generate_presigned_post uses the configured default expiration
+
+        BB-PATCH: generate_presigned_post now mints a presigned PUT URL under
+        the hood (R2/B2 don't implement PostObject). Test asserts the
+        underlying generate_presigned_url call rather than generate_presigned_post.
+        """
         # Mock the boto3 client and its response
         mock_s3_client = Mock()
-        mock_s3_client.generate_presigned_post.return_value = {
-            "url": "https://test-url.com",
-            "fields": {},
-        }
+        mock_s3_client.generate_presigned_url.return_value = "https://test-url.com"
        mock_boto3.client.return_value = mock_s3_client
 
         # Create S3Storage instance
@@ -79,9 +81,10 @@ class TestS3StorageSignedURLExpiration:
         storage.generate_presigned_post("test-object", "image/png", 1024)
 
         # Assert that the boto3 method was called with the default expiration (3600)
-        mock_s3_client.generate_presigned_post.assert_called_once()
-        call_kwargs = mock_s3_client.generate_presigned_post.call_args[1]
+        mock_s3_client.generate_presigned_url.assert_called_once()
+        call_kwargs = mock_s3_client.generate_presigned_url.call_args[1]
         assert call_kwargs["ExpiresIn"] == 3600
+        assert call_kwargs["HttpMethod"] == "PUT"
 
     @patch.dict(
         os.environ,
@@ -96,13 +99,14 @@
     )
     @patch("plane.settings.storage.boto3")
     def test_generate_presigned_post_uses_custom_expiration(self, mock_boto3):
-        """Test that generate_presigned_post uses custom expiration from env variable"""
+        """Test that generate_presigned_post uses custom expiration from env variable
+
+        BB-PATCH: see test_generate_presigned_post_uses_default_expiration for
+        why this asserts generate_presigned_url instead of generate_presigned_post.
+        """
         # Mock the boto3 client and its response
         mock_s3_client = Mock()
-        mock_s3_client.generate_presigned_post.return_value = {
-            "url": "https://test-url.com",
-            "fields": {},
-        }
+        mock_s3_client.generate_presigned_url.return_value = "https://test-url.com"
         mock_boto3.client.return_value = mock_s3_client
 
         # Create S3Storage instance with SIGNED_URL_EXPIRATION=60
@@ -112,9 +116,10 @@ class TestS3StorageSignedURLExpiration:
         storage.generate_presigned_post("test-object", "image/png", 1024)
 
         # Assert that the boto3 method was called with custom expiration (60)
-        mock_s3_client.generate_presigned_post.assert_called_once()
-        call_kwargs = mock_s3_client.generate_presigned_post.call_args[1]
+        mock_s3_client.generate_presigned_url.assert_called_once()
+        call_kwargs = mock_s3_client.generate_presigned_url.call_args[1]
         assert call_kwargs["ExpiresIn"] == 60
+        assert call_kwargs["HttpMethod"] == "PUT"
 
     @patch.dict(
         os.environ,
diff --git a/apps/api/plane/utils/openapi/responses.py b/apps/api/plane/utils/openapi/responses.py
index cb0f81dce..d98d5375c 100644
--- a/apps/api/plane/utils/openapi/responses.py
+++ b/apps/api/plane/utils/openapi/responses.py
@@ -415,12 +415,11 @@ GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE = OpenApiResponse(
             name="Generic Asset Upload Response",
             value={
                 "upload_data": {
-                    "url": "https://s3.amazonaws.com/bucket-name",
+                    "url": "https://s3.amazonaws.com/bucket-name/workspace-id/uuid-filename.pdf?X-Amz-Signature=...",
+                    "method": "PUT",
                     "fields": {
+                        "Content-Type": "application/pdf",
                         "key": "workspace-id/uuid-filename.pdf",
-                        "AWSAccessKeyId": "AKIA...",
-                        "policy": "eyJ...",
-                        "signature": "abc123...",
                     },
                 },
                 "asset_id": "550e8400-e29b-41d4-a716-446655440000",
diff --git a/apps/web/core/services/file-upload.service.ts b/apps/web/core/services/file-upload.service.ts
index f8f493967..cce8b0bbe 100644
--- a/apps/web/core/services/file-upload.service.ts
+++ b/apps/web/core/services/file-upload.service.ts
@@ -6,6 +6,8 @@
 import type { AxiosRequestConfig } from "axios";
 import axios from "axios";
 
+// plane services
+import type { TFileUploadRequest } from "@plane/services";
 // services
 import { APIService } from "@/services/api.service";
 
@@ -16,16 +18,18 @@ export class FileUploadService extends APIService {
     super("");
   }
 
+  // BB-PATCH: dispatches on payload.method (PUT for fork default, POST kept
+  // for upstream-Plane parity). See packages/services/src/file/helper.ts.
   async uploadFile(
-    url: string,
-    data: FormData,
+    payload: TFileUploadRequest,
     uploadProgressHandler?: AxiosRequestConfig["onUploadProgress"]
   ): Promise<void> {
     this.cancelSource = axios.CancelToken.source();
-    return this.post(url, data, {
-      headers: {
-        "Content-Type": "multipart/form-data",
-      },
+    return this.request({
+      method: payload.method,
+      url: payload.url,
+      data: payload.body,
+      headers: payload.headers,
       cancelToken: this.cancelSource.token,
       withCredentials: false,
       onUploadProgress: uploadProgressHandler,
diff --git a/apps/web/core/services/file.service.ts b/apps/web/core/services/file.service.ts
index 6e252d76a..90669c768 100644
--- a/apps/web/core/services/file.service.ts
+++ b/apps/web/core/services/file.service.ts
@@ -83,11 +83,7 @@ export class FileService extends APIService {
       .then(async (response) => {
         const signedURLResponse: TFileSignedURLResponse = response?.data;
         const fileUploadPayload = generateFileUploadPayload(signedURLResponse, file);
-        await this.fileUploadService.uploadFile(
-          signedURLResponse.upload_data.url,
-          fileUploadPayload,
-          uploadProgressHandler
-        );
+        await this.fileUploadService.uploadFile(fileUploadPayload, uploadProgressHandler);
         await this.updateWorkspaceAssetUploadStatus(workspaceSlug.toString(), signedURLResponse.asset_id);
         return signedURLResponse;
       })
@@ -160,11 +156,7 @@ export class FileService extends APIService {
       .then(async (response) => {
         const signedURLResponse: TFileSignedURLResponse = response?.data;
         const fileUploadPayload = generateFileUploadPayload(signedURLResponse, file);
-        await this.fileUploadService.uploadFile(
-          signedURLResponse.upload_data.url,
-          fileUploadPayload,
-          uploadProgressHandler
-        );
+        await this.fileUploadService.uploadFile(fileUploadPayload, uploadProgressHandler);
         await this.updateProjectAssetUploadStatus(workspaceSlug, projectId, signedURLResponse.asset_id);
         return signedURLResponse;
       })
@@ -190,7 +182,7 @@ export class FileService extends APIService {
       .then(async (response) => {
         const signedURLResponse: TFileSignedURLResponse = response?.data;
         const fileUploadPayload = generateFileUploadPayload(signedURLResponse, file);
-        await this.fileUploadService.uploadFile(signedURLResponse.upload_data.url, fileUploadPayload);
+        await this.fileUploadService.uploadFile(fileUploadPayload);
         await this.updateUserAssetUploadStatus(signedURLResponse.asset_id);
         return signedURLResponse;
       })
diff --git a/apps/web/core/services/issue/issue_attachment.service.ts b/apps/web/core/services/issue/issue_attachment.service.ts
index 98660ae6c..d53cbf3c8 100644
--- a/apps/web/core/services/issue/issue_attachment.service.ts
+++ b/apps/web/core/services/issue/issue_attachment.service.ts
@@ -55,11 +55,7 @@ export class IssueAttachmentService extends APIService {
       .then(async (response) => {
         const signedURLResponse: TIssueAttachmentUploadResponse = response?.data;
         const fileUploadPayload = generateFileUploadPayload(signedURLResponse, file);
-        await this.fileUploadService.uploadFile(
-          signedURLResponse.upload_data.url,
-          fileUploadPayload,
-          uploadProgressHandler
-        );
+        await this.fileUploadService.uploadFile(fileUploadPayload, uploadProgressHandler);
         await this.updateIssueAttachmentUploadStatus(workspaceSlug, projectId, issueId, signedURLResponse.asset_id);
         return signedURLResponse.attachment;
       })
diff --git a/packages/services/src/file/file-upload.service.ts b/packages/services/src/file/file-upload.service.ts
index 32ca4de51..8c004321e 100644
--- a/packages/services/src/file/file-upload.service.ts
+++ b/packages/services/src/file/file-upload.service.ts
@@ -7,6 +7,8 @@
 import axios from "axios";
 // api service
 import { APIService } from "../api.service";
+// helpers
+import type { TFileUploadRequest } from "./helper";
 
 /**
  * Service class for handling file upload operations
@@ -21,18 +23,17 @@ export class FileUploadService extends APIService {
   }
 
   /**
-   * Uploads a file to the specified signed URL
-   * @param {string} url - The URL to upload the file to
-   * @param {FormData} data - The form data to upload
-   * @returns {Promise<void>} Promise resolving to void
-   * @throws {Error} If the request fails
+   * Uploads a file to the presigned URL using the request descriptor produced
+   * by `generateFileUploadPayload`. BB-PATCH: dispatches on `payload.method`
+   * (PUT for the fork default, POST kept for upstream-Plane parity).
    */
-  async uploadFile(url: string, data: FormData): Promise<void> {
+  async uploadFile(payload: TFileUploadRequest): Promise<void> {
     this.cancelSource = axios.CancelToken.source();
-    return this.post(url, data, {
-      headers: {
-        "Content-Type": "multipart/form-data",
-      },
+    return this.request({
+      method: payload.method,
+      url: payload.url,
+      data: payload.body,
+      headers: payload.headers,
       cancelToken: this.cancelSource.token,
       withCredentials: false,
     })
diff --git a/packages/services/src/file/helper.ts b/packages/services/src/file/helper.ts
index b8e962839..c7ac56015 100644
--- a/packages/services/src/file/helper.ts
+++ b/packages/services/src/file/helper.ts
@@ -49,17 +49,52 @@ const validateFilename = (filename: string): string | null => {
   return null;
 };
 
+// BB-PATCH (binarybeachio fork): upload payload is now a request descriptor
+// (url+method+body+headers), not raw FormData. The fork mints presigned PUT
+// URLs because R2/B2 don't implement PostObject — see backend storage.py
+// docstring + docs/features/storage-upload-flow.md.
+export type TFileUploadRequest = {
+  url: string;
+  method: "PUT" | "POST";
+  body: File | FormData;
+  headers: Record<string, string>;
+};
+
 /**
- * @description from the provided signed URL response, generate a payload to be used to upload the file
+ * @description Build a request descriptor for uploading the file using the
+ * presigned URL returned by the API. Dispatches on `upload_data.method`:
+ * - "PUT" (fork default): raw file body + Content-Type header
+ * - "POST" (vanilla AWS S3 path, kept for upstream parity): multipart form
 * @param {TFileSignedURLResponse} signedURLResponse
 * @param {File} file
- * @returns {FormData} file upload request payload
+ * @returns {TFileUploadRequest}
 */
-export const generateFileUploadPayload = (signedURLResponse: TFileSignedURLResponse, file: File): FormData => {
-  const formData = new FormData();
-  Object.entries(signedURLResponse.upload_data.fields).forEach(([key, value]) => formData.append(key, value));
-  formData.append("file", file);
-  return formData;
+export const generateFileUploadPayload = (
+  signedURLResponse: TFileSignedURLResponse,
+  file: File
+): TFileUploadRequest => {
+  const data = signedURLResponse.upload_data;
+  if (data.method === "POST") {
+    const formData = new FormData();
+    Object.entries(data.fields).forEach(
+      ([key, value]) => value != null && formData.append(key, value as string)
+    );
+    formData.append("file", file);
+    return {
+      url: data.url,
+      method: "POST",
+      body: formData,
+      headers: { "Content-Type": "multipart/form-data" },
+    };
+  }
+  return {
+    url: data.url,
+    method: "PUT",
+    body: file,
+    headers: {
+      "Content-Type": file.type || data.fields["Content-Type"] || "application/octet-stream",
+    },
+  };
 };
 
 /**
diff --git a/packages/services/src/file/sites-file.service.ts b/packages/services/src/file/sites-file.service.ts
index debe945f8..034e2fc60 100644
--- a/packages/services/src/file/sites-file.service.ts
+++ b/packages/services/src/file/sites-file.service.ts
@@ -88,7 +88,7 @@ export class SitesFileService extends FileService {
       .then(async (response) => {
         const signedURLResponse: TFileSignedURLResponse = response?.data;
         const fileUploadPayload = generateFileUploadPayload(signedURLResponse, file);
-        await this.fileUploadService.uploadFile(signedURLResponse.upload_data.url, fileUploadPayload);
+        await this.fileUploadService.uploadFile(fileUploadPayload);
         await this.updateAssetUploadStatus(anchor, signedURLResponse.asset_id);
         return signedURLResponse;
       })
diff --git a/packages/types/src/file.ts b/packages/types/src/file.ts
index 01a189259..48fa104ea 100644
--- a/packages/types/src/file.ts
+++ b/packages/types/src/file.ts
@@ -20,19 +20,19 @@ export type TFileEntityInfo = {
 export type TFileMetaData = TFileMetaDataLite & TFileEntityInfo;
 
+// BB-PATCH (binarybeachio fork): upload now uses presigned PUT (not POST).
+// `method` and the trimmed `fields` shape reflect that. See backend
+// plane/settings/storage.py docstring + docs/features/storage-upload-flow.md
+// in binarybeachio for the full decision record.
 export type TFileSignedURLResponse = {
   asset_id: string;
   asset_url: string;
   upload_data: {
     url: string;
+    method?: "PUT" | "POST";
     fields: {
       "Content-Type": string;
       key: string;
-      "x-amz-algorithm": string;
-      "x-amz-credential": string;
-      "x-amz-date": string;
-      policy: string;
-      "x-amz-signature": string;
     };
   };
 };
 