diff --git a/client/webui/frontend/src/lib/components/projects/KnowledgeSection.tsx b/client/webui/frontend/src/lib/components/projects/KnowledgeSection.tsx index 9b43df6c95..dc766974bf 100644 --- a/client/webui/frontend/src/lib/components/projects/KnowledgeSection.tsx +++ b/client/webui/frontend/src/lib/components/projects/KnowledgeSection.tsx @@ -5,7 +5,7 @@ import { useConfigContext, useDownload, useIsProjectOwner } from "@/lib/hooks"; import { useProjectArtifacts } from "@/lib/api/projects/hooks"; import { useProjectContext } from "@/lib/providers"; import type { ArtifactInfo, Project } from "@/lib/types"; -import { formatRelativeTime, validateFileSizes } from "@/lib/utils"; +import { formatRelativeTime, validateFileSizes, validateBatchUploadSize, validateProjectSizeLimit, calculateTotalFileSize } from "@/lib/utils"; import { ArtifactBar } from "../chat/artifact"; import { FileDetails } from "../chat/file"; @@ -27,8 +27,10 @@ export const KnowledgeSection: React.FC = ({ project }) = const { onDownload } = useDownload(project.id); const { validationLimits } = useConfigContext(); - // Get max upload size from config - if not available, skip client-side validation - const maxUploadSizeBytes = validationLimits?.maxUploadSizeBytes; + // Get validation limits from config - if not available, skip client-side validation + const maxPerFileUploadSizeBytes = validationLimits?.maxPerFileUploadSizeBytes; + const maxBatchUploadSizeBytes = validationLimits?.maxBatchUploadSizeBytes; + const maxProjectSizeBytes = validationLimits?.maxProjectSizeBytes; const [filesToUpload, setFilesToUpload] = useState(null); const [isSubmitting, setIsSubmitting] = useState(false); @@ -48,13 +50,30 @@ export const KnowledgeSection: React.FC = ({ project }) = }); }, [artifacts]); - // Validate file sizes before showing upload dialog - // if maxUploadSizeBytes is not configured, validation is skipped and backend handles it + const currentProjectArtifactSizeBytes = React.useMemo(() => { + return calculateTotalFileSize(artifacts); + }, [artifacts]); + const handleValidateFileSizes = useCallback( (files: FileList) => { - return validateFileSizes(files, { maxSizeBytes: maxUploadSizeBytes }); + const fileSizeResult = validateFileSizes(files, { maxSizeBytes: maxPerFileUploadSizeBytes }); + if (!fileSizeResult.valid) { + return fileSizeResult; + } + + const batchSizeResult = validateBatchUploadSize(files, maxBatchUploadSizeBytes); + if (!batchSizeResult.valid) { + return batchSizeResult; + } + + const projectSizeLimitResult = validateProjectSizeLimit(currentProjectArtifactSizeBytes, files, maxProjectSizeBytes); + if (!projectSizeLimitResult.valid) { + return { valid: false, error: projectSizeLimitResult.error }; + } + + return { valid: true }; }, - [maxUploadSizeBytes] + [maxPerFileUploadSizeBytes, maxBatchUploadSizeBytes, maxProjectSizeBytes, currentProjectArtifactSizeBytes] ); const handleFileUploadChange = (files: FileList | null) => { diff --git a/client/webui/frontend/src/lib/components/projects/ProjectImportDialog.tsx b/client/webui/frontend/src/lib/components/projects/ProjectImportDialog.tsx index 896b94f468..a1309ea412 100644 --- a/client/webui/frontend/src/lib/components/projects/ProjectImportDialog.tsx +++ b/client/webui/frontend/src/lib/components/projects/ProjectImportDialog.tsx @@ -35,7 +35,7 @@ const DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES = 100 * 1024 * 1024; export const ProjectImportDialog: React.FC = ({ open, onOpenChange, onImport }) => { const { validationLimits } = useConfigContext(); - const maxUploadSizeBytes = 
validationLimits?.maxUploadSizeBytes; + const maxPerFileUploadSizeBytes = validationLimits?.maxPerFileUploadSizeBytes; const maxZipUploadSizeBytes = validationLimits?.maxZipUploadSizeBytes ?? DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES; const [selectedFiles, setSelectedFiles] = useState(null); @@ -129,7 +129,7 @@ export const ProjectImportDialog: React.FC = ({ open, } } - const isOversized = maxUploadSizeBytes ? size > maxUploadSizeBytes : false; + const isOversized = maxPerFileUploadSizeBytes ? size > maxPerFileUploadSizeBytes : false; const artifactInfo: ArtifactPreviewInfo = { name: filename, size, isOversized }; artifacts.push(artifactInfo); @@ -273,11 +273,11 @@ export const ProjectImportDialog: React.FC = ({ open, {/* Warning for oversized artifacts */} - {projectPreview.oversizedArtifacts.length > 0 && maxUploadSizeBytes && ( + {projectPreview.oversizedArtifacts.length > 0 && maxPerFileUploadSizeBytes && (
a.name) .join(", ")}${projectPreview.oversizedArtifacts.length > 3 ? ` and ${projectPreview.oversizedArtifacts.length - 3} more` : ""}`} diff --git a/client/webui/frontend/src/lib/contexts/ConfigContext.ts b/client/webui/frontend/src/lib/contexts/ConfigContext.ts index 221defe888..6eed03f639 100644 --- a/client/webui/frontend/src/lib/contexts/ConfigContext.ts +++ b/client/webui/frontend/src/lib/contexts/ConfigContext.ts @@ -4,8 +4,10 @@ export interface ValidationLimits { projectNameMax?: number; projectDescriptionMax?: number; projectInstructionsMax?: number; - maxUploadSizeBytes?: number; + maxPerFileUploadSizeBytes?: number; + maxBatchUploadSizeBytes?: number; maxZipUploadSizeBytes?: number; + maxProjectSizeBytes?: number; } export interface ConfigContextValue { diff --git a/client/webui/frontend/src/lib/utils/file-validation.ts b/client/webui/frontend/src/lib/utils/file-validation.ts index 541242ff33..6ccf514ec2 100644 --- a/client/webui/frontend/src/lib/utils/file-validation.ts +++ b/client/webui/frontend/src/lib/utils/file-validation.ts @@ -2,6 +2,8 @@ * File validation utilities for consistent file size validation across the application. */ +import { formatBytes } from "./format"; + export interface FileSizeValidationResult { valid: boolean; error?: string; @@ -19,6 +21,14 @@ export interface FileSizeValidationOptions { maxFilesToList?: number; } +export interface ProjectSizeLimitValidationResult { + valid: boolean; + error?: string; + currentSize: number; + newSize: number; + totalSize: number; +} + /** * Validates file sizes against a maximum limit. * @@ -56,25 +66,25 @@ export function validateFileSizes(files: FileList | File[], options: FileSizeVal } // Build error message - const maxSizeMB = (maxSizeBytes / (1024 * 1024)).toFixed(0); + const maxSizeWithUnit = formatBytes(maxSizeBytes, 0); let errorMsg: string; if (oversizedFiles.length === 1) { const file = oversizedFiles[0]; if (includeFileSizes) { - const fileSizeMB = (file.size / (1024 * 1024)).toFixed(2); - errorMsg = `File "${file.name}" (${fileSizeMB} MB) exceeds the maximum size of ${maxSizeMB} MB.`; + const fileSizeWithUnit = formatBytes(file.size, 2); + errorMsg = `File "${file.name}" (${fileSizeWithUnit}) exceeds the maximum size of ${maxSizeWithUnit}.`; } else { - errorMsg = `File "${file.name}" exceeds the maximum size of ${maxSizeMB} MB.`; + errorMsg = `File "${file.name}" exceeds the maximum size of ${maxSizeWithUnit}.`; } } else { const fileList = oversizedFiles.slice(0, maxFilesToList); - const fileNames = includeFileSizes ? fileList.map(f => `${f.name} (${(f.size / (1024 * 1024)).toFixed(2)} MB)`) : fileList.map(f => f.name); + const fileNames = includeFileSizes ? fileList.map(f => `${f.name} (${formatBytes(f.size, 2)})`) : fileList.map(f => f.name); const remaining = oversizedFiles.length - maxFilesToList; const suffix = remaining > 0 ? ` and ${remaining} more` : ""; - errorMsg = `${oversizedFiles.length} files exceed the maximum size of ${maxSizeMB} MB: ${fileNames.join(", ")}${suffix}`; + errorMsg = `${oversizedFiles.length} files exceed the maximum size of ${maxSizeWithUnit}: ${fileNames.join(", ")}${suffix}`; } return { @@ -85,20 +95,42 @@ export function validateFileSizes(files: FileList | File[], options: FileSizeVal } /** - * Formats a file size in bytes to a human-readable string. + * Validates that the batch upload size doesn't exceed the limit. + * This is independent of the total project size. 
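+ *
+ * Example (hypothetical 100 MB limit; `files` supplied by the caller):
+ *   const result = validateBatchUploadSize(files, 100 * 1024 * 1024);
+ *   if (!result.valid) console.warn(result.error);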
 *
- * @param bytes - File size in bytes
- * @param decimals - Number of decimal places (default: 2)
- * @returns Formatted string like "1.5 MB" or "500 KB"
+ * @param files - FileList or array of Files to validate
+ * @param maxBatchUploadSizeBytes - Maximum batch upload size limit
+ * @returns Validation result with error message if batch exceeds limit
 */
-export function formatFileSize(bytes: number, decimals: number = 2): string {
-    if (bytes === 0) return "0 Bytes";
+export function validateBatchUploadSize(files: FileList | File[], maxBatchUploadSizeBytes?: number): FileSizeValidationResult {
+    if (!maxBatchUploadSizeBytes) {
+        return { valid: true };
+    }
+
+    const totalBatchSize = calculateTotalFileSize(files);
 
-    const k = 1024;
-    const sizes = ["Bytes", "KB", "MB", "GB", "TB"];
-    const i = Math.floor(Math.log(bytes) / Math.log(k));
+    if (totalBatchSize <= maxBatchUploadSizeBytes) {
+        return { valid: true };
+    }
+
+    const totalBatchWithUnit = formatBytes(totalBatchSize, 2);
+    const limitWithUnit = formatBytes(maxBatchUploadSizeBytes, 0);
+
+    return {
+        valid: false,
+        error: `Batch upload size (${totalBatchWithUnit}) exceeds limit of ${limitWithUnit}. Please upload fewer files at once.`,
+    };
+}
 
-    return `${parseFloat((bytes / Math.pow(k, i)).toFixed(decimals))} ${sizes[i]}`;
+/**
+ * Calculates the total size of multiple files in bytes.
+ *
+ * @param files - FileList, array of Files, or array of objects with a size property
+ * @returns Total size in bytes
+ */
+export function calculateTotalFileSize(files: FileList | File[] | Array<{ size: number }>): number {
+    const fileArray: Array<{ size: number }> = Array.isArray(files) ? files : Array.from(files);
+    return fileArray.reduce((sum, file) => sum + file.size, 0);
 }
 
 /**
@@ -123,7 +155,42 @@ export function isFileSizeValid(file: File, maxSizeBytes?: number): boolean {
  * @returns Formatted error message
  */
 export function createFileSizeErrorMessage(filename: string, actualSize: number, maxSize: number): string {
-    const actualSizeMB = (actualSize / (1024 * 1024)).toFixed(2);
-    const maxSizeMB = (maxSize / (1024 * 1024)).toFixed(2);
-    return `File "${filename}" is too large: ${actualSizeMB} MB exceeds the maximum allowed size of ${maxSizeMB} MB.`;
+    const actualSizeWithUnit = formatBytes(actualSize, 2);
+    const maxSizeWithUnit = formatBytes(maxSize, 2);
+    return `File "${filename}" is too large: ${actualSizeWithUnit} exceeds the maximum allowed size of ${maxSizeWithUnit}.`;
+}
+
+/**
+ * Validates total project size: existing files + new files <= maxProjectSizeBytes
+ * This enforces a project-level storage limit, not a per-request limit.
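+ *
+ * Example (hypothetical sizes): with 60 MB already stored and a 100 MB cap,
+ * any further upload may add at most 40 MB of new files:
+ *   const check = validateProjectSizeLimit(60 * 1024 * 1024, newFiles, 100 * 1024 * 1024);
+ *   if (!check.valid) console.warn(check.error);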
+ * + * @param currentProjectSizeBytes - Current total size of project artifacts in bytes + * @param newFiles - FileList or array of Files to be uploaded + * @param maxProjectSizeBytes - Maximum total project size limit + * @returns Validation result with error message if limit would be exceeded + */ +export function validateProjectSizeLimit(currentProjectSizeBytes: number, newFiles: FileList | File[], maxProjectSizeBytes?: number): ProjectSizeLimitValidationResult { + const newSize = calculateTotalFileSize(newFiles); + const totalSize = currentProjectSizeBytes + newSize; + + if (!maxProjectSizeBytes) { + return { valid: true, currentSize: currentProjectSizeBytes, newSize, totalSize }; + } + + if (totalSize <= maxProjectSizeBytes) { + return { valid: true, currentSize: currentProjectSizeBytes, newSize, totalSize }; + } + + const currentWithUnit = formatBytes(currentProjectSizeBytes, 2); + const newWithUnit = formatBytes(newSize, 2); + const totalWithUnit = formatBytes(totalSize, 2); + const limitWithUnit = formatBytes(maxProjectSizeBytes, 0); + + return { + valid: false, + currentSize: currentProjectSizeBytes, + newSize, + totalSize, + error: `Project size limit exceeded. Current: ${currentWithUnit}, New files: ${newWithUnit}, Total: ${totalWithUnit} exceeds limit of ${limitWithUnit}.`, + }; } diff --git a/src/solace_agent_mesh/gateway/base/app.py b/src/solace_agent_mesh/gateway/base/app.py index fc141a832c..8018ed165e 100644 --- a/src/solace_agent_mesh/gateway/base/app.py +++ b/src/solace_agent_mesh/gateway/base/app.py @@ -16,6 +16,7 @@ get_gateway_response_subscription_topic, get_gateway_status_subscription_topic, ) +from .. import constants log = logging.getLogger(__name__) @@ -77,14 +78,14 @@ class BaseGatewayComponent(ComponentBase): "name": "gateway_max_artifact_resolve_size_bytes", "required": False, "type": "integer", - "default": 104857600, # 100MB + "default": constants.DEFAULT_MAX_ARTIFACT_RESOLVE_SIZE_BYTES, "description": "Maximum size of an individual artifact's raw content for 'artifact_content' embeds and max total accumulated size for a parent artifact after internal recursive resolution.", }, { "name": "gateway_recursive_embed_depth", "required": False, "type": "integer", - "default": 12, + "default": constants.DEFAULT_GATEWAY_RECURSIVE_EMBED_DEPTH, "description": "Maximum depth for recursively resolving 'artifact_content' embeds within files.", }, { @@ -104,16 +105,30 @@ class BaseGatewayComponent(ComponentBase): "name": "gateway_max_message_size_bytes", "required": False, "type": "integer", - "default": 10_000_000, # 10MB + "default": constants.DEFAULT_GATEWAY_MAX_MESSAGE_SIZE_BYTES, "description": "Maximum allowed message size in bytes for messages published by the gateway.", }, { "name": "gateway_max_upload_size_bytes", "required": False, "type": "integer", - "default": 52428800, # 50MB + "default": constants.DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES, "description": "Maximum file upload size in bytes. 
Validated before reading file content to prevent memory exhaustion.", }, + { + "name": "gateway_max_project_size_bytes", + "required": False, + "type": "integer", + "default": constants.DEFAULT_MAX_PROJECT_SIZE_BYTES, + "description": "Maximum total upload size limit per project in bytes.", + }, + { + "name": "gateway_max_batch_upload_size_bytes", + "required": False, + "type": "integer", + "default": constants.DEFAULT_MAX_BATCH_UPLOAD_SIZE_BYTES, + "description": "Maximum total size in bytes for all files in a single batch upload request.", + }, # --- Default User Identity Configuration --- { "name": "default_user_identity", @@ -274,7 +289,7 @@ def __init__(self, app_info: Dict[str, Any], **kwargs): ) new_size_limit_key = "gateway_max_artifact_resolve_size_bytes" - default_new_size_limit = 104857600 + default_new_size_limit = constants.DEFAULT_MAX_ARTIFACT_RESOLVE_SIZE_BYTES old_size_limit_key = "gateway_artifact_content_limit_bytes" new_value = resolved_app_config_block.get(new_size_limit_key) @@ -299,16 +314,22 @@ def __init__(self, app_info: Dict[str, Any], **kwargs): self.gateway_max_artifact_resolve_size_bytes = default_new_size_limit self.gateway_recursive_embed_depth: int = resolved_app_config_block.get( - "gateway_recursive_embed_depth", 12 + "gateway_recursive_embed_depth", constants.DEFAULT_GATEWAY_RECURSIVE_EMBED_DEPTH ) self.artifact_handling_mode: str = resolved_app_config_block.get( "artifact_handling_mode", "reference" ) self.gateway_max_message_size_bytes: int = resolved_app_config_block.get( - "gateway_max_message_size_bytes", 10_000_000 + "gateway_max_message_size_bytes", constants.DEFAULT_GATEWAY_MAX_MESSAGE_SIZE_BYTES ) self.gateway_max_upload_size_bytes: int = resolved_app_config_block.get( - "gateway_max_upload_size_bytes", 52428800 + "gateway_max_upload_size_bytes", constants.DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES + ) + self.gateway_max_project_size_bytes: int = resolved_app_config_block.get( + "gateway_max_project_size_bytes", constants.DEFAULT_MAX_PROJECT_SIZE_BYTES + ) + self.gateway_max_batch_upload_size_bytes: int = resolved_app_config_block.get( + "gateway_max_batch_upload_size_bytes", constants.DEFAULT_MAX_BATCH_UPLOAD_SIZE_BYTES ) modified_app_info = app_info.copy() diff --git a/src/solace_agent_mesh/gateway/constants.py b/src/solace_agent_mesh/gateway/constants.py new file mode 100644 index 0000000000..756f84cbe6 --- /dev/null +++ b/src/solace_agent_mesh/gateway/constants.py @@ -0,0 +1,28 @@ +""" +Shared constants for the HTTP/SSE gateway. + +This module contains configuration defaults that are shared across +multiple components to avoid duplication and ensure consistency. 
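+
+Example usage (assuming the package layout introduced in this change):
+
+    from solace_agent_mesh.gateway import constants
+
+    per_file_cap = constants.DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES  # 52428800 (50MB)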
+""" + +# ===== ARTIFACT AND MESSAGE SIZE LIMITS ===== + +# Artifact prefix +ARTIFACTS_PREFIX = 'artifacts/' + +# Artifact content resolution limits +DEFAULT_MAX_ARTIFACT_RESOLVE_SIZE_BYTES = 104857600 # 100MB - max size for artifact content embeds + +# Recursive embed resolution limits +DEFAULT_GATEWAY_RECURSIVE_EMBED_DEPTH = 12 # Maximum depth for resolving artifact_content embeds + +# Message size limits +DEFAULT_GATEWAY_MAX_MESSAGE_SIZE_BYTES = 10_000_000 # 10MB - max message size for gateway publishing + +# ===== FILE UPLOAD SIZE LIMITS ===== + +# Production defaults +DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES = 52428800 # 50MB - per-file upload limit +DEFAULT_MAX_BATCH_UPLOAD_SIZE_BYTES = 104857600 # 100MB - batch upload limit (sum of files in one upload) +DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES = 104857600 # 100MB - ZIP import limit +DEFAULT_MAX_PROJECT_SIZE_BYTES = 104857600 # 100MB - total project size limit diff --git a/src/solace_agent_mesh/gateway/http_sse/routers/config.py b/src/solace_agent_mesh/gateway/http_sse/routers/config.py index a3b3171d42..d80dd46aff 100644 --- a/src/solace_agent_mesh/gateway/http_sse/routers/config.py +++ b/src/solace_agent_mesh/gateway/http_sse/routers/config.py @@ -11,6 +11,12 @@ from ..routers.dto.requests.project_requests import CreateProjectRequest from ....gateway.http_sse.dependencies import get_sac_component, get_api_config +from ...constants import ( + DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES, + DEFAULT_MAX_BATCH_UPLOAD_SIZE_BYTES, + DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES, + DEFAULT_MAX_PROJECT_SIZE_BYTES, +) if TYPE_CHECKING: from ..component import WebUIBackendComponent @@ -20,12 +26,6 @@ router = APIRouter() -# Default max upload size (50MB) - matches gateway_max_upload_size_bytes default -DEFAULT_MAX_UPLOAD_SIZE_BYTES = 52428800 -# Default max ZIP upload size (100MB) - for project import ZIP files -DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES = 104857600 - - def _get_validation_limits(component: "WebUIBackendComponent" = None) -> Dict[str, Any]: """ Extract validation limits from Pydantic models to expose to frontend. 
@@ -33,25 +33,35 @@ def _get_validation_limits(component: "WebUIBackendComponent" = None) -> Dict[st """ # Extract limits from CreateProjectRequest model create_fields = CreateProjectRequest.model_fields - - # Get max upload size from component config, with fallback to default - max_upload_size_bytes = ( - component.get_config("gateway_max_upload_size_bytes", DEFAULT_MAX_UPLOAD_SIZE_BYTES) - if component else DEFAULT_MAX_UPLOAD_SIZE_BYTES + + max_per_file_upload_size_bytes = ( + component.get_config("gateway_max_upload_size_bytes", DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES) + if component else DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES ) - - # Get max ZIP upload size from component config, with fallback to default (100MB) + + max_batch_upload_size_bytes = ( + component.get_config("gateway_max_batch_upload_size_bytes", DEFAULT_MAX_BATCH_UPLOAD_SIZE_BYTES) + if component else DEFAULT_MAX_BATCH_UPLOAD_SIZE_BYTES + ) + max_zip_upload_size_bytes = ( component.get_config("gateway_max_zip_upload_size_bytes", DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES) if component else DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES ) - + + max_project_size_bytes = ( + component.get_config("gateway_max_project_size_bytes", DEFAULT_MAX_PROJECT_SIZE_BYTES) + if component else DEFAULT_MAX_PROJECT_SIZE_BYTES + ) + return { "projectNameMax": create_fields["name"].metadata[1].max_length if create_fields["name"].metadata else 255, "projectDescriptionMax": create_fields["description"].metadata[0].max_length if create_fields["description"].metadata else 1000, "projectInstructionsMax": create_fields["system_prompt"].metadata[0].max_length if create_fields["system_prompt"].metadata else 4000, - "maxUploadSizeBytes": max_upload_size_bytes, + "maxPerFileUploadSizeBytes": max_per_file_upload_size_bytes, + "maxBatchUploadSizeBytes": max_batch_upload_size_bytes, "maxZipUploadSizeBytes": max_zip_upload_size_bytes, + "maxProjectSizeBytes": max_project_size_bytes, } diff --git a/src/solace_agent_mesh/gateway/http_sse/services/project_service.py b/src/solace_agent_mesh/gateway/http_sse/services/project_service.py index aa2b16827c..1870993242 100644 --- a/src/solace_agent_mesh/gateway/http_sse/services/project_service.py +++ b/src/solace_agent_mesh/gateway/http_sse/services/project_service.py @@ -6,16 +6,19 @@ import logging import json import zipfile +import os from io import BytesIO from fastapi import UploadFile from datetime import datetime, timezone from ....agent.utils.artifact_helpers import get_artifact_info_list, save_artifact_with_metadata, get_artifact_counts_batch - -# Default max upload size (50MB) - matches gateway_max_upload_size_bytes default -DEFAULT_MAX_UPLOAD_SIZE_BYTES = 52428800 -# Default max ZIP upload size (100MB) - for project import ZIP files -DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES = 104857600 +from ...constants import ( + DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES, + DEFAULT_MAX_BATCH_UPLOAD_SIZE_BYTES, + DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES, + DEFAULT_MAX_PROJECT_SIZE_BYTES, + ARTIFACTS_PREFIX +) try: from google.adk.artifacts import BaseArtifactService @@ -35,6 +38,18 @@ class BaseArtifactService: from ..component import WebUIBackendComponent +def bytes_to_mb(size_bytes: int) -> float: + """Convert bytes to megabytes.""" + return size_bytes / (1024 * 1024) + + +def sanitize_for_log(value: str) -> str: + """Strip control characters to prevent log injection.""" + if not value: + return "" + return "".join(c for c in str(value) if c >= " ") + + class ProjectService: """Service layer for project business logic.""" @@ -47,7 +62,7 @@ def __init__( 
self.artifact_service = component.get_shared_artifact_service() if component else None self.app_name = component.get_config("name", "WebUIBackendApp") if component else "WebUIBackendApp" self.logger = logging.getLogger(__name__) - + # Initialize resource sharing service if resource_sharing_service: self._resource_sharing_service = resource_sharing_service @@ -55,29 +70,46 @@ def __init__( # Get from registry (returns class, need to instantiate) service_class = MiddlewareRegistry.get_resource_sharing_service() self._resource_sharing_service = service_class() - - # Get max upload size from component config, with fallback to default - # Ensure values are integers for proper formatting - max_upload_config = ( - component.get_config("gateway_max_upload_size_bytes", DEFAULT_MAX_UPLOAD_SIZE_BYTES) - if component else DEFAULT_MAX_UPLOAD_SIZE_BYTES + + # Get config values, with fallback to default (ensure values are integers for proper formatting) + max_per_file_upload_config = ( + component.get_config("gateway_max_upload_size_bytes", DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES) + if component else DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES ) - self.max_upload_size_bytes = int(max_upload_config) if isinstance(max_upload_config, (int, float)) else DEFAULT_MAX_UPLOAD_SIZE_BYTES - - # Get max ZIP upload size from component config, with fallback to default (100MB) + self.max_per_file_upload_size_bytes = int(max_per_file_upload_config) if isinstance(max_per_file_upload_config, (int, float)) else DEFAULT_MAX_PER_FILE_UPLOAD_SIZE_BYTES + + max_batch_upload_config = ( + component.get_config("gateway_max_batch_upload_size_bytes", DEFAULT_MAX_BATCH_UPLOAD_SIZE_BYTES) + if component else DEFAULT_MAX_BATCH_UPLOAD_SIZE_BYTES + ) + self.max_batch_upload_size_bytes = int(max_batch_upload_config) if isinstance(max_batch_upload_config, (int, float)) else DEFAULT_MAX_BATCH_UPLOAD_SIZE_BYTES + max_zip_config = ( component.get_config("gateway_max_zip_upload_size_bytes", DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES) if component else DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES ) self.max_zip_upload_size_bytes = int(max_zip_config) if isinstance(max_zip_config, (int, float)) else DEFAULT_MAX_ZIP_UPLOAD_SIZE_BYTES - + + max_project_size_config = ( + component.get_config("gateway_max_project_size_bytes", DEFAULT_MAX_PROJECT_SIZE_BYTES) + if component else DEFAULT_MAX_PROJECT_SIZE_BYTES + ) + self.max_project_size_bytes = int(max_project_size_config) if isinstance(max_project_size_config, (int, float)) else DEFAULT_MAX_PROJECT_SIZE_BYTES + self.logger.info( - "[ProjectService] Initialized with max_upload_size_bytes=%d (%.2f MB), " - "max_zip_upload_size_bytes=%d (%.2f MB)", - self.max_upload_size_bytes, - self.max_upload_size_bytes / (1024*1024), + "[ProjectService] Initialized with " + "max_per_file_upload_size_bytes=%d (%.2f MB), " + "max_batch_upload_size_bytes=%d (%.2f MB), " + "max_zip_upload_size_bytes=%d (%.2f MB), " + "max_project_size_bytes=%d (%.2f MB)", + self.max_per_file_upload_size_bytes, + bytes_to_mb(self.max_per_file_upload_size_bytes), + self.max_batch_upload_size_bytes, + bytes_to_mb(self.max_batch_upload_size_bytes), self.max_zip_upload_size_bytes, - self.max_zip_upload_size_bytes / (1024*1024) + bytes_to_mb(self.max_zip_upload_size_bytes), + self.max_project_size_bytes, + bytes_to_mb(self.max_project_size_bytes) ) def _get_repositories(self, db): @@ -117,11 +149,11 @@ async def _validate_file_size(self, file: UploadFile, log_prefix: str = "") -> b total_bytes_read += chunk_len # Validate size during reading (fail fast) - if 
total_bytes_read > self.max_upload_size_bytes:
+                if total_bytes_read > self.max_per_file_upload_size_bytes:
                     error_msg = (
                         f"File '{file.filename}' rejected: size exceeds maximum "
-                        f"{self.max_upload_size_bytes:,} bytes "
-                        f"({self.max_upload_size_bytes / (1024*1024):.2f} MB). "
+                        f"{self.max_per_file_upload_size_bytes:,} bytes "
+                        f"({bytes_to_mb(self.max_per_file_upload_size_bytes):.2f} MB). "
                         f"Read {total_bytes_read:,} bytes so far."
                     )
                     self.logger.warning(f"{log_prefix} {error_msg}")
@@ -158,6 +190,84 @@ async def _validate_files(
         )
         return validated_files
 
+    def _validate_batch_upload_size(
+        self,
+        files_size: int,
+        log_prefix: str = ""
+    ) -> None:
+        """
+        Validate that the total size of files in a single upload batch doesn't exceed the limit.
+        This is independent of the total project size.
+
+        Args:
+            files_size: Total size of all files being uploaded in this batch (bytes)
+            log_prefix: Logging prefix
+
+        Raises:
+            ValueError: If batch size exceeds limit
+        """
+
+        files_mb = bytes_to_mb(files_size)
+        limit_mb = bytes_to_mb(self.max_batch_upload_size_bytes)
+
+        if files_size > self.max_batch_upload_size_bytes:
+            error_msg = (
+                f"Batch upload size limit exceeded. "
+                f"Total files in this upload: {files_mb:.2f} MB exceeds limit of {limit_mb:.2f} MB."
+            )
+            self.logger.warning(f"{log_prefix} {error_msg}")
+            raise ValueError(error_msg)
+
+        self.logger.debug(
+            f"{log_prefix} Batch upload size check passed: "
+            f"{files_mb:.2f} MB / {limit_mb:.2f} MB "
+            f"({(files_size / self.max_batch_upload_size_bytes * 100):.1f}% of batch limit)"
+        )
+
+    def _validate_project_size_limit(
+        self,
+        current_project_size: int,
+        new_files_size: int,
+        log_prefix: str = ""
+    ) -> None:
+        """
+        Validate that adding new files won't exceed the project's total size limit.
+
+        Only counts user-uploaded files (source="project") toward the limit.
+        LLM-generated artifacts and system files are excluded.
+
+        Args:
+            current_project_size: Current total size of user-uploaded files in bytes
+            new_files_size: Total size of new files being added in bytes
+            log_prefix: Logging prefix
+
+        Raises:
+            ValueError: If combined size would exceed limit
+        """
+
+        total_size = current_project_size + new_files_size
+
+        current_mb = bytes_to_mb(current_project_size)
+        new_mb = bytes_to_mb(new_files_size)
+        total_mb = bytes_to_mb(total_size)
+        limit_mb = bytes_to_mb(self.max_project_size_bytes)
+
+        if total_size > self.max_project_size_bytes:
+            error_msg = (
+                f"Project size limit exceeded. "
+                f"Current: {current_mb:.2f} MB, "
+                f"New files: {new_mb:.2f} MB, "
+                f"Total: {total_mb:.2f} MB exceeds limit of {limit_mb:.2f} MB."
+ ) + self.logger.warning(f"{log_prefix} {error_msg}") + raise ValueError(error_msg) + + self.logger.debug( + f"{log_prefix} Project size check passed: " + f"{total_mb:.2f} MB / {limit_mb:.2f} MB " + f"({(total_size / self.max_project_size_bytes * 100):.1f}% used)" + ) + async def create_project( self, db, @@ -187,7 +297,7 @@ async def create_project( Raises: ValueError: If project name is invalid, user_id is missing, or file size exceeds limit """ - log_prefix = f"[ProjectService:create_project] User {user_id}:" + log_prefix = f"[ProjectService:create_project] User {sanitize_for_log(user_id)}:" self.logger.info(f"Creating new project '{name}' for user {user_id}") # Business validation @@ -204,6 +314,19 @@ async def create_project( validated_files = await self._validate_files(files, log_prefix) self.logger.info(f"{log_prefix} All {len(files)} files passed size validation") + new_files_size = sum(len(content) for _, content in validated_files) + + self._validate_batch_upload_size( + files_size=new_files_size, + log_prefix=log_prefix + ) + + self._validate_project_size_limit( + current_project_size=0, + new_files_size=new_files_size, + log_prefix=log_prefix + ) + project_repository = self._get_repositories(db) # Check for duplicate project name for this user (only owned projects) @@ -370,7 +493,7 @@ async def get_project_artifacts(self, db, project_id: str, user_id: str) -> List raise ValueError("Project not found or access denied") if not self.artifact_service: - self.logger.warning(f"Attempted to get artifacts for project {project_id} but no artifact service is configured.") + self.logger.warning("Attempted to get artifacts for project but no artifact service is configured.") return [] storage_user_id = project.user_id @@ -410,7 +533,7 @@ async def add_artifacts_to_project( Raises: ValueError: If project not found, access denied, or file size exceeds limit """ - log_prefix = f"[ProjectService:add_artifacts] Project {project_id}, User {user_id}:" + log_prefix = f"[ProjectService:add_artifacts] Project {sanitize_for_log(project_id)}, User {sanitize_for_log(user_id)}:" project = self.get_project(db, project_id, user_id) if not project: @@ -432,6 +555,23 @@ async def add_artifacts_to_project( validated_files = await self._validate_files(files, log_prefix) self.logger.info(f"{log_prefix} All {len(files)} files passed size validation") + new_files_size = sum(len(content) for _, content in validated_files) + + self._validate_batch_upload_size( + files_size=new_files_size, + log_prefix=log_prefix + ) + + existing_artifacts = await self.get_project_artifacts(db, project_id, user_id) + # Only count user-uploaded artifacts, which have the "source" metadata set to "project" + current_project_size = sum(artifact.size for artifact in existing_artifacts if artifact.source == "project") + + self._validate_project_size_limit( + current_project_size=current_project_size, + new_files_size=new_files_size, + log_prefix=log_prefix + ) + self.logger.info(f"Adding {len(validated_files)} artifacts to project {project_id} for user {user_id}") storage_session_id = f"project-{project.id}" results = [] @@ -803,7 +943,7 @@ async def export_project_as_zip( if artifact_part and artifact_part.inline_data: # Add to ZIP under artifacts/ directory zip_file.writestr( - f'artifacts/{artifact.filename}', + f'{ARTIFACTS_PREFIX}{artifact.filename}', artifact_part.inline_data.data ) except Exception as e: @@ -834,7 +974,7 @@ async def import_project_from_zip( Raises: ValueError: If ZIP is invalid, import fails, or file size 
exceeds limit """ - log_prefix = f"[ProjectService:import_project] User {user_id}:" + log_prefix = f"[ProjectService:import_project] User {sanitize_for_log(user_id)}:" warnings = [] # Read ZIP file content with size validation @@ -845,8 +985,8 @@ async def import_project_from_zip( # Validate ZIP file size (separate, larger limit than individual artifacts) if zip_size > self.max_zip_upload_size_bytes: - max_size_mb = self.max_zip_upload_size_bytes / (1024 * 1024) - file_size_mb = zip_size / (1024 * 1024) + max_size_mb = bytes_to_mb(self.max_zip_upload_size_bytes) + file_size_mb = bytes_to_mb(zip_size) error_msg = ( f"ZIP file '{zip_file.filename}' rejected: size ({file_size_mb:.2f} MB) " f"exceeds maximum allowed ({max_size_mb:.2f} MB)" @@ -891,7 +1031,36 @@ async def import_project_from_zip( # Get default agent ID, but set to None if not provided # The agent may not exist in the target environment imported_agent_id = project_data['project'].get('defaultAgentId') - + + # Pre-calculate total artifacts size for limit validation + artifact_files = [ + name for name in zip_ref.namelist() + if name.startswith(ARTIFACTS_PREFIX) and name != ARTIFACTS_PREFIX + ] + + total_artifacts_size = 0 + oversized_artifacts = [] + + for artifact_path in artifact_files: + file_info = zip_ref.getinfo(artifact_path) + uncompressed_size = file_info.file_size + + # Track oversized files (will be skipped during import) + if uncompressed_size > self.max_per_file_upload_size_bytes: + safe_filename = os.path.basename(artifact_path) + oversized_artifacts.append( + (safe_filename, uncompressed_size) + ) + continue + + total_artifacts_size += uncompressed_size + + self._validate_project_size_limit( + current_project_size=0, + new_files_size=total_artifacts_size, + log_prefix=log_prefix + ) + # Create project (agent validation happens in create_project if needed) project = await self.create_project( db=db, @@ -913,20 +1082,23 @@ async def import_project_from_zip( artifacts_imported = 0 if self.artifact_service: storage_session_id = f"project-{project.id}" - artifact_files = [ - name for name in zip_ref.namelist() - if name.startswith('artifacts/') and name != 'artifacts/' - ] for artifact_path in artifact_files: try: - filename = artifact_path.replace('artifacts/', '') + filename = os.path.basename(artifact_path) + + # Validate filename is safe + if not filename or filename in ('.', '..'): + self.logger.warning(f"{log_prefix} Skipping invalid filename in ZIP: {artifact_path}") + warnings.append(f"Skipped invalid filename: {artifact_path}") + continue + content_bytes = zip_ref.read(artifact_path) # Skip oversized artifacts with a warning (don't fail the entire import) - if len(content_bytes) > self.max_upload_size_bytes: - max_size_mb = self.max_upload_size_bytes / (1024 * 1024) - file_size_mb = len(content_bytes) / (1024 * 1024) + if len(content_bytes) > self.max_per_file_upload_size_bytes: + max_size_mb = bytes_to_mb(self.max_per_file_upload_size_bytes) + file_size_mb = bytes_to_mb(len(content_bytes)) skip_msg = ( f"Skipped '{filename}': size ({file_size_mb:.2f} MB) " f"exceeds maximum allowed ({max_size_mb:.2f} MB)" diff --git a/tests/integration/apis/persistence/projects/conftest.py b/tests/integration/apis/persistence/projects/conftest.py index 56e7dd026d..6d811c3847 100644 --- a/tests/integration/apis/persistence/projects/conftest.py +++ b/tests/integration/apis/persistence/projects/conftest.py @@ -5,9 +5,18 @@ """ import pytest +from fastapi import Request from fastapi.testclient import TestClient -from unittest.mock 
import AsyncMock +from sam_test_infrastructure.artifact_service.service import TestInMemoryArtifactService from sam_test_infrastructure.fastapi_service.webui_backend_factory import WebUIBackendFactory +from solace_agent_mesh.shared.api.auth_utils import get_current_user + +# If get_user_id is not available, define a fallback or mock for testing purposes. +try: + from solace_agent_mesh.gateway.http_sse.dependencies import get_user_id +except ImportError: + def get_user_id(request: Request): + return request.headers.get("X-Test-User-Id", "sam_dev_user") # Custom header for test user identification (matches parent conftest) @@ -15,9 +24,7 @@ def _create_custom_config_client( - db_url: str, - projects_enabled: bool = True, - feature_flag_enabled: bool = True + db_url: str, projects_enabled: bool = True, feature_flag_enabled: bool = True ): """ Helper to create a test client with custom project configuration. @@ -32,9 +39,6 @@ def _create_custom_config_client( """ factory = WebUIBackendFactory(db_url=db_url) - # Store original get_config - original_get_config = factory.mock_component.get_config - def custom_get_config(key, default=None): # Override specific config keys if key == "projects": @@ -65,16 +69,25 @@ def custom_get_config(key, default=None): return "A2A Agent" if key == "frontend_logo_url": return "" + if key == "gateway_max_upload_size_bytes": + # Test override: 1MB per-file limit for projects tests + return 1024 * 1024 # 1MB + if key == "gateway_max_batch_upload_size_bytes": + # Test override: 2MB batch upload limit for projects tests + return 2097152 # 2MB + if key == "gateway_max_project_size_bytes": + # Test override: 3MB total project size limit + return 3145728 # 3MB # For other keys, return the default to avoid Mock objects return default if default is not None else {} factory.mock_component.get_config = custom_get_config - # Set up auth overrides - from fastapi import Request - from solace_agent_mesh.shared.api.auth_utils import get_current_user - from solace_agent_mesh.gateway.http_sse.dependencies import get_user_id + # Set up artifact service + artifact_service = TestInMemoryArtifactService() + factory.mock_component.get_shared_artifact_service = lambda: artifact_service + factory._test_artifact_service = artifact_service async def override_get_current_user(request: Request): user_id = request.headers.get(TEST_USER_HEADER, "sam_dev_user") @@ -121,9 +134,7 @@ def projects_disabled_client(db_provider): db_url = str(db_provider.get_sync_gateway_engine().url) client = _create_custom_config_client( - db_url=db_url, - projects_enabled=False, - feature_flag_enabled=True + db_url=db_url, projects_enabled=False, feature_flag_enabled=True ) yield client @@ -140,9 +151,7 @@ def feature_flag_disabled_client(db_provider): db_url = str(db_provider.get_sync_gateway_engine().url) client = _create_custom_config_client( - db_url=db_url, - projects_enabled=True, - feature_flag_enabled=False + db_url=db_url, projects_enabled=True, feature_flag_enabled=False ) yield client @@ -159,9 +168,7 @@ def both_disabled_client(db_provider): db_url = str(db_provider.get_sync_gateway_engine().url) client = _create_custom_config_client( - db_url=db_url, - projects_enabled=False, - feature_flag_enabled=False + db_url=db_url, projects_enabled=False, feature_flag_enabled=False ) yield client @@ -178,9 +185,7 @@ def both_enabled_client(db_provider): db_url = str(db_provider.get_sync_gateway_engine().url) client = _create_custom_config_client( - db_url=db_url, - projects_enabled=True, - 
feature_flag_enabled=True + db_url=db_url, projects_enabled=True, feature_flag_enabled=True ) yield client diff --git a/tests/integration/apis/persistence/projects/test_projects_upload_limits.py b/tests/integration/apis/persistence/projects/test_projects_upload_limits.py new file mode 100644 index 0000000000..2ee2cc07e0 --- /dev/null +++ b/tests/integration/apis/persistence/projects/test_projects_upload_limits.py @@ -0,0 +1,323 @@ +""" +Tests for project upload size limits. + +Test-specific overrides (see conftest.py): +- Per-file limit: 1MB (gateway_max_upload_size_bytes) +- Batch limit: 2MB (gateway_max_batch_upload_size_bytes) +- Project total limit: 3MB (gateway_max_project_size_bytes) +""" + +import io +import json +import zipfile + +from fastapi.testclient import TestClient + +from tests.integration.apis.infrastructure.gateway_adapter import GatewayAdapter + +KB = 1024 +MB = 1024 * KB + + +def make_file(name: str, size: int): + return ("files", (name, io.BytesIO(b"x" * size), "text/plain")) + + +def make_files(count: int, size: int, prefix: str = "file"): + return [ + ( + "files", + (f"{prefix}_{i}.txt", io.BytesIO(bytes([i % 256]) * size), "text/plain"), + ) + for i in range(count) + ] + + +def seed(gw: GatewayAdapter, project_id: str): + gw.seed_project( + project_id=project_id, + name=project_id, + user_id="sam_dev_user", + description="", + ) + + +class TestPerFileSizeLimit: + + def test_file_over_limit_rejected(self, both_enabled_client: TestClient): + response = both_enabled_client.post( + "/api/v1/projects", + data={"name": "Test", "description": ""}, + files=[make_file("big.txt", MB + 1)], + ) + assert response.status_code == 413 + assert "exceeds maximum" in response.json()["detail"] + + def test_file_at_limit_succeeds(self, both_enabled_client: TestClient): + response = both_enabled_client.post( + "/api/v1/projects", + data={"name": "Test", "description": ""}, + files=[make_file("exact.txt", MB)], + ) + assert response.status_code == 201 + + def test_oversized_file_in_batch_rejects_entire_upload( + self, both_enabled_client: TestClient + ): + response = both_enabled_client.post( + "/api/v1/projects", + data={"name": "Test", "description": ""}, + files=[make_file("ok.txt", 100 * KB), make_file("big.txt", MB + 1)], + ) + assert response.status_code == 413 + + def test_file_over_limit_on_artifact_upload( + self, both_enabled_client: TestClient, gateway_adapter: GatewayAdapter + ): + seed(gateway_adapter, "per-file-artifact") + response = both_enabled_client.post( + "/api/v1/projects/per-file-artifact/artifacts", + files=[make_file("big.txt", MB + 1)], + ) + assert response.status_code == 413 + + +class TestBatchUploadSizeLimit: + + def test_batch_exceeds_limit_on_creation(self, both_enabled_client: TestClient): + response = both_enabled_client.post( + "/api/v1/projects", + data={"name": "Test", "description": ""}, + files=make_files(3, 800 * KB), # 2.4MB > 2MB + ) + assert response.status_code == 400 + assert "Batch upload size limit exceeded" in response.json()["detail"] + + def test_batch_at_limit_succeeds(self, both_enabled_client: TestClient): + response = both_enabled_client.post( + "/api/v1/projects", + data={"name": "Test", "description": ""}, + files=[make_file("f1.txt", MB), make_file("f2.txt", MB)], # exactly 2MB + ) + assert response.status_code == 201 + + def test_batch_limit_checked_before_project_limit( + self, both_enabled_client: TestClient, gateway_adapter: GatewayAdapter + ): + seed(gateway_adapter, "batch-before-project") + both_enabled_client.post( + 
"/api/v1/projects/batch-before-project/artifacts", + files=[make_file("seed.txt", MB)], + ) + # 2.4MB batch exceeds 2MB batch limit; also would exceed 3MB project total, + # but batch validation fires first + response = both_enabled_client.post( + "/api/v1/projects/batch-before-project/artifacts", + files=make_files(3, 800 * KB), + ) + assert response.status_code == 400 + assert "Batch upload size limit exceeded" in response.json()["detail"] + + +class TestProjectTotalSizeLimit: + + def test_cumulative_uploads_exceed_limit( + self, both_enabled_client: TestClient, gateway_adapter: GatewayAdapter + ): + seed(gateway_adapter, "project-total") + for i in range(3): + r = both_enabled_client.post( + "/api/v1/projects/project-total/artifacts", + files=[make_file(f"f{i}.txt", 900 * KB)], + ) + assert r.status_code == 201 + + response = both_enabled_client.post( + "/api/v1/projects/project-total/artifacts", + files=[make_file("f3.txt", 900 * KB)], + ) + assert response.status_code == 400 + assert "Project size limit exceeded" in response.json()["detail"] + + def test_batch_under_batch_limit_but_over_project_limit( + self, both_enabled_client: TestClient, gateway_adapter: GatewayAdapter + ): + seed(gateway_adapter, "batch-vs-project") + r = both_enabled_client.post( + "/api/v1/projects/batch-vs-project/artifacts", + files=[ + make_file("f1.txt", 900 * KB), + make_file("f2.txt", 900 * KB), + ], # 1.8MB batch + ) + assert r.status_code == 201 + + # 1.8MB batch (under 2MB batch limit), but 3.6MB total (over 3MB project limit) + response = both_enabled_client.post( + "/api/v1/projects/batch-vs-project/artifacts", + files=[ + make_file("f3.txt", 900 * KB), + make_file("f4.txt", 900 * KB), + ], + ) + assert response.status_code == 400 + assert "Project size limit exceeded" in response.json()["detail"] + + +class TestZipImportLimits: + + def _make_zip(self, file_count: int, file_size: int): + buf = io.BytesIO() + with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf: + meta = { + "version": "1.0", + "exportedAt": 1234567890, + "project": { + "name": "Imported", + "description": "", + "systemPrompt": None, + "defaultAgentId": None, + "metadata": { + "originalCreatedAt": "2024-01-01T00:00:00Z", + "artifactCount": file_count, + "totalSizeBytes": file_count * file_size, + }, + }, + "artifacts": [], + } + for i in range(file_count): + meta["artifacts"].append( + { + "filename": f"f{i}.txt", + "mimeType": "text/plain", + "size": file_size, + "metadata": {"source": "project"}, + } + ) + zf.writestr(f"artifacts/f{i}.txt", bytes([i % 256]) * file_size) + zf.writestr("project.json", json.dumps(meta)) + buf.seek(0) + return buf + + def test_zip_exceeding_project_limit_rejected( + self, both_enabled_client: TestClient + ): + response = both_enabled_client.post( + "/api/v1/projects/import", + files={ + "file": ( + "large.zip", + self._make_zip(4, 900 * KB), # 3.6MB > 3MB + "application/zip", + ) + }, + ) + assert response.status_code == 400 + + def test_zip_within_limits_succeeds(self, both_enabled_client: TestClient): + response = both_enabled_client.post( + "/api/v1/projects/import", + files={ + "file": ( + "valid.zip", + self._make_zip(3, 900 * KB), # 2.7MB < 3MB + "application/zip", + ) + }, + ) + assert response.status_code == 200 + assert response.json()["artifactsImported"] == 3 + + def test_zip_skips_oversized_individual_files( + self, both_enabled_client: TestClient + ): + buf = io.BytesIO() + with zipfile.ZipFile(buf, "w", zipfile.ZIP_DEFLATED) as zf: + meta = { + "version": "1.0", + "exportedAt": 
1234567890, + "project": { + "name": "Mixed", + "description": "", + "systemPrompt": None, + "defaultAgentId": None, + "metadata": { + "originalCreatedAt": "2024-01-01T00:00:00Z", + "artifactCount": 3, + "totalSizeBytes": (MB + 1) + 2 * (100 * KB), + }, + }, + "artifacts": [ + { + "filename": "big.txt", + "mimeType": "text/plain", + "size": MB + 1, + "metadata": {"source": "project"}, + }, + { + "filename": "ok1.txt", + "mimeType": "text/plain", + "size": 100 * KB, + "metadata": {"source": "project"}, + }, + { + "filename": "ok2.txt", + "mimeType": "text/plain", + "size": 100 * KB, + "metadata": {"source": "project"}, + }, + ], + } + zf.writestr("artifacts/big.txt", b"x" * (MB + 1)) + zf.writestr("artifacts/ok1.txt", b"y" * (100 * KB)) + zf.writestr("artifacts/ok2.txt", b"z" * (100 * KB)) + zf.writestr("project.json", json.dumps(meta)) + buf.seek(0) + response = both_enabled_client.post( + "/api/v1/projects/import", + files={"file": ("mixed.zip", buf, "application/zip")}, + ) + assert response.status_code == 200 + assert response.json()["artifactsImported"] == 2 + + +class TestFileDeletionAndReupload: + + def test_delete_frees_space_for_upload( + self, both_enabled_client: TestClient, gateway_adapter: GatewayAdapter + ): + seed(gateway_adapter, "delete-reupload") + for i in range(3): + both_enabled_client.post( + "/api/v1/projects/delete-reupload/artifacts", + files=[make_file(f"f{i}.txt", 900 * KB)], + ) + + both_enabled_client.delete( + "/api/v1/projects/delete-reupload/artifacts/f0.txt" + ) + response = both_enabled_client.post( + "/api/v1/projects/delete-reupload/artifacts", + files=[make_file("f3.txt", 900 * KB)], + ) + assert response.status_code == 201 + + def test_replace_with_larger_file_exceeds_limit( + self, both_enabled_client: TestClient, gateway_adapter: GatewayAdapter + ): + seed(gateway_adapter, "replace-larger") + for i in range(3): + both_enabled_client.post( + "/api/v1/projects/replace-larger/artifacts", + files=[make_file(f"f{i}.txt", 900 * KB)], + ) + both_enabled_client.post( + "/api/v1/projects/replace-larger/artifacts", + files=[make_file("small.txt", 100 * KB)], + ) + + response = both_enabled_client.post( + "/api/v1/projects/replace-larger/artifacts", + files=[make_file("small.txt", 900 * KB)], + ) + assert response.status_code == 400
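+
+
+class TestExactLimitBoundary:
+    """Illustrative boundary sketch: assumes the same 1MB/2MB/3MB overrides as
+    the tests above. The service-side checks use strict greater-than
+    comparisons, so totals exactly at a limit should be accepted."""
+
+    def test_project_exactly_at_limit_succeeds(
+        self, both_enabled_client: TestClient, gateway_adapter: GatewayAdapter
+    ):
+        seed(gateway_adapter, "exact-limit")
+        # Three 1MB files: each at the per-file limit, each batch under the
+        # 2MB batch limit, cumulative total exactly at the 3MB project limit
+        for i in range(3):
+            r = both_enabled_client.post(
+                "/api/v1/projects/exact-limit/artifacts",
+                files=[make_file(f"f{i}.txt", MB)],
+            )
+            assert r.status_code == 201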