diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 2a1e9ad0c..906471c43 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -254,7 +254,7 @@ jobs: - run: pytest -vv tests/integration/workflows/custom_make python-integration: - name: ${{ matrix.os }} / ${{ matrix.python }} / python + name: ${{ matrix.os }} / ${{ matrix.python }} / python (pip + uv) if: github.repository_owner == 'aws' runs-on: ${{ matrix.os }} strategy: @@ -279,8 +279,15 @@ jobs: python -m pip install --upgrade pip pip install --upgrade setuptools if: ${{ matrix.os }} == 'ubuntu-latest' && ${{ matrix.python }} == '3.12' + # Install UV for python_uv workflow tests + - name: Install UV + uses: astral-sh/setup-uv@v4 + with: + enable-cache: true - run: make init + # Test both python_pip and python_uv workflows - run: pytest -vv tests/integration/workflows/python_pip + - run: pytest -vv tests/integration/workflows/python_uv ruby-integration: name: ${{ matrix.os }} / ${{ matrix.python }} / ruby diff --git a/Makefile b/Makefile index 4f6d51d97..da89ee93e 100644 --- a/Makefile +++ b/Makefile @@ -21,7 +21,7 @@ lint: ruff check aws_lambda_builders lint-fix: - ruff aws_lambda_builders --fix + ruff check aws_lambda_builders --fix # Command to run everytime you make changes to verify everything works dev: lint test diff --git a/aws_lambda_builders/workflows/__init__.py b/aws_lambda_builders/workflows/__init__.py index 37dc4ba71..d9d5512da 100644 --- a/aws_lambda_builders/workflows/__init__.py +++ b/aws_lambda_builders/workflows/__init__.py @@ -10,5 +10,6 @@ import aws_lambda_builders.workflows.nodejs_npm import aws_lambda_builders.workflows.nodejs_npm_esbuild import aws_lambda_builders.workflows.python_pip +import aws_lambda_builders.workflows.python_uv import aws_lambda_builders.workflows.ruby_bundler import aws_lambda_builders.workflows.rust_cargo diff --git a/aws_lambda_builders/workflows/python_uv/DESIGN.md b/aws_lambda_builders/workflows/python_uv/DESIGN.md new file mode 100644 index 000000000..c3818b1c0 --- /dev/null +++ b/aws_lambda_builders/workflows/python_uv/DESIGN.md @@ -0,0 +1,355 @@ +## Python - UV Lambda Builder + +### Scope + +This package provides a Python dependency builder that uses UV (An extremely fast Python package installer and resolver, written in Rust) as an alternative to the traditional pip-based workflow. The scope for this builder is to take an existing directory containing customer code, and dependency specification files (such as `pyproject.toml` or `requirements.txt`) and use UV to build and include the dependencies in the customer code bundle in a way that makes them importable in AWS Lambda. + +UV offers several advantages over pip: +- **Performance**: UV is significantly faster than pip for dependency resolution and installation +- **Better dependency resolution**: More reliable and consistent dependency resolution +- **Lock file support**: Native support for lock files for reproducible builds +- **Modern Python packaging**: Built-in support for modern Python packaging standards (PEP 517/518) +- **Virtual environment management**: Integrated virtual environment handling + +### Challenges + +Similar to the Python PIP workflow, Python packaging for AWS Lambda presents unique challenges: + +1. **Platform compatibility**: Python packages often contain platform-specific code or compiled extensions that must be compatible with the AWS Lambda runtime environment (Amazon Linux 2) + +2. 
**Architecture compatibility**: Packages must be compatible with the target Lambda architecture (x86_64 or arm64) + +3. **Dependency resolution complexity**: Complex dependency trees with potential conflicts need to be resolved consistently + +4. **Binary dependencies**: Some packages require compilation of C extensions or have binary dependencies that must be built for the target platform + +5. **Package size optimization**: Lambda has deployment package size limits, requiring efficient dependency packaging + +UV addresses many of these challenges through: +- Better dependency resolution algorithms +- Improved handling of platform-specific wheels +- More efficient caching mechanisms +- Better support for lock files ensuring reproducible builds + +### Interface + +The top level interface is presented by the `PythonUvDependencyBuilder` class. There will be one public method `build_dependencies`, which takes the provided arguments and builds python dependencies using UV under the hood. + +```python +def build_dependencies(artifacts_dir_path, + scratch_dir_path, + manifest_path, + architecture=None, + config=None, + ): + """Builds a python project's dependencies into an artifact directory using UV. + + Note: The runtime parameter is passed to the PythonUvDependencyBuilder constructor, + not to this method. + + :type artifacts_dir_path: str + :param artifacts_dir_path: Directory to write dependencies into. + + :type scratch_dir_path: str + :param scratch_dir_path: Temporary directory for build operations and intermediate files. + + :type manifest_path: str + :param manifest_path: Path to a dependency manifest file. Supported manifests: + - pyproject.toml (preferred for modern Python projects) + - requirements.txt (traditional pip format) + - requirements-*.txt (environment-specific: dev, test, prod, etc.) + + Note: uv.lock is NOT a valid manifest - it's a lock file that automatically + enhances pyproject.toml builds when present in the same directory. + + :type runtime: str + :param runtime: Python version to build dependencies for. This can + be python3.8, python3.9, python3.10, python3.11, python3.12, or python3.13. + These are currently the only supported values. + Note: This parameter is passed to the PythonUvDependencyBuilder constructor. + + :type config: :class:`lambda_builders.actions.python_uv.utils.UvConfig` + :param config: Optional config object for customizing UV behavior, + including cache settings, index URLs, and build options. + + :type architecture: str + :param architecture: Target architecture for Lambda compatibility (x86_64 or arm64). + Defaults to x86_64 if not specified. + """ +``` + +### Usage Pattern + +The `PythonUvDependencyBuilder` follows a constructor + method call pattern: + +```python +# 1. Create builder with runtime +builder = PythonUvDependencyBuilder( + osutils=osutils, + runtime="python3.9", # Runtime specified here + uv_runner=uv_runner +) + +# 2. 
Call build_dependencies method +builder.build_dependencies( + artifacts_dir_path="/path/to/artifacts", + scratch_dir_path="/path/to/scratch", + manifest_path="/path/to/pyproject.toml", + architecture="x86_64", + config=uv_config +) +``` + +### Implementation + +The general algorithm for preparing a python package using UV for use on AWS Lambda follows a streamlined approach that leverages UV's advanced capabilities: + +#### Step 1: Smart manifest detection and dispatch + +The workflow uses a smart dispatch system that recognizes actual manifest files: + +**Supported Manifests:** +- `pyproject.toml` - Modern Python project manifest (preferred) +- `requirements.txt` - Traditional pip requirements file +- `requirements-*.txt` - Environment-specific variants (dev, prod, test, etc.) + +**Smart Lock File Detection:** +- When `pyproject.toml` is the manifest, automatically checks for `uv.lock` in the same directory +- If `uv.lock` exists alongside `pyproject.toml`, uses lock-based build for precise dependencies +- If no `uv.lock`, uses standard pyproject.toml build with UV's lock and export workflow + +**Important:** `uv.lock` is NOT a standalone manifest - it's a lock file that enhances `pyproject.toml` builds when present. + +#### Step 2: Build dependencies based on manifest type + +**For pyproject.toml with uv.lock present:** +- Use `uv sync` to install exact dependencies from lock file +- Provides reproducible builds with locked dependency versions + +**For pyproject.toml without uv.lock:** +- Use `uv lock` to create temporary lock file with resolved dependencies +- Use `uv export` to convert lock file to requirements.txt format +- Install dependencies using the exported requirements + +**For requirements.txt files:** +- Use `uv pip install` directly with Lambda-compatible settings + +#### Step 3: Configure Lambda-compatible installation + +UV is configured with Lambda-specific settings: +- Target platform: `linux` (Amazon Linux 2) +- Target architecture: `x86_64` or `aarch64` +- Python version matching Lambda runtime +- Prefer wheels over source distributions for faster builds + +#### Step 4: Install to target directory + +Install resolved dependencies to the Lambda deployment package: +- Extract packages to artifacts directory +- Maintain proper Python package structure +- Ensure all packages are importable from Lambda function + +This streamlined approach leverages UV's built-in capabilities rather than manually implementing dependency resolution, compilation handling, and optimization steps that UV already performs efficiently. 
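+
+The dispatch described above can be summarized with a short sketch. The helper below is illustrative only (the function name, defaults, and placeholder paths are not part of the packager's actual API); it shows which UV command lines each manifest type maps to:
+
+```python
+import os
+
+
+def plan_uv_commands(manifest_path, artifacts_dir, python_version="3.12",
+                     uv_platform="x86_64-unknown-linux-gnu"):
+    """Return the UV command lines a build would run for a given manifest."""
+    manifest = os.path.basename(manifest_path)
+    project_dir = os.path.dirname(manifest_path)
+
+    if manifest == "pyproject.toml":
+        if os.path.exists(os.path.join(project_dir, "uv.lock")):
+            # Lock file present: reproducible install via `uv sync`
+            return [["uv", "sync", "--python", python_version]]
+        # No lock file: lock, export to requirements.txt format, then install the export
+        exported = os.path.join(project_dir, "exported_requirements.txt")
+        return [
+            ["uv", "lock", "--python", python_version],
+            ["uv", "export", "--format", "requirements.txt",
+             "--no-emit-project", "--no-hashes", "--output-file", exported],
+            ["uv", "pip", "install", "-r", exported, "--target", artifacts_dir,
+             "--python-version", python_version, "--python-platform", uv_platform],
+        ]
+
+    # requirements.txt / requirements-*.txt: direct install into the artifacts dir
+    return [["uv", "pip", "install", "-r", manifest_path, "--target", artifacts_dir,
+             "--python-version", python_version, "--python-platform", uv_platform]]
+```
+
+In the actual implementation these command lines are assembled by `UvRunner` and executed through `SubprocessUv`, with the UV cache directed at the build's scratch directory.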
+ +### UV-Specific Features + +This workflow leverages several UV-specific features that provide advantages over the traditional pip workflow: + +#### Lock File Support +- **Reproducible builds**: `uv.lock` files ensure identical dependency versions across builds +- **Faster subsequent builds**: Lock files eliminate dependency resolution time +- **Conflict detection**: Early detection of dependency conflicts during resolution + +#### Advanced Dependency Resolution +- **Better conflict resolution**: UV's resolver handles complex dependency graphs more reliably +- **Version range optimization**: More intelligent selection of compatible versions +- **Platform-aware resolution**: Better handling of platform-specific dependencies + +#### Performance Optimizations +- **Parallel downloads**: Multiple packages downloaded simultaneously +- **Efficient caching**: Smart caching reduces redundant downloads and builds +- **Fast installs**: Rust-based implementation provides significant speed improvements + +#### Modern Python Standards +- **PEP 517/518 support**: Native support for modern Python packaging standards +- **pyproject.toml first**: Preferred support for modern project configuration +- **Build isolation**: Proper build environment isolation for reliable builds + +### Error Handling and Diagnostics + +The UV workflow provides enhanced error handling: + +1. **Dependency resolution errors**: Clear reporting of version conflicts and resolution failures +2. **Platform compatibility warnings**: Explicit warnings about potential platform issues +3. **Build failures**: Detailed error messages for compilation and build failures +4. **Lock file conflicts**: Detection and reporting of lock file inconsistencies +5. **Performance metrics**: Optional reporting of build times and cache efficiency + +### Configuration Options + +The workflow supports various configuration options through the config parameter: + +```python +config = { + "index_url": "https://pypi.org/simple/", # Custom package index + "extra_index_urls": [], # Additional package indexes + "cache_dir": "/tmp/uv-cache", # Custom cache directory + "no_cache": False, # Disable caching + "prerelease": "disallow", # Handle pre-release versions + "resolution": "highest", # Resolution strategy + "compile_bytecode": True, # Compile .pyc files + "exclude_newer": None, # Exclude packages newer than date + "generate_hashes": False, # Generate package hashes +} +``` + +### Compatibility with Existing Workflows + +The UV workflow is designed to be a drop-in replacement for the pip workflow: +- Supports the same manifest formats (requirements.txt, pyproject.toml) +- Uses native UV commands for pyproject.toml (lock/export workflow) +- Maintains the same output structure and package layout +- Compatible with existing Lambda deployment processes +- Provides migration path from pip-based builds +- Follows established requirements file naming conventions + +### Architecture Components + +The UV workflow consists of several key components that mirror the PIP workflow structure: + +#### Core Classes + +1. **PythonUvWorkflow**: Main workflow class that orchestrates the build process +2. **PythonUvBuildAction**: Action class that handles dependency resolution +3. **UvRunner**: Wrapper around UV command execution +4. **SubprocessUv**: Low-level UV subprocess interface +6. 
**PythonUvDependencyBuilder**: High-level dependency builder orchestrator + +#### File Structure +``` +python_uv/ +├── __init__.py +├── DESIGN.md +├── workflow.py # Main workflow implementation +├── actions.py # Build actions +├── packager.py # Core packaging logic +├── utils.py # Utility functions +└── exceptions.py # UV-specific exceptions +``` + +#### Capability Definition +```python +CAPABILITY = Capability( + language="python", + dependency_manager="uv", + application_framework=None +) +``` + +#### Smart Manifest Detection and Dispatch + +The workflow uses intelligent manifest detection: + +**Supported Manifests (in order of preference):** +1. `pyproject.toml` - Modern Python project manifest (preferred) +2. `requirements.txt` - Standard pip format +3. `requirements-*.txt` - Environment-specific variants (dev, test, prod, etc.) + +**Smart Lock File Enhancement:** +- When `pyproject.toml` is used, automatically detects `uv.lock` in the same directory +- If `uv.lock` exists, uses lock-based build for reproducible dependencies +- If no `uv.lock`, uses standard pyproject.toml workflow with UV's lock and export + +**Important:** `uv.lock` is NOT a standalone manifest - attempting to use it as one will result in an "Unsupported manifest file" error. + +#### Requirements File Naming Conventions +The workflow follows Python ecosystem standards for requirements files: +- `requirements.txt` - Standard format (primary) +- `requirements-dev.txt` - Development dependencies +- `requirements-test.txt` - Test dependencies +- `requirements-prod.txt` - Production dependencies +- `requirements-staging.txt` - Staging dependencies + +Note: `requirements.in` (pip-tools format) is not supported to keep the implementation simple and focused. + +#### UV Binary Requirements +- UV must be available on the system PATH +- Minimum UV version: 0.1.0 (to be determined based on feature requirements) +- Fallback: Attempt to install UV using pip if not found (optional behavior) + +#### Error Handling Strategy +- **MissingUvError**: UV binary not found on PATH +- **UvInstallationError**: UV installation/setup failures +- **UvResolutionError**: Dependency resolution failures +- **UvBuildError**: Package build failures +- **LockFileError**: Lock file parsing or validation errors + +#### Platform Compatibility Matrix +| Python Version | x86_64 | arm64 | Status | +|---------------|--------|-------|---------| +| python3.8 | ✓ | ✓ | Supported | +| python3.9 | ✓ | ✓ | Supported | +| python3.10 | ✓ | ✓ | Supported | +| python3.11 | ✓ | ✓ | Supported | +| python3.12 | ✓ | ✓ | Supported | +| python3.13 | ✓ | ✓ | Supported | + +#### Integration with Lambda Builders +- Registers with the workflow registry automatically +- Follows the same build lifecycle as other workflows +- Compatible with existing SAM CLI integration +- Supports all standard build options (scratch_dir, dependencies_dir, etc.) + +### Implementation Phases + +#### Phase 1: Core Infrastructure +1. Basic workflow and action classes +2. UV binary detection and validation +3. Simple requirements.txt support +4. Basic error handling + +#### Phase 2: Advanced Features +1. pyproject.toml support +2. Lock file handling +3. Advanced configuration options +4. Performance optimizations + +#### Phase 3: Production Readiness +1. Comprehensive testing +2. Error message improvements +3. Documentation and examples +4. 
Performance benchmarking + +### Testing Strategy + +#### Unit Tests +- UV binary detection and validation +- Manifest file parsing and detection +- Dependency resolution logic +- Error handling scenarios +- Platform compatibility checks + +#### Integration Tests +- End-to-end build scenarios +- Different manifest file formats +- Lock file generation and usage +- Multi-architecture builds +- Performance comparisons with pip + +#### Compatibility Tests +- Migration from pip to UV workflows +- Existing SAM CLI integration +- Various Python project structures +- Different dependency complexity levels + +### Future Enhancements + +Potential future improvements to the UV workflow: +- **Dependency vulnerability scanning**: Integration with security scanning tools +- **Package size optimization**: Advanced techniques for reducing package size +- **Multi-platform builds**: Support for building packages for multiple architectures simultaneously +- **Custom build hooks**: Support for custom build steps and transformations +- **Integration with other tools**: Better integration with other Python development tools +- **UV auto-installation**: Automatic UV installation if not present on system +- **Build caching**: Advanced caching strategies for faster subsequent builds +- **Dependency analysis**: Detailed dependency tree analysis and reporting diff --git a/aws_lambda_builders/workflows/python_uv/__init__.py b/aws_lambda_builders/workflows/python_uv/__init__.py new file mode 100644 index 000000000..02d30a9b0 --- /dev/null +++ b/aws_lambda_builders/workflows/python_uv/__init__.py @@ -0,0 +1,5 @@ +""" +Builds Python Lambda functions using UV dependency manager +""" + +from .workflow import PythonUvWorkflow diff --git a/aws_lambda_builders/workflows/python_uv/actions.py b/aws_lambda_builders/workflows/python_uv/actions.py new file mode 100644 index 000000000..35c0c35eb --- /dev/null +++ b/aws_lambda_builders/workflows/python_uv/actions.py @@ -0,0 +1,103 @@ +""" +Action to resolve Python dependencies using UV +""" + +import logging +from typing import Optional + +from aws_lambda_builders.actions import ActionFailedError, BaseAction, Purpose +from aws_lambda_builders.architecture import X86_64 + +from .exceptions import MissingUvError, UvBuildError, UvInstallationError +from .packager import PythonUvDependencyBuilder, SubprocessUv, UvRunner +from .utils import OSUtils, UvConfig + +LOG = logging.getLogger(__name__) + + +class PythonUvBuildAction(BaseAction): + """Action for building Python dependencies using UV.""" + + NAME = "ResolveDependencies" + DESCRIPTION = "Installing dependencies using UV" + PURPOSE = Purpose.RESOLVE_DEPENDENCIES + LANGUAGE = "python" + + def __init__( + self, + artifacts_dir, + scratch_dir, + manifest_path, + runtime, + dependencies_dir, + binaries, + architecture=X86_64, + config: Optional[UvConfig] = None, + ): + self.artifacts_dir = artifacts_dir + self.manifest_path = manifest_path + self.scratch_dir = scratch_dir + self.runtime = runtime + self.dependencies_dir = dependencies_dir + self.binaries = binaries + self.architecture = architecture + self.config = config or UvConfig() + + self._os_utils = OSUtils() + + def execute(self) -> None: + """Execute the build action for Python UV workflows.""" + try: + # Initialize UV components + uv_subprocess = SubprocessUv(osutils=self._os_utils) + uv_runner = UvRunner(uv_subprocess=uv_subprocess, osutils=self._os_utils) + + # Create main package builder + package_builder = PythonUvDependencyBuilder( + osutils=self._os_utils, + 
runtime=self.runtime, + uv_runner=uv_runner, + ) + + # Determine target directory + target_artifact_dir = self.artifacts_dir + if self.dependencies_dir: + target_artifact_dir = self.dependencies_dir + + # Build dependencies + package_builder.build_dependencies( + artifacts_dir_path=target_artifact_dir, + scratch_dir_path=self.scratch_dir, + manifest_path=self.manifest_path, + architecture=self.architecture, + config=self.config, + ) + + LOG.info("Successfully built Python dependencies using UV") + + except (MissingUvError, UvInstallationError, UvBuildError) as ex: + raise ActionFailedError(str(ex)) + except Exception as ex: + LOG.error("Unexpected error during UV build: %s", str(ex)) + raise ActionFailedError(f"UV build failed: {str(ex)}") + + +class CopyDependenciesAction(BaseAction): + """ + Custom action for copying dependencies from dependencies_dir to artifacts_dir. + Provides clear action name to distinguish from source code copying. + """ + + NAME = "CopyDependencies" + DESCRIPTION = "Copying dependencies from dependencies directory to artifacts directory" + PURPOSE = Purpose.COPY_SOURCE + + def __init__(self, dependencies_dir, artifacts_dir): + from aws_lambda_builders.actions import copytree + + self.dependencies_dir = dependencies_dir + self.artifacts_dir = artifacts_dir + self._copytree = copytree + + def execute(self): + self._copytree(self.dependencies_dir, self.artifacts_dir, maintain_symlinks=False) diff --git a/aws_lambda_builders/workflows/python_uv/exceptions.py b/aws_lambda_builders/workflows/python_uv/exceptions.py new file mode 100644 index 000000000..7c64bb79c --- /dev/null +++ b/aws_lambda_builders/workflows/python_uv/exceptions.py @@ -0,0 +1,41 @@ +""" +Python UV specific workflow exceptions. +""" + +from aws_lambda_builders.exceptions import LambdaBuilderError + + +class MissingUvError(LambdaBuilderError): + """Exception raised when UV executable is not found.""" + + MESSAGE = "uv executable not found in your environment. Please install uv: https://docs.astral.sh/uv/getting-started/installation/" + + +class UvInstallationError(LambdaBuilderError): + """Exception raised when UV installation or setup fails.""" + + MESSAGE = "Failed to install dependencies using uv: {reason}" + + +class UvResolutionError(LambdaBuilderError): + """Exception raised when UV dependency resolution fails.""" + + MESSAGE = "UV dependency resolution failed: {reason}" + + +class UvBuildError(LambdaBuilderError): + """Exception raised when UV package build fails.""" + + MESSAGE = "UV package build failed: {reason}" + + +class LockFileError(LambdaBuilderError): + """Exception raised when lock file operations fail.""" + + MESSAGE = "Lock file operation failed: {reason}" + + +class ManifestNotFoundError(LambdaBuilderError): + """Exception raised when no supported manifest file is found.""" + + MESSAGE = "No supported dependency manifest found. 
Expected one of: {supported_manifests}" diff --git a/aws_lambda_builders/workflows/python_uv/packager.py b/aws_lambda_builders/workflows/python_uv/packager.py new file mode 100644 index 000000000..25b6f39b3 --- /dev/null +++ b/aws_lambda_builders/workflows/python_uv/packager.py @@ -0,0 +1,504 @@ +""" +UV-based Python dependency packager for AWS Lambda +""" + +import logging +import os +from typing import Dict, List, Optional + +from aws_lambda_builders.architecture import ARM64, X86_64 + +from .exceptions import LockFileError, MissingUvError, UvBuildError, UvInstallationError +from .utils import OSUtils, UvConfig, get_uv_version + +LOG = logging.getLogger(__name__) + + +class SubprocessUv: + """Low-level interface for executing UV commands via subprocess.""" + + def __init__(self, osutils: Optional[OSUtils] = None): + if osutils is None: + osutils = OSUtils() + self._osutils = osutils + self._uv_executable = self._find_uv_executable() + + def _find_uv_executable(self) -> str: + """Find UV executable in PATH.""" + uv_path = self._osutils.which("uv") + if not uv_path: + raise MissingUvError() + return uv_path + + @property + def uv_executable(self) -> str: + """Get UV executable path.""" + return self._uv_executable + + def run_uv_command(self, args: List[str], cwd: Optional[str] = None, env: Optional[Dict[str, str]] = None) -> tuple: + """ + Execute UV command with given arguments. + + Args: + args: UV command arguments + cwd: Working directory + env: Environment variables + + Returns: + Tuple of (return_code, stdout, stderr) + """ + cmd = [self._uv_executable] + args + LOG.debug("Executing UV command: %s", " ".join(cmd)) + + rc, stdout, stderr = self._osutils.run_subprocess(cmd, cwd=cwd, env=env) + + LOG.debug("UV command return code: %d", rc) + LOG.debug("UV stdout: %s", stdout) + LOG.debug("UV stderr: %s", stderr) + + return rc, stdout, stderr + + +class UvRunner: + """High-level wrapper around UV operations.""" + + def __init__(self, uv_subprocess: Optional[SubprocessUv] = None, osutils: Optional[OSUtils] = None): + if osutils is None: + osutils = OSUtils() + if uv_subprocess is None: + uv_subprocess = SubprocessUv(osutils) + + self._uv = uv_subprocess + self._osutils = osutils + + @property + def uv_version(self) -> Optional[str]: + """Get UV version.""" + return get_uv_version(self._uv.uv_executable, self._osutils) + + def sync_dependencies( + self, + target_dir: str, + scratch_dir: str, + config: Optional[UvConfig] = None, + python_version: Optional[str] = None, + platform: Optional[str] = None, + architecture: Optional[str] = None, + manifest_path: Optional[str] = None, + project_dir: Optional[str] = None, + ) -> None: + """ + Sync dependencies using UV. 
+ + Args: + target_dir: Directory to install dependencies + scratch_dir: Scratch directory for temporary operations + config: UV configuration options + python_version: Target Python version (e.g., "3.9") + platform: Target platform (e.g., "linux") + architecture: Target architecture (e.g., "x86_64") + manifest_path: Path to dependency manifest file (for backwards compatibility) + project_dir: Project directory containing pyproject.toml and uv.lock + """ + if config is None: + config = UvConfig() + + # Determine project directory + if project_dir: + working_dir = project_dir + elif manifest_path: + # Backwards compatibility: derive project dir from manifest path + working_dir = os.path.dirname(manifest_path) + else: + raise ValueError("Either project_dir or manifest_path must be provided") + + # Ensure UV cache is configured to use scratch directory + if not config.cache_dir: + config.cache_dir = os.path.join(scratch_dir, "uv-cache") + # Use exist_ok equivalent for osutils + if not os.path.exists(config.cache_dir): + self._osutils.makedirs(config.cache_dir) + + args = ["sync"] + + # Add configuration arguments + args.extend(config.to_uv_args()) + + # Add platform-specific arguments + if python_version: + args.extend(["--python", python_version]) + + # Note: uv sync doesn't support --platform or --arch arguments + # It uses the current environment's platform by default + + # Execute UV sync - it automatically finds pyproject.toml and uv.lock in working_dir + rc, stdout, stderr = self._uv.run_uv_command(args, cwd=working_dir) + + if rc != 0: + raise UvInstallationError(reason=f"UV sync failed: {stderr}") + + # Copy dependencies from virtual environment to target directory + # uv sync creates a .venv directory in the project directory + venv_site_packages = os.path.join(working_dir, ".venv", "lib", f"python{python_version}", "site-packages") + + if os.path.exists(venv_site_packages): + # Copy all site-packages contents to target directory + import shutil + + for item in os.listdir(venv_site_packages): + src_path = os.path.join(venv_site_packages, item) + dst_path = os.path.join(target_dir, item) + + if os.path.isdir(src_path): + self._osutils.copytree(src_path, dst_path) + else: + shutil.copy2(src_path, dst_path) + + def install_requirements( + self, + requirements_path: str, + target_dir: str, + scratch_dir: str, + config: Optional[UvConfig] = None, + python_version: Optional[str] = None, + platform: Optional[str] = None, + architecture: Optional[str] = None, + ) -> None: + """ + Install requirements using UV pip interface. 
+ + Args: + requirements_path: Path to requirements.txt file + target_dir: Directory to install dependencies + scratch_dir: Scratch directory for temporary operations + config: UV configuration options + python_version: Target Python version + platform: Target platform + architecture: Target architecture + """ + if config is None: + config = UvConfig() + + # Ensure UV cache is configured to use scratch directory + if not config.cache_dir: + config.cache_dir = os.path.join(scratch_dir, "uv-cache") + # Use exist_ok equivalent for osutils + if not os.path.exists(config.cache_dir): + self._osutils.makedirs(config.cache_dir) + + args = ["pip", "install"] + + # Add requirements file + args.extend(["-r", requirements_path]) + + # Add target directory + args.extend(["--target", target_dir]) + + # Add configuration arguments + args.extend(config.to_uv_args()) + + # Add platform-specific arguments + if python_version: + args.extend(["--python-version", python_version]) + + if platform and architecture: + # UV pip install uses --python-platform format + # Map Lambda architectures to UV platform strings + platform_mapping = { + ("linux", X86_64): "x86_64-unknown-linux-gnu", + ("linux", ARM64): "aarch64-unknown-linux-gnu", + } + + platform_key = (platform, architecture) + if platform_key in platform_mapping: + args.extend(["--python-platform", platform_mapping[platform_key]]) + + # Execute UV pip install + rc, stdout, stderr = self._uv.run_uv_command(args) + + if rc != 0: + raise UvInstallationError(reason=f"UV pip install failed: {stderr}") + + +class PythonUvDependencyBuilder: + """High-level dependency builder that orchestrates UV operations.""" + + def __init__( + self, osutils: Optional[OSUtils] = None, runtime: Optional[str] = None, uv_runner: Optional[UvRunner] = None + ): + if osutils is None: + osutils = OSUtils() + if uv_runner is None: + uv_runner = UvRunner(osutils=osutils) + + self._osutils = osutils + self._uv_runner = uv_runner + self.runtime = runtime + + def build_dependencies( + self, + artifacts_dir_path: str, + scratch_dir_path: str, + manifest_path: str, + architecture: str = X86_64, + config: Optional[UvConfig] = None, + ) -> None: + """ + Build Python dependencies using UV. 
+ + Args: + artifacts_dir_path: Directory to write dependencies + scratch_dir_path: Temporary directory for build operations + manifest_path: Path to dependency manifest file + architecture: Target architecture (X86_64 or ARM64) + config: UV configuration options + """ + LOG.info("Building Python dependencies using UV") + LOG.info("Manifest file: %s", manifest_path) + LOG.info("Target architecture: %s", architecture) + LOG.info("Using scratch directory: %s", scratch_dir_path) + + if config is None: + config = UvConfig() + + # Configure UV to use scratch directory for cache if not already set + if not config.cache_dir: + uv_cache_dir = os.path.join(scratch_dir_path, "uv-cache") + # Use exist_ok equivalent for osutils + if not os.path.exists(uv_cache_dir): + self._osutils.makedirs(uv_cache_dir) + config.cache_dir = uv_cache_dir + LOG.debug("Configured UV cache directory: %s", uv_cache_dir) + + # Determine Python version from runtime + python_version = self._extract_python_version(self.runtime) + + # Determine manifest type and build accordingly + manifest_name = os.path.basename(manifest_path) + + try: + # Get the appropriate handler for this manifest + handler = self._get_manifest_handler(manifest_name) + + # Execute the handler + handler(manifest_path, artifacts_dir_path, scratch_dir_path, python_version, architecture, config) + + except Exception as e: + LOG.error("Failed to build dependencies: %s", str(e)) + raise + + def _get_manifest_handler(self, manifest_name: str): + """Get the appropriate handler function for a manifest file.""" + # Exact match handlers for ACTUAL manifests + exact_handlers = { + "pyproject.toml": self._handle_pyproject_build, + } + + # Check for exact match first + if manifest_name in exact_handlers: + return exact_handlers[manifest_name] + + # Check for requirements file pattern + if self._is_requirements_file(manifest_name): + return self._build_from_requirements + + # Generic unsupported file - covers uv.lock and everything else + raise UvBuildError(reason=f"Unsupported manifest file: {manifest_name}") + + def _handle_pyproject_build( + self, + manifest_path: str, + target_dir: str, + scratch_dir: str, + python_version: str, + architecture: str, + config: UvConfig, + ) -> None: + """ + Smart pyproject.toml handler that checks for uv.lock. + + If uv.lock exists alongside pyproject.toml, use lock-based build for more precise dependency resolution. + Otherwise, use standard pyproject.toml build. + """ + manifest_dir = os.path.dirname(manifest_path) + uv_lock_path = os.path.join(manifest_dir, "uv.lock") + + if os.path.exists(uv_lock_path): + LOG.info("Found uv.lock alongside pyproject.toml - using lock-based build for precise dependencies") + # Use lock file for more precise builds + self._build_from_lock_file(uv_lock_path, target_dir, scratch_dir, python_version, architecture, config) + else: + # Standard pyproject.toml build + self._build_from_pyproject(manifest_path, target_dir, scratch_dir, python_version, architecture, config) + + def _is_requirements_file(self, filename: str) -> bool: + """ + Check if a filename represents a valid requirements file. + + Follows Python ecosystem conventions: + - requirements.txt (standard) + - requirements-*.txt (environment-specific: dev, test, prod, etc.) 
+ """ + if filename == "requirements.txt": + return True + + # Allow environment-specific requirements files like requirements-dev.txt + # Must have at least one character after the dash and before .txt + if ( + filename.startswith("requirements-") + and filename.endswith(".txt") + and len(filename) > len("requirements-.txt") + ): + return True + + return False + + def _build_from_lock_file( + self, + lock_path: str, + target_dir: str, + scratch_dir: str, + python_version: str, + architecture: str, + config: UvConfig, + ) -> None: + """Build dependencies from uv.lock file.""" + LOG.info("Building from UV lock file") + + try: + # For uv sync, we need the project directory (where pyproject.toml and uv.lock are) + # uv sync automatically finds both files in the working directory + project_dir = os.path.dirname(lock_path) + + self._uv_runner.sync_dependencies( + project_dir=project_dir, # Pass project directory instead of lock path + target_dir=target_dir, + scratch_dir=scratch_dir, + config=config, + python_version=python_version, + platform="linux", + architecture=architecture, + ) + except Exception as e: + raise LockFileError(reason=str(e)) + + def _build_from_pyproject( + self, + pyproject_path: str, + target_dir: str, + scratch_dir: str, + python_version: str, + architecture: str, + config: UvConfig, + ) -> None: + """Build dependencies from pyproject.toml file using UV's native workflow.""" + LOG.info("Building from pyproject.toml using UV lock and export") + + try: + # Use UV's native workflow: lock -> export -> install + temp_requirements = self._export_pyproject_to_requirements(pyproject_path, scratch_dir, python_version) + + if temp_requirements: + self._uv_runner.install_requirements( + requirements_path=temp_requirements, + target_dir=target_dir, + scratch_dir=scratch_dir, + config=config, + python_version=python_version, + platform="linux", + architecture=architecture, + ) + else: + LOG.info("No dependencies found in pyproject.toml") + + except Exception as e: + raise UvBuildError(reason=f"Failed to build from pyproject.toml: {str(e)}") + + def _export_pyproject_to_requirements( + self, pyproject_path: str, scratch_dir: str, python_version: str + ) -> Optional[str]: + """Use UV's native lock and export to convert pyproject.toml to requirements.txt.""" + project_dir = os.path.dirname(pyproject_path) + + try: + # Step 1: Create lock file using UV + LOG.debug("Creating lock file from pyproject.toml") + lock_args = ["lock", "--no-progress"] + + if python_version: + lock_args.extend(["--python", python_version]) + + rc, stdout, stderr = self._uv_runner._uv.run_uv_command(lock_args, cwd=project_dir) + + if rc != 0: + LOG.warning(f"UV lock failed: {stderr}") + return None + + # Step 2: Export lock file to requirements.txt format + LOG.debug("Exporting lock file to requirements.txt format") + temp_requirements = os.path.join(scratch_dir, "exported_requirements.txt") + + export_args = [ + "export", + "--format", + "requirements.txt", + "--no-emit-project", # Don't include the project itself, only dependencies + "--no-header", # Skip comment header + "--no-hashes", # Skip hashes for cleaner output (optional) + "--output-file", + temp_requirements, + ] + + rc, stdout, stderr = self._uv_runner._uv.run_uv_command(export_args, cwd=project_dir) + + if rc != 0: + LOG.warning(f"UV export failed: {stderr}") + return None + + # Verify the requirements file was created and has content + if os.path.exists(temp_requirements) and os.path.getsize(temp_requirements) > 0: + LOG.debug(f"Successfully 
exported dependencies to {temp_requirements}") + return temp_requirements + else: + LOG.info("No dependencies to export from pyproject.toml") + return None + + except Exception as e: + LOG.warning(f"Failed to export pyproject.toml using UV native workflow: {e}") + return None + + def _build_from_requirements( + self, + requirements_path: str, + target_dir: str, + scratch_dir: str, + python_version: str, + architecture: str, + config: UvConfig, + ) -> None: + """Build dependencies from requirements.txt file.""" + LOG.info("Building from requirements file") + + try: + self._uv_runner.install_requirements( + requirements_path=requirements_path, + target_dir=target_dir, + scratch_dir=scratch_dir, + config=config, + python_version=python_version, + platform="linux", + architecture=architecture, + ) + except Exception as e: + raise UvBuildError(reason=f"Failed to build from requirements: {str(e)}") + + def _extract_python_version(self, runtime: str) -> str: + """Extract Python version from runtime string.""" + if not runtime: + raise UvBuildError(reason="Runtime is required but was not provided") + + # Extract version from runtime like "python3.9" -> "3.9" + if runtime.startswith("python"): + return runtime.replace("python", "") + + return runtime diff --git a/aws_lambda_builders/workflows/python_uv/utils.py b/aws_lambda_builders/workflows/python_uv/utils.py new file mode 100644 index 000000000..88523d02c --- /dev/null +++ b/aws_lambda_builders/workflows/python_uv/utils.py @@ -0,0 +1,148 @@ +""" +Commonly used utilities for Python UV workflow +""" + +import os +import shutil +import subprocess +from typing import List, Optional + +from aws_lambda_builders.workflows.python_pip.utils import OSUtils as BaseOSUtils + +EXPERIMENTAL_FLAG_BUILD_PERFORMANCE = "experimentalBuildPerformance" + + +class OSUtils(BaseOSUtils): + """Extended OS utilities for UV workflow.""" + + def which(self, executable): + """Find executable in PATH.""" + return shutil.which(executable) + + def run_subprocess(self, cmd, cwd=None, env=None): + """Run subprocess and return result.""" + if env is None: + env = self.original_environ() + + try: + result = subprocess.run(cmd, cwd=cwd, env=env, capture_output=True, text=True, check=False) + return result.returncode, result.stdout, result.stderr + except Exception as e: + return 1, "", str(e) + + +def detect_uv_manifest(source_dir: str) -> Optional[str]: + """ + Detect UV-compatible manifest files in order of preference. + + Note: uv.lock is NOT a manifest - it's a lock file that accompanies pyproject.toml. + UV workflows support these manifest types: + 1. pyproject.toml (preferred) - may have accompanying uv.lock + 2. 
requirements.txt and variants - traditional pip-style manifests + + Args: + source_dir: Directory to search for manifest files + + Returns: + Path to the detected manifest file, or None if not found + """ + # Check for pyproject.toml first (preferred manifest) + pyproject_path = os.path.join(source_dir, "pyproject.toml") + if os.path.isfile(pyproject_path): + return pyproject_path + + # Check for requirements.txt variants (in order of preference) + requirements_variants = [ + "requirements.txt", + "requirements-dev.txt", + "requirements-test.txt", + "requirements-prod.txt", + ] + + for requirements_file in requirements_variants: + requirements_path = os.path.join(source_dir, requirements_file) + if os.path.isfile(requirements_path): + return requirements_path + + return None + + +def get_uv_version(uv_executable: str, osutils: OSUtils) -> Optional[str]: + """ + Get UV version from the executable. + + Args: + uv_executable: Path to UV executable + osutils: OS utilities instance + + Returns: + UV version string or None if unable to determine + """ + try: + rc, stdout, stderr = osutils.run_subprocess([uv_executable, "--version"]) + if rc == 0 and stdout: + # UV version output format: "uv 0.1.0" + parts = stdout.strip().split() + min_parts_for_version = 2 + if len(parts) >= min_parts_for_version: + return parts[1] + except Exception: + pass + + return None + + +class UvConfig: + """Configuration class for UV operations.""" + + def __init__( + self, + index_url: Optional[str] = None, + extra_index_urls: Optional[List[str]] = None, + cache_dir: Optional[str] = None, + no_cache: bool = False, + prerelease: str = "disallow", + resolution: str = "highest", + compile_bytecode: bool = True, + exclude_newer: Optional[str] = None, + generate_hashes: bool = False, + ): + self.index_url = index_url + self.extra_index_urls = extra_index_urls or [] + self.cache_dir = cache_dir + self.no_cache = no_cache + self.prerelease = prerelease + self.resolution = resolution + self.compile_bytecode = compile_bytecode + self.exclude_newer = exclude_newer + self.generate_hashes = generate_hashes + + def to_uv_args(self) -> List[str]: + """Convert configuration to UV command line arguments.""" + args = [] + + if self.index_url: + args.extend(["--index-url", self.index_url]) + + for extra_url in self.extra_index_urls: + args.extend(["--extra-index-url", extra_url]) + + if self.cache_dir: + args.extend(["--cache-dir", self.cache_dir]) + + if self.no_cache: + args.append("--no-cache") + + if self.prerelease != "disallow": + args.extend(["--prerelease", self.prerelease]) + + if self.resolution != "highest": + args.extend(["--resolution", self.resolution]) + + if self.exclude_newer: + args.extend(["--exclude-newer", self.exclude_newer]) + + if self.generate_hashes: + args.append("--generate-hashes") + + return args diff --git a/aws_lambda_builders/workflows/python_uv/workflow.py b/aws_lambda_builders/workflows/python_uv/workflow.py new file mode 100644 index 000000000..ddf11ee18 --- /dev/null +++ b/aws_lambda_builders/workflows/python_uv/workflow.py @@ -0,0 +1,182 @@ +""" +Python UV Workflow +""" + +import logging + +from aws_lambda_builders.actions import CleanUpAction, CopySourceAction +from aws_lambda_builders.path_resolver import PathResolver +from aws_lambda_builders.workflow import BaseWorkflow, BuildDirectory, BuildInSourceSupport, Capability + +from .actions import CopyDependenciesAction, PythonUvBuildAction +from .utils import OSUtils, detect_uv_manifest + +LOG = logging.getLogger(__name__) + + +class 
PythonUvWorkflow(BaseWorkflow): + """ + Workflow for building Python projects using UV. + + This workflow supports multiple manifest types: + - uv.lock (UV lock file for exact reproducible builds) + - pyproject.toml (modern Python projects with UV support) + - requirements.txt (traditional pip format) + - requirements-*.txt (environment-specific requirements) + + The workflow uses these BaseWorkflow contract properties: + - download_dependencies: Whether to download/install dependencies (default: True) + - dependencies_dir: Optional separate directory for dependencies (default: None) + - combine_dependencies: Whether to copy dependencies to artifacts dir (default: True) + """ + + NAME = "PythonUvBuilder" + + CAPABILITY = Capability(language="python", dependency_manager="uv", application_framework=None) + + # Common source files to exclude from build artifacts output + # Based on Python PIP workflow with UV-specific additions + EXCLUDED_FILES = ( + ".aws-sam", + ".chalice", + ".git", + ".gitignore", + # Compiled files + "*.pyc", + "__pycache__", + "*.so", + # Distribution / packaging + ".Python", + "*.egg-info", + "*.egg", + # Installer logs + "pip-log.txt", + "pip-delete-this-directory.txt", + # Unit test / coverage reports + "htmlcov", + ".tox", + ".nox", + ".coverage", + ".cache", + ".pytest_cache", + # pyenv + ".python-version", + # mypy, Pyre + ".mypy_cache", + ".dmypy.json", + ".pyre", + # environments + ".env", + ".venv", + "venv", + "venv.bak", + "env.bak", + "ENV", + "env", + # UV specific + ".uv-cache", + "uv.lock.bak", + # Editors + ".vscode", + ".idea", + ) + + PYTHON_VERSION_THREE = "3" + + DEFAULT_BUILD_DIR = BuildDirectory.SCRATCH + BUILD_IN_SOURCE_SUPPORT = BuildInSourceSupport.NOT_SUPPORTED + + def __init__(self, source_dir, artifacts_dir, scratch_dir, manifest_path, runtime=None, osutils=None, **kwargs): + super(PythonUvWorkflow, self).__init__( + source_dir, artifacts_dir, scratch_dir, manifest_path, runtime=runtime, **kwargs + ) + + if osutils is None: + osutils = OSUtils() + + # Auto-detect manifest if not provided or doesn't exist + if not manifest_path or not osutils.file_exists(manifest_path): + detected_manifest = detect_uv_manifest(source_dir) + if detected_manifest: + manifest_path = detected_manifest + LOG.info(f"Auto-detected manifest file: {manifest_path}") + else: + LOG.warning( + "No UV-compatible manifest file found (pyproject.toml, requirements.txt). " + "Continuing the build without dependencies." + ) + manifest_path = None + + self._setup_build_actions(source_dir, artifacts_dir, scratch_dir, manifest_path, runtime) + + def _setup_build_actions(self, source_dir, artifacts_dir, scratch_dir, manifest_path, runtime): + """ + Set up the build actions based on configuration. + + Hybrid approach (matches python_pip workflow): + - Simple case (dependencies_dir=None): Install deps directly to artifacts_dir, copy source + - Advanced case (dependencies_dir provided): Install to dependencies_dir, copy deps, copy source + + This provides the best of both worlds - simple by default, flexible when needed. 
+ """ + self.actions = [] + + # Build dependencies if we have a manifest and download_dependencies is enabled + if manifest_path and self.download_dependencies: + # Determine target: dependencies_dir if provided, otherwise artifacts_dir (hybrid approach) + target_dir = self.dependencies_dir if self.dependencies_dir else artifacts_dir + + if self.dependencies_dir: + # Advanced case: Clean up the dependencies folder before installing + self.actions.append(CleanUpAction(self.dependencies_dir)) + + self.actions.append( + PythonUvBuildAction( + target_dir, # Install to dependencies_dir OR artifacts_dir + scratch_dir, + manifest_path, + runtime, + self.dependencies_dir, # Pass for action's internal logic + binaries=self.binaries, + architecture=self.architecture, + ) + ) + + # Advanced case: Copy dependencies from dependencies_dir to artifacts_dir if configured + if self.dependencies_dir and self.combine_dependencies: + self.actions.append(CopyDependenciesAction(self.dependencies_dir, artifacts_dir)) + + # Always copy source code (final step) + self.actions.append(CopySourceAction(source_dir, artifacts_dir, excludes=self.EXCLUDED_FILES)) + + def get_resolvers(self): + """ + Get path resolvers for finding Python and UV binaries. + + Returns specialized Python path resolver that looks for additional binaries + in addition to the language specific binary. + """ + return [ + PathResolver( + runtime=self.runtime, + binary=self.CAPABILITY.language, + additional_binaries=self._get_additional_binaries(), + executable_search_paths=self.executable_search_paths, + ) + ] + + def _get_additional_binaries(self): + """Get additional Python binaries to search for.""" + # python3 is an additional binary that has to be considered in addition to the original python binary, + # when the specified python runtime is 3.x + major, _ = self.runtime.replace(self.CAPABILITY.language, "").split(".") + return [f"{self.CAPABILITY.language}{major}"] if major == self.PYTHON_VERSION_THREE else None + + def get_validators(self): + """Get runtime validators. + + UV has robust built-in Python version handling and can automatically + find, download, and manage Python versions. Unlike pip, UV doesn't need + external validation of Python runtime paths. 
+ """ + return [] diff --git a/tests/functional/workflows/python_uv/__init__.py b/tests/functional/workflows/python_uv/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/tests/functional/workflows/python_uv/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/functional/workflows/python_uv/test_packager.py b/tests/functional/workflows/python_uv/test_packager.py new file mode 100644 index 000000000..6c9e02b31 --- /dev/null +++ b/tests/functional/workflows/python_uv/test_packager.py @@ -0,0 +1,665 @@ +import sys +import os +import zipfile +import tarfile +import io +from collections import defaultdict, namedtuple +from unittest import TestCase, mock + +import pytest + +from aws_lambda_builders.architecture import ARM64, X86_64 +from aws_lambda_builders.workflows.python_uv.packager import SubprocessUv, UvRunner, PythonUvDependencyBuilder +from aws_lambda_builders.workflows.python_uv.exceptions import MissingUvError, UvInstallationError, UvBuildError +from aws_lambda_builders.workflows.python_uv.utils import OSUtils, UvConfig + + +FakeUvCall = namedtuple("FakeUvEntry", ["args", "env_vars", "cwd"]) + + +def _create_app_structure(tmpdir): + appdir = tmpdir.mkdir("app") + appdir.join("app.py").write("# Test app") + return appdir + + +class FakeUv(object): + """Mock UV executable for functional testing""" + + def __init__(self): + self._calls = defaultdict(lambda: []) + self._call_history = [] + self._side_effects = defaultdict(lambda: []) + self._return_tuple = (0, b"", b"") + + def run_uv_command(self, args, cwd=None, env=None): + """Mock UV command execution""" + cmd = args[0] if args else "unknown" + self._calls[cmd].append((args, env, cwd)) + + try: + side_effects = self._side_effects[cmd].pop(0) + for side_effect in side_effects: + self._call_history.append( + ( + FakeUvCall(args, env, cwd), + FakeUvCall( + side_effect.expected_args, + side_effect.expected_env, + side_effect.expected_cwd + ), + ) + ) + side_effect.execute(args, cwd) + except IndexError: + pass + + return self._return_tuple + + def set_return_tuple(self, rc, out, err): + self._return_tuple = (rc, out, err) + + def packages_to_install(self, expected_args, packages, install_location=None): + """Mock package installation with fake packages""" + side_effects = [UvSideEffect(pkg, expected_args, install_location) for pkg in packages] + self._side_effects["pip"].append(side_effects) + + def sync_dependencies(self, expected_args, packages, project_dir=None): + """Mock UV sync operation""" + side_effects = [UvSyncSideEffect(pkg, expected_args, project_dir) for pkg in packages] + self._side_effects["sync"].append(side_effects) + + def validate(self): + """Validate that all expected calls were made""" + for cmd, calls in self._calls.items(): + if calls: + # All calls were made successfully + pass + + +class UvSideEffect(object): + """Side effect for UV pip install commands""" + + def __init__(self, package_name, expected_args, install_location=None): + self.package_name = package_name + self.expected_args = expected_args + self.expected_env = None + self.expected_cwd = None + self.install_location = install_location + + def execute(self, args, cwd=None): + """Execute the side effect - create fake package files""" + if self.install_location and os.path.exists(self.install_location): + # Create fake package directory + package_dir = os.path.join(self.install_location, self.package_name) + os.makedirs(package_dir, exist_ok=True) + + # Create fake __init__.py + init_file = os.path.join(package_dir, "__init__.py") + 
with open(init_file, "w") as f: + f.write(f"# Fake {self.package_name} package\n") + + # Create fake dist-info directory + dist_info = os.path.join(self.install_location, f"{self.package_name}-1.0.0.dist-info") + os.makedirs(dist_info, exist_ok=True) + + # Create fake METADATA file + metadata_file = os.path.join(dist_info, "METADATA") + with open(metadata_file, "w") as f: + f.write(f"Name: {self.package_name}\nVersion: 1.0.0\n") + + +class UvSyncSideEffect(object): + """Side effect for UV sync commands""" + + def __init__(self, package_name, expected_args, project_dir=None): + self.package_name = package_name + self.expected_args = expected_args + self.expected_env = None + self.expected_cwd = project_dir + + def execute(self, args, cwd=None): + """Execute sync side effect - create virtual environment structure""" + if cwd and os.path.exists(cwd): + # Create fake .venv directory + venv_dir = os.path.join(cwd, ".venv") + site_packages = os.path.join(venv_dir, "lib", "python3.13", "site-packages") + os.makedirs(site_packages, exist_ok=True) + + # Create fake package in site-packages + package_dir = os.path.join(site_packages, self.package_name) + os.makedirs(package_dir, exist_ok=True) + + init_file = os.path.join(package_dir, "__init__.py") + with open(init_file, "w") as f: + f.write(f"# Fake {self.package_name} package from sync\n") + + +@pytest.fixture +def osutils(): + return OSUtils() + + +@pytest.fixture +def uv_runner(osutils): + fake_uv = FakeUv() + subprocess_uv = SubprocessUv(osutils=osutils) + # Replace the real UV with our fake one + subprocess_uv.run_uv_command = fake_uv.run_uv_command + uv_runner = UvRunner(uv_subprocess=subprocess_uv, osutils=osutils) + return fake_uv, uv_runner + + +class TestPythonUvDependencyBuilder(object): + """Functional tests for UV dependency builder""" + + def _write_requirements_txt(self, packages, directory): + """Write requirements.txt file""" + contents = "\n".join(packages) + filepath = os.path.join(directory, "requirements.txt") + with open(filepath, "w") as f: + f.write(contents) + + def _write_pyproject_toml(self, packages, directory, name="test-project"): + """Write pyproject.toml file""" + deps = '", "'.join(packages) + content = f'''[project] +name = "{name}" +version = "1.0.0" +requires-python = ">=3.8" +dependencies = ["{deps}"] + +[tool.uv] +dev-dependencies = [] +''' + filepath = os.path.join(directory, "pyproject.toml") + with open(filepath, "w") as f: + f.write(content) + + def _make_appdir_and_dependency_builder(self, reqs, tmpdir, uv_runner, manifest_type="requirements", **kwargs): + """Create app directory and dependency builder""" + appdir = str(_create_app_structure(tmpdir)) + + if manifest_type == "requirements": + self._write_requirements_txt(reqs, appdir) + manifest_path = os.path.join(appdir, "requirements.txt") + elif manifest_type == "pyproject": + self._write_pyproject_toml(reqs, appdir) + manifest_path = os.path.join(appdir, "pyproject.toml") + else: + raise ValueError(f"Unknown manifest type: {manifest_type}") + + builder = PythonUvDependencyBuilder( + osutils=OSUtils(), + runtime="python3.13", + uv_runner=uv_runner, + **kwargs + ) + return appdir, builder, manifest_path + + def test_can_build_simple_requirements(self, tmpdir, uv_runner, osutils): + """Test building simple requirements.txt dependencies""" + reqs = ["requests", "boto3"] + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="requirements" + ) + + # Set up fake UV to 
return success + fake_uv.set_return_tuple(0, b"Successfully installed requests boto3", b"") + + # Mock the package installation + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + fake_uv.packages_to_install( + expected_args=["pip", "install", "--python-version", "3.13", "--python-platform", "x86_64-unknown-linux-gnu"], + packages=reqs, + install_location=site_packages + ) + + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=UvConfig() + ) + + installed_packages = os.listdir(site_packages) + fake_uv.validate() + + for req in reqs: + assert req in installed_packages + + def test_can_build_pyproject_dependencies(self, tmpdir, uv_runner, osutils): + """Test building pyproject.toml dependencies""" + reqs = ["numpy", "pandas"] + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="pyproject" + ) + + # Set up fake UV sync operation + fake_uv.set_return_tuple(0, b"Resolved 2 packages", b"") + fake_uv.sync_dependencies( + expected_args=["sync", "--python", "3.13"], + packages=reqs, + project_dir=appdir + ) + + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=UvConfig() + ) + + # For pyproject.toml, packages would be in .venv initially + venv_site_packages = os.path.join(appdir, ".venv", "lib", "python3.13", "site-packages") + if os.path.exists(venv_site_packages): + installed_packages = os.listdir(venv_site_packages) + for req in reqs: + assert req in installed_packages + + def test_can_handle_arm64_architecture(self, tmpdir, uv_runner, osutils): + """Test building dependencies for ARM64 architecture""" + reqs = ["cryptography"] + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="requirements" + ) + + fake_uv.set_return_tuple(0, b"Successfully installed cryptography", b"") + + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + fake_uv.packages_to_install( + expected_args=["pip", "install", "--python-version", "3.13", "--python-platform", "aarch64-unknown-linux-gnu"], + packages=reqs, + install_location=site_packages + ) + + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=ARM64, # Test ARM64 architecture + config=UvConfig() + ) + + installed_packages = os.listdir(site_packages) + fake_uv.validate() + + for req in reqs: + assert req in installed_packages + + def test_handles_uv_installation_failure(self, tmpdir, uv_runner, osutils): + """Test handling of UV installation failures""" + reqs = ["nonexistent-package"] + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="requirements" + ) + + # Set up fake UV to return failure + fake_uv.set_return_tuple(1, b"", b"ERROR: Could not find a version that satisfies the requirement nonexistent-package") + + site_packages = os.path.join(appdir, "site-packages") + 
os.makedirs(site_packages, exist_ok=True) + + with pytest.raises(UvBuildError): + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=UvConfig() + ) + + def test_can_build_with_custom_config(self, tmpdir, uv_runner, osutils): + """Test building with custom UV configuration""" + reqs = ["flask"] + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="requirements" + ) + + fake_uv.set_return_tuple(0, b"Successfully installed flask", b"") + + # Test with custom configuration + config = UvConfig( + index_url="https://custom-pypi.example.com/simple", + extra_index_urls=["https://extra-pypi.example.com/simple"], + no_cache=True + ) + + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + fake_uv.packages_to_install( + expected_args=["pip", "install", "--index-url", "https://custom-pypi.example.com/simple"], + packages=reqs, + install_location=site_packages + ) + + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=config + ) + + installed_packages = os.listdir(site_packages) + fake_uv.validate() + + for req in reqs: + assert req in installed_packages + + def test_can_build_with_lock_file_optimization(self, tmpdir, uv_runner, osutils): + """Test building with existing uv.lock file for optimization""" + reqs = ["django"] + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="pyproject" + ) + + # Create a fake uv.lock file + lock_content = '''version = 1 +requires-python = ">=3.8" + +[[package]] +name = "django" +version = "4.2.0" +''' + lock_path = os.path.join(appdir, "uv.lock") + with open(lock_path, "w") as f: + f.write(lock_content) + + fake_uv.set_return_tuple(0, b"Using existing lock file", b"") + fake_uv.sync_dependencies( + expected_args=["sync", "--python", "3.13"], + packages=reqs, + project_dir=appdir + ) + + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=UvConfig() + ) + + # Verify lock file was used (it should still exist) + assert os.path.exists(lock_path) + fake_uv.validate() + + def test_can_build_mixed_package_types(self, tmpdir, uv_runner, osutils): + """Test building mixed package types (pure Python, binary, etc.)""" + reqs = ["requests", "numpy", "pyyaml"] # Mix of pure Python and binary packages + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="requirements" + ) + + fake_uv.set_return_tuple(0, b"Successfully installed requests numpy pyyaml", b"") + + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + fake_uv.packages_to_install( + expected_args=["pip", "install", "--python-version", "3.13"], + packages=reqs, + install_location=site_packages + ) + + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + 
scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=UvConfig() + ) + + installed_packages = os.listdir(site_packages) + fake_uv.validate() + + # Verify all package types were installed + for req in reqs: + assert req in installed_packages + # Verify dist-info directories exist + dist_info_pattern = f"{req}-*.dist-info" + dist_info_dirs = [d for d in installed_packages if d.startswith(f"{req}-") and d.endswith(".dist-info")] + assert len(dist_info_dirs) > 0, f"Missing dist-info for {req}" + + def test_can_handle_environment_specific_requirements(self, tmpdir, uv_runner, osutils): + """Test building with environment-specific requirements files""" + reqs = ["pytest", "coverage"] + fake_uv, runner = uv_runner + appdir = str(_create_app_structure(tmpdir)) + + # Create requirements-dev.txt (environment-specific) + dev_requirements = os.path.join(appdir, "requirements-dev.txt") + self._write_requirements_txt(reqs, appdir) + os.rename(os.path.join(appdir, "requirements.txt"), dev_requirements) + + builder = PythonUvDependencyBuilder( + osutils=OSUtils(), + runtime="python3.13", + uv_runner=runner + ) + + fake_uv.set_return_tuple(0, b"Successfully installed pytest coverage", b"") + + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + fake_uv.packages_to_install( + expected_args=["pip", "install", "-r", "requirements-dev.txt"], + packages=reqs, + install_location=site_packages + ) + + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=dev_requirements, + architecture=X86_64, + config=UvConfig() + ) + + installed_packages = os.listdir(site_packages) + fake_uv.validate() + + for req in reqs: + assert req in installed_packages + + def test_can_handle_large_dependency_trees(self, tmpdir, uv_runner, osutils): + """Test building large dependency trees efficiently""" + # Simulate a large project with many dependencies + reqs = [ + "django", "djangorestframework", "celery", "redis", "psycopg2-binary", + "pillow", "boto3", "requests", "numpy", "pandas", "matplotlib" + ] + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="pyproject" + ) + + fake_uv.set_return_tuple(0, f"Resolved {len(reqs)} packages".encode(), b"") + fake_uv.sync_dependencies( + expected_args=["sync", "--python", "3.13"], + packages=reqs, + project_dir=appdir + ) + + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=UvConfig() + ) + + # For large dependency trees, verify core packages are present + venv_site_packages = os.path.join(appdir, ".venv", "lib", "python3.13", "site-packages") + if os.path.exists(venv_site_packages): + installed_packages = os.listdir(venv_site_packages) + core_packages = ["django", "requests", "boto3", "numpy"] + for pkg in core_packages: + assert pkg in installed_packages + + def test_can_handle_conflicting_dependencies(self, tmpdir, uv_runner, osutils): + """Test handling of conflicting dependency versions""" + # Create a scenario with potential version conflicts + reqs = ["package-a==1.0.0", "package-b>=2.0.0"] + fake_uv, runner = uv_runner + appdir, builder, manifest_path = 
self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="requirements" + ) + + # UV should handle conflicts gracefully or fail with clear error + fake_uv.set_return_tuple(1, b"", b"No solution found when resolving dependencies") + + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + + with pytest.raises(UvBuildError) as exc_info: + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=UvConfig() + ) + + # Verify error message contains useful information + assert "No solution found" in str(exc_info.value) + + def test_can_build_with_custom_python_version(self, tmpdir, uv_runner, osutils): + """Test building with different Python versions""" + reqs = ["typing-extensions"] + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="requirements" + ) + + # Test with Python 3.11 instead of 3.13 + builder_py311 = PythonUvDependencyBuilder( + osutils=OSUtils(), + runtime="python3.11", # Different Python version + uv_runner=runner + ) + + fake_uv.set_return_tuple(0, b"Successfully installed typing-extensions", b"") + + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + fake_uv.packages_to_install( + expected_args=["pip", "install", "--python-version", "3.11"], # Should use 3.11 + packages=reqs, + install_location=site_packages + ) + + with osutils.tempdir() as scratch_dir: + builder_py311.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=UvConfig() + ) + + installed_packages = os.listdir(site_packages) + fake_uv.validate() + + for req in reqs: + assert req in installed_packages + + def test_can_build_with_prerelease_packages(self, tmpdir, uv_runner, osutils): + """Test building with prerelease package versions""" + reqs = ["django>=4.0.0a1"] # Prerelease version + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="requirements" + ) + + # Configure to allow prereleases + config = UvConfig(prerelease="allow") + + fake_uv.set_return_tuple(0, b"Successfully installed django", b"") + + site_packages = os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + fake_uv.packages_to_install( + expected_args=["pip", "install", "--prerelease", "allow"], + packages=["django"], + install_location=site_packages + ) + + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=config + ) + + installed_packages = os.listdir(site_packages) + fake_uv.validate() + + assert "django" in installed_packages + + def test_can_build_with_hash_verification(self, tmpdir, uv_runner, osutils): + """Test building with package hash verification""" + reqs = ["certifi==2023.7.22"] + fake_uv, runner = uv_runner + appdir, builder, manifest_path = self._make_appdir_and_dependency_builder( + reqs, tmpdir, runner, manifest_type="requirements" + ) + + # Configure to generate/verify hashes + config = UvConfig(generate_hashes=True) + + fake_uv.set_return_tuple(0, b"Successfully installed certifi", b"") + + site_packages = 
os.path.join(appdir, "site-packages") + os.makedirs(site_packages, exist_ok=True) + fake_uv.packages_to_install( + expected_args=["pip", "install", "--generate-hashes"], + packages=["certifi"], + install_location=site_packages + ) + + with osutils.tempdir() as scratch_dir: + builder.build_dependencies( + artifacts_dir_path=site_packages, + scratch_dir_path=scratch_dir, + manifest_path=manifest_path, + architecture=X86_64, + config=config + ) + + installed_packages = os.listdir(site_packages) + fake_uv.validate() + + assert "certifi" in installed_packages diff --git a/tests/integration/workflows/python_uv/__init__.py b/tests/integration/workflows/python_uv/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/tests/integration/workflows/python_uv/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/integration/workflows/python_uv/test_python_uv.py b/tests/integration/workflows/python_uv/test_python_uv.py new file mode 100644 index 000000000..334ecf881 --- /dev/null +++ b/tests/integration/workflows/python_uv/test_python_uv.py @@ -0,0 +1,220 @@ +import os +import pathlib +import shutil +import sys +import platform +import tempfile +from unittest import TestCase, skipIf + +from parameterized import parameterized_class + +from aws_lambda_builders.builder import LambdaBuilder +from aws_lambda_builders.exceptions import WorkflowFailedError +from aws_lambda_builders.utils import which +from aws_lambda_builders.workflows.python_uv.utils import EXPERIMENTAL_FLAG_BUILD_PERFORMANCE + +IS_WINDOWS = platform.system().lower() == "windows" + + +@parameterized_class(("experimental_flags",), [([]), ([EXPERIMENTAL_FLAG_BUILD_PERFORMANCE])]) +class TestPythonUvWorkflow(TestCase): + """ + Verifies that `python_uv` workflow works by building a Lambda with simple dependencies + """ + + TEST_DATA_FOLDER = os.path.join(os.path.dirname(__file__), "testdata") + experimental_flags = [] + + def setUp(self): + self.source_dir = self.TEST_DATA_FOLDER + self.artifacts_dir = tempfile.mkdtemp() + self.scratch_dir = tempfile.mkdtemp() + self.dependencies_dir = tempfile.mkdtemp() + + self.manifest_path_requirements = os.path.join(self.TEST_DATA_FOLDER, "requirements-simple.txt") + self.manifest_path_pyproject = os.path.join(self.TEST_DATA_FOLDER, "pyproject.toml") + + # PIP-equivalent test files for compatibility validation + self.manifest_path_numpy = os.path.join(self.TEST_DATA_FOLDER, "requirements-numpy.txt") + self.manifest_path_wrapt = os.path.join(self.TEST_DATA_FOLDER, "requirements-wrapt.txt") + self.manifest_path_invalid = os.path.join(self.TEST_DATA_FOLDER, "requirements-invalid.txt") + + self.test_data_files = { + "__init__.py", + "main.py", + "requirements-simple.txt", + "pyproject.toml", + "requirements-numpy.txt", + "requirements-wrapt.txt", + "requirements-invalid.txt", + "pyproject-numpy.toml", + "pyproject-wrapt.toml", + } + + def tearDown(self): + shutil.rmtree(self.artifacts_dir) + shutil.rmtree(self.scratch_dir) + shutil.rmtree(self.dependencies_dir) + + @skipIf(which("uv") is None, "uv not available") + def test_workflow_uses_requirements_txt_file(self): + builder = LambdaBuilder( + language="python", + dependency_manager="uv", + application_framework=None, + ) + builder.build( + self.source_dir, + self.artifacts_dir, + self.scratch_dir, + self.manifest_path_requirements, + runtime="python3.9", + experimental_flags=self.experimental_flags, + ) + + expected_files = self.test_data_files.union({"six.py", "six-1.16.0.dist-info"}) + output_files = set(os.listdir(self.artifacts_dir)) + 
self.assertEqual(expected_files.intersection(output_files), expected_files) + + @skipIf(which("uv") is None, "uv not available") + def test_workflow_uses_pyproject_toml_file(self): + builder = LambdaBuilder( + language="python", + dependency_manager="uv", + application_framework=None, + ) + builder.build( + self.source_dir, + self.artifacts_dir, + self.scratch_dir, + self.manifest_path_pyproject, + runtime="python3.9", + experimental_flags=self.experimental_flags, + ) + + expected_files = self.test_data_files.union({"six.py", "six-1.16.0.dist-info"}) + output_files = set(os.listdir(self.artifacts_dir)) + self.assertEqual(expected_files.intersection(output_files), expected_files) + + @skipIf(which("uv") is None, "uv not available") + def test_workflow_with_dependencies_dir(self): + builder = LambdaBuilder( + language="python", + dependency_manager="uv", + application_framework=None, + ) + builder.build( + self.source_dir, + self.artifacts_dir, + self.scratch_dir, + self.manifest_path_requirements, + runtime="python3.9", + dependencies_dir=self.dependencies_dir, + experimental_flags=self.experimental_flags, + ) + + expected_files = self.test_data_files + output_files = set(os.listdir(self.artifacts_dir)) + self.assertEqual(expected_files.intersection(output_files), expected_files) + + expected_dependencies = {"six.py", "six-1.16.0.dist-info"} + dependencies_files = set(os.listdir(self.dependencies_dir)) + self.assertEqual(expected_dependencies.intersection(dependencies_files), expected_dependencies) + + @skipIf(which("uv") is None, "uv not available") + def test_workflow_builds_numpy_successfully(self): + """Test that UV can build numpy (same as PIP workflow test)""" + builder = LambdaBuilder(language="python", dependency_manager="uv", application_framework=None) + + builder.build( + self.source_dir, + self.artifacts_dir, + self.scratch_dir, + self.manifest_path_numpy, + runtime="python3.13", + experimental_flags=self.experimental_flags, + ) + + # Check that numpy was built successfully + output_files = set(os.listdir(self.artifacts_dir)) + expected_numpy_files = {"numpy", "numpy-2.1.2.dist-info", "numpy.libs"} + + # Verify numpy files are present + for expected_file in expected_numpy_files: + self.assertIn(expected_file, output_files, f"Expected {expected_file} in build output") + + @skipIf(which("uv") is None, "uv not available") + def test_workflow_fails_with_wrapt_python313(self): + """Test that UV fails with wrapt on Python 3.13 (same as PIP workflow)""" + builder = LambdaBuilder(language="python", dependency_manager="uv", application_framework=None) + + # Should fail due to Python 3.13 incompatibility + with self.assertRaises(WorkflowFailedError): + builder.build( + self.source_dir, + self.artifacts_dir, + self.scratch_dir, + self.manifest_path_wrapt, + runtime="python3.13", + experimental_flags=self.experimental_flags, + ) + + @skipIf(which("uv") is None, "uv not available") + def test_workflow_fails_with_invalid_requirements(self): + """Test that UV properly handles invalid requirements syntax (same as PIP workflow)""" + builder = LambdaBuilder(language="python", dependency_manager="uv", application_framework=None) + + # Should fail due to invalid syntax (boto3=1.19.99 instead of boto3==1.19.99) + with self.assertRaises(WorkflowFailedError) as ctx: + builder.build( + self.source_dir, + self.artifacts_dir, + self.scratch_dir, + self.manifest_path_invalid, + runtime="python3.13", + experimental_flags=self.experimental_flags, + ) + + # Verify error message mentions the syntax issue + 
error_message = str(ctx.exception) + self.assertIn("no such comparison operator", error_message) + + @skipIf(which("uv") is None, "uv not available") + def test_workflow_builds_numpy_with_pyproject(self): + """Test that UV can build numpy using pyproject.toml format""" + builder = LambdaBuilder(language="python", dependency_manager="uv", application_framework=None) + + # Create a temporary directory with pyproject.toml (UV only recognizes exact name) + temp_source_dir = tempfile.mkdtemp() + try: + # Copy main.py to temp directory + shutil.copy(os.path.join(self.TEST_DATA_FOLDER, "main.py"), temp_source_dir) + shutil.copy(os.path.join(self.TEST_DATA_FOLDER, "__init__.py"), temp_source_dir) + + # Copy pyproject-numpy.toml as pyproject.toml + shutil.copy( + os.path.join(self.TEST_DATA_FOLDER, "pyproject-numpy.toml"), + os.path.join(temp_source_dir, "pyproject.toml"), + ) + + pyproject_path = os.path.join(temp_source_dir, "pyproject.toml") + + builder.build( + temp_source_dir, + self.artifacts_dir, + self.scratch_dir, + pyproject_path, + runtime="python3.13", + experimental_flags=self.experimental_flags, + ) + + # Check that numpy was built successfully + output_files = set(os.listdir(self.artifacts_dir)) + expected_numpy_files = {"numpy", "numpy-2.1.2.dist-info", "numpy.libs"} + + # Verify numpy files are present + for expected_file in expected_numpy_files: + self.assertIn(expected_file, output_files, f"Expected {expected_file} in build output") + + finally: + shutil.rmtree(temp_source_dir) diff --git a/tests/integration/workflows/python_uv/testdata/__init__.py b/tests/integration/workflows/python_uv/testdata/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/tests/integration/workflows/python_uv/testdata/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/integration/workflows/python_uv/testdata/main.py b/tests/integration/workflows/python_uv/testdata/main.py new file mode 100644 index 000000000..19bcd9ef9 --- /dev/null +++ b/tests/integration/workflows/python_uv/testdata/main.py @@ -0,0 +1,5 @@ +import six + + +def lambda_handler(event, context): + return {"statusCode": 200, "body": f"Six version: {six.__version__}"} diff --git a/tests/integration/workflows/python_uv/testdata/pyproject-numpy.toml b/tests/integration/workflows/python_uv/testdata/pyproject-numpy.toml new file mode 100644 index 000000000..4643b2293 --- /dev/null +++ b/tests/integration/workflows/python_uv/testdata/pyproject-numpy.toml @@ -0,0 +1,15 @@ +[project] +name = "test-lambda-numpy" +version = "0.1.0" +requires-python = ">=3.8" +dependencies = [ + "numpy==1.20.3; python_version == '3.8'", + "numpy==1.20.3; python_version == '3.9'", + "numpy==1.23.5; python_version == '3.10'", + "numpy==1.23.5; python_version == '3.11'", + "numpy==2.1.2; python_version == '3.12'", + "numpy==2.1.2; python_version == '3.13'" +] + +[tool.uv] +dev-dependencies = [] diff --git a/tests/integration/workflows/python_uv/testdata/pyproject-wrapt.toml b/tests/integration/workflows/python_uv/testdata/pyproject-wrapt.toml new file mode 100644 index 000000000..5eb793f82 --- /dev/null +++ b/tests/integration/workflows/python_uv/testdata/pyproject-wrapt.toml @@ -0,0 +1,10 @@ +[project] +name = "test-lambda-wrapt" +version = "0.1.0" +requires-python = ">=3.8" +dependencies = [ + "wrapt==1.13.3" +] + +[tool.uv] +dev-dependencies = [] diff --git a/tests/integration/workflows/python_uv/testdata/pyproject.toml b/tests/integration/workflows/python_uv/testdata/pyproject.toml new file mode 100644 index 000000000..3d9d529f4 --- /dev/null 
+++ b/tests/integration/workflows/python_uv/testdata/pyproject.toml @@ -0,0 +1,11 @@ +[project] +name = "test-lambda" +version = "0.1.0" +requires-python = ">=3.8" +dependencies = [ + "six==1.16.0" +] + +# No build-system needed for Lambda functions - just dependencies +[tool.uv] +dev-dependencies = [] diff --git a/tests/integration/workflows/python_uv/testdata/requirements-inflate.txt b/tests/integration/workflows/python_uv/testdata/requirements-inflate.txt new file mode 100644 index 000000000..e2821a11e --- /dev/null +++ b/tests/integration/workflows/python_uv/testdata/requirements-inflate.txt @@ -0,0 +1,2 @@ +inflate64==0.3.1; python_version < '3.8' --no-binary=:inflate64: +inflate64==1.0.0; python_version >= '3.8' --no-binary=:inflate64: diff --git a/tests/integration/workflows/python_uv/testdata/requirements-invalid.txt b/tests/integration/workflows/python_uv/testdata/requirements-invalid.txt new file mode 100644 index 000000000..30618c7fb --- /dev/null +++ b/tests/integration/workflows/python_uv/testdata/requirements-invalid.txt @@ -0,0 +1 @@ +boto3=1.19.99 diff --git a/tests/integration/workflows/python_uv/testdata/requirements-numpy.txt b/tests/integration/workflows/python_uv/testdata/requirements-numpy.txt new file mode 100644 index 000000000..3682fe698 --- /dev/null +++ b/tests/integration/workflows/python_uv/testdata/requirements-numpy.txt @@ -0,0 +1,6 @@ +numpy==1.20.3; python_version == '3.8' +numpy==1.20.3; python_version == '3.9' +numpy==1.23.5; python_version == '3.10' +numpy==1.23.5; python_version == '3.11' +numpy==2.1.2; python_version == '3.12' +numpy==2.1.2; python_version == '3.13' diff --git a/tests/integration/workflows/python_uv/testdata/requirements-simple.txt b/tests/integration/workflows/python_uv/testdata/requirements-simple.txt new file mode 100644 index 000000000..3b370b70c --- /dev/null +++ b/tests/integration/workflows/python_uv/testdata/requirements-simple.txt @@ -0,0 +1 @@ +six==1.16.0 diff --git a/tests/integration/workflows/python_uv/testdata/requirements-wrapt.txt b/tests/integration/workflows/python_uv/testdata/requirements-wrapt.txt new file mode 100644 index 000000000..ba46249d2 --- /dev/null +++ b/tests/integration/workflows/python_uv/testdata/requirements-wrapt.txt @@ -0,0 +1 @@ +wrapt==1.13.3 diff --git a/tests/integration/workflows/python_uv/testdata/uv.lock b/tests/integration/workflows/python_uv/testdata/uv.lock new file mode 100644 index 000000000..e3172ebe2 --- /dev/null +++ b/tests/integration/workflows/python_uv/testdata/uv.lock @@ -0,0 +1,26 @@ +version = 1 +revision = 2 +requires-python = ">=3.8" + +[[package]] +name = "six" +version = "1.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/71/39/171f1c67cd00715f190ba0b100d606d440a28c93c7714febeca8b79af85e/six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", size = 34041, upload_time = "2021-05-05T14:18:18.379Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/5a/e7c31adbe875f2abbb91bd84cf2dc52d792b5a01506781dbcf25c91daf11/six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254", size = 11053, upload_time = "2021-05-05T14:18:17.237Z" }, +] + +[[package]] +name = "test-lambda" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "six" }, +] + +[package.metadata] +requires-dist = [{ name = "six", specifier = "==1.16.0" }] + +[package.metadata.requires-dev] +dev = [] diff --git a/tests/unit/workflows/python_uv/__init__.py b/tests/unit/workflows/python_uv/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/tests/unit/workflows/python_uv/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/workflows/python_uv/test_packager.py b/tests/unit/workflows/python_uv/test_packager.py new file mode 100644 index 000000000..fe5af5233 --- /dev/null +++ b/tests/unit/workflows/python_uv/test_packager.py @@ -0,0 +1,294 @@ +from unittest import TestCase +from unittest.mock import Mock, patch + +from aws_lambda_builders.architecture import X86_64, ARM64 +from aws_lambda_builders.workflows.python_uv.packager import SubprocessUv, UvRunner, PythonUvDependencyBuilder +from aws_lambda_builders.workflows.python_uv.exceptions import MissingUvError, UvInstallationError, UvBuildError +from aws_lambda_builders.workflows.python_uv.utils import UvConfig + + +class TestSubprocessUv(TestCase): + @patch("aws_lambda_builders.workflows.python_uv.packager.OSUtils") + def test_subprocess_uv_init_success(self, mock_osutils_class): + mock_osutils = Mock() + mock_osutils.which.return_value = "/usr/bin/uv" + mock_osutils_class.return_value = mock_osutils + + subprocess_uv = SubprocessUv() + self.assertEqual(subprocess_uv.uv_executable, "/usr/bin/uv") + + @patch("aws_lambda_builders.workflows.python_uv.packager.OSUtils") + def test_subprocess_uv_init_missing_uv(self, mock_osutils_class): + mock_osutils = Mock() + mock_osutils.which.return_value = None + mock_osutils_class.return_value = mock_osutils + + with self.assertRaises(MissingUvError): + SubprocessUv() + + @patch("aws_lambda_builders.workflows.python_uv.packager.OSUtils") + def test_run_uv_command_success(self, mock_osutils_class): + mock_osutils = Mock() + mock_osutils.which.return_value = "/usr/bin/uv" + mock_osutils.run_subprocess.return_value = (0, "success", "") + mock_osutils_class.return_value = mock_osutils + + subprocess_uv = SubprocessUv() + rc, stdout, stderr = subprocess_uv.run_uv_command(["--version"]) + + self.assertEqual(rc, 0) + self.assertEqual(stdout, "success") + self.assertEqual(stderr, "") + mock_osutils.run_subprocess.assert_called_once_with(["/usr/bin/uv", "--version"], cwd=None, env=None) + + +class TestUvRunner(TestCase): + def setUp(self): + self.mock_subprocess_uv = Mock() + self.mock_subprocess_uv.uv_executable = "/usr/bin/uv" + self.mock_osutils = Mock() + self.uv_runner = UvRunner(uv_subprocess=self.mock_subprocess_uv, osutils=self.mock_osutils) + + @patch("aws_lambda_builders.workflows.python_uv.packager.get_uv_version") + def test_uv_version_property(self, mock_get_version): + mock_get_version.return_value = "0.6.16" + + version = self.uv_runner.uv_version + self.assertEqual(version, "0.6.16") + mock_get_version.assert_called_once_with("/usr/bin/uv", self.mock_osutils) + + def test_sync_dependencies_success(self): + self.mock_subprocess_uv.run_uv_command.return_value = (0, "success", "") + + self.uv_runner.sync_dependencies( + manifest_path="/path/to/pyproject.toml", + target_dir="/target", + scratch_dir="/scratch", + python_version="3.9", + platform="linux", + architecture=X86_64, + ) + + # Verify UV sync command was called with correct arguments + args_called = self.mock_subprocess_uv.run_uv_command.call_args[0][0] + self.assertIn("sync", args_called) + self.assertIn("--python", args_called) + 
self.assertIn("3.9", args_called) + # Note: UV sync doesn't support --target, it syncs to the project environment + + def test_sync_dependencies_failure(self): + self.mock_subprocess_uv.run_uv_command.return_value = (1, "", "error message") + + with self.assertRaises(UvInstallationError): + self.uv_runner.sync_dependencies( + manifest_path="/path/to/pyproject.toml", target_dir="/target", scratch_dir="/scratch" + ) + + def test_install_requirements_success(self): + self.mock_subprocess_uv.run_uv_command.return_value = (0, "success", "") + + self.uv_runner.install_requirements( + requirements_path="/path/to/requirements.txt", + target_dir="/target", + scratch_dir="/scratch", + python_version="3.9", + platform="linux", + architecture=X86_64, + ) + + # Verify UV pip install command was called + args_called = self.mock_subprocess_uv.run_uv_command.call_args[0][0] + self.assertIn("pip", args_called) + self.assertIn("install", args_called) + self.assertIn("-r", args_called) + self.assertIn("/path/to/requirements.txt", args_called) + + def test_install_requirements_failure(self): + self.mock_subprocess_uv.run_uv_command.return_value = (1, "", "error message") + + with self.assertRaises(UvInstallationError): + self.uv_runner.install_requirements( + requirements_path="/path/to/requirements.txt", target_dir="/target", scratch_dir="/scratch" + ) + + +class TestPythonUvDependencyBuilder(TestCase): + def setUp(self): + self.mock_osutils = Mock() + self.mock_uv_runner = Mock() + self.builder = PythonUvDependencyBuilder( + osutils=self.mock_osutils, runtime="python3.9", uv_runner=self.mock_uv_runner + ) + + def test_extract_python_version(self): + self.assertEqual(self.builder._extract_python_version("python3.9"), "3.9") + self.assertEqual(self.builder._extract_python_version("python3.11"), "3.11") + self.assertEqual(self.builder._extract_python_version("3.10"), "3.10") + + # Runtime is required - should raise error when None + with self.assertRaises(UvBuildError) as context: + self.builder._extract_python_version(None) + self.assertIn("Runtime is required", str(context.exception)) + + # Empty string should also raise error + with self.assertRaises(UvBuildError) as context: + self.builder._extract_python_version("") + self.assertIn("Runtime is required", str(context.exception)) + + def test_build_from_lock_file(self): + self.builder._build_from_lock_file( + lock_path="/path/to/uv.lock", + target_dir="/target", + scratch_dir="/scratch", + python_version="3.9", + architecture=X86_64, + config=UvConfig(), + ) + + self.mock_uv_runner.sync_dependencies.assert_called_once() + + def test_build_from_requirements(self): + self.builder._build_from_requirements( + requirements_path="/path/to/requirements.txt", + target_dir="/target", + scratch_dir="/scratch", + python_version="3.9", + architecture=X86_64, + config=UvConfig(), + ) + + self.mock_uv_runner.install_requirements.assert_called_once() + + def test_build_dependencies_with_requirements_txt(self): + with patch("os.path.basename", return_value="requirements.txt"): + self.builder.build_dependencies( + artifacts_dir_path="/artifacts", + scratch_dir_path="/scratch", + manifest_path="/path/to/requirements.txt", + architecture=X86_64, + ) + + self.mock_uv_runner.install_requirements.assert_called_once() + + def test_build_dependencies_with_uv_lock_standalone_fails(self): + """Test that uv.lock as standalone manifest fails (requires pyproject.toml).""" + with patch("os.path.basename", return_value="uv.lock"): + with self.assertRaises(UvBuildError) as context: + 
self.builder.build_dependencies( + artifacts_dir_path="/artifacts", + scratch_dir_path="/scratch", + manifest_path="/path/to/uv.lock", + architecture=X86_64, + ) + + self.assertIn("Unsupported manifest file: uv.lock", str(context.exception)) + + # Should not call any UV operations for unsupported manifest + self.mock_uv_runner.sync_dependencies.assert_not_called() + self.mock_uv_runner.install_requirements.assert_not_called() + + def test_build_dependencies_pyproject_with_uv_lock(self): + """Test that pyproject.toml with uv.lock present uses lock-based build.""" + with patch("os.path.basename", return_value="pyproject.toml"), patch( + "os.path.dirname", return_value="/path/to" + ), patch("os.path.exists") as mock_exists: + + # Mock that uv.lock exists alongside pyproject.toml + mock_exists.return_value = True + + self.builder.build_dependencies( + artifacts_dir_path="/artifacts", + scratch_dir_path="/scratch", + manifest_path="/path/to/pyproject.toml", + architecture=X86_64, + ) + + # Should use sync_dependencies (lock-based build) + self.mock_uv_runner.sync_dependencies.assert_called_once() + self.mock_uv_runner.install_requirements.assert_not_called() + + # Verify it checked for uv.lock in the right location + mock_exists.assert_called_with("/path/to/uv.lock") + + def test_build_dependencies_pyproject_without_uv_lock(self): + """Test that pyproject.toml without uv.lock uses standard pyproject build.""" + with patch("os.path.basename", return_value="pyproject.toml"), patch( + "os.path.dirname", return_value="/path/to" + ), patch("os.path.exists") as mock_exists, patch.object( + self.builder, "_export_pyproject_to_requirements", return_value="/temp/requirements.txt" + ): + + # Mock that uv.lock does NOT exist alongside pyproject.toml + mock_exists.return_value = False + + self.builder.build_dependencies( + artifacts_dir_path="/artifacts", + scratch_dir_path="/scratch", + manifest_path="/path/to/pyproject.toml", + architecture=X86_64, + ) + + # Should use install_requirements (standard pyproject build) + self.mock_uv_runner.install_requirements.assert_called_once() + self.mock_uv_runner.sync_dependencies.assert_not_called() + + # Verify it checked for uv.lock in the right location + mock_exists.assert_called_with("/path/to/uv.lock") + + def test_build_dependencies_configures_cache_dir(self): + """Test that build_dependencies properly configures UV cache directory in scratch_dir.""" + with patch("os.path.basename", return_value="requirements.txt"): + self.builder.build_dependencies( + artifacts_dir_path="/artifacts", + scratch_dir_path="/scratch", + manifest_path="/path/to/requirements.txt", + architecture=X86_64, + ) + + # Verify that install_requirements was called with scratch_dir + call_args = self.mock_uv_runner.install_requirements.call_args + self.assertEqual(call_args[1]["scratch_dir"], "/scratch") + + # Verify that makedirs was called to create cache directory + self.mock_osutils.makedirs.assert_called() + + def test_build_dependencies_respects_existing_cache_dir(self): + """Test that existing cache_dir in config is respected.""" + from aws_lambda_builders.workflows.python_uv.utils import UvConfig + + config = UvConfig(cache_dir="/custom/cache") + + with patch("os.path.basename", return_value="requirements.txt"): + self.builder.build_dependencies( + artifacts_dir_path="/artifacts", + scratch_dir_path="/scratch", + manifest_path="/path/to/requirements.txt", + architecture=X86_64, + config=config, + ) + + # Verify that the custom cache directory is preserved + call_args = 
self.mock_uv_runner.install_requirements.call_args + passed_config = call_args[1]["config"] + self.assertEqual(passed_config.cache_dir, "/custom/cache") + + def test_is_requirements_file_standard_names(self): + # Test standard requirements file name + self.assertTrue(self.builder._is_requirements_file("requirements.txt")) + + def test_is_requirements_file_environment_specific(self): + # Test environment-specific requirements files + self.assertTrue(self.builder._is_requirements_file("requirements-dev.txt")) + self.assertTrue(self.builder._is_requirements_file("requirements-test.txt")) + self.assertTrue(self.builder._is_requirements_file("requirements-prod.txt")) + self.assertTrue(self.builder._is_requirements_file("requirements-staging.txt")) + + def test_is_requirements_file_invalid_names(self): + # Test invalid requirements file names + self.assertFalse(self.builder._is_requirements_file("requirements")) + self.assertFalse(self.builder._is_requirements_file("requirements.in")) + self.assertFalse(self.builder._is_requirements_file("requirements.py")) + self.assertFalse(self.builder._is_requirements_file("my-requirements.txt")) + self.assertFalse(self.builder._is_requirements_file("requirements.txt.bak")) + self.assertFalse(self.builder._is_requirements_file("requirements-")) + self.assertFalse(self.builder._is_requirements_file("requirements-.txt")) diff --git a/tests/unit/workflows/python_uv/test_utils.py b/tests/unit/workflows/python_uv/test_utils.py new file mode 100644 index 000000000..0c8ba5cfb --- /dev/null +++ b/tests/unit/workflows/python_uv/test_utils.py @@ -0,0 +1,156 @@ +import os +import tempfile +from unittest import TestCase +from unittest.mock import patch, Mock + +from aws_lambda_builders.workflows.python_uv.utils import ( + OSUtils, + detect_uv_manifest, + get_uv_version, + UvConfig, +) + + +class TestOSUtils(TestCase): + def setUp(self): + self.osutils = OSUtils() + + def test_which_existing_command(self): + # Test with a command that should exist on most systems + result = self.osutils.which("ls") + self.assertIsNotNone(result) + self.assertTrue(os.path.exists(result)) + + def test_which_nonexistent_command(self): + result = self.osutils.which("nonexistent_command_12345") + self.assertIsNone(result) + + def test_run_subprocess_success(self): + rc, stdout, stderr = self.osutils.run_subprocess(["echo", "hello"]) + self.assertEqual(rc, 0) + self.assertEqual(stdout.strip(), "hello") + self.assertEqual(stderr, "") + + def test_run_subprocess_failure(self): + rc, stdout, stderr = self.osutils.run_subprocess(["false"]) + self.assertEqual(rc, 1) + + +class TestDetectUvManifest(TestCase): + def test_detect_uv_manifest_no_files(self): + with tempfile.TemporaryDirectory() as temp_dir: + result = detect_uv_manifest(temp_dir) + self.assertIsNone(result) + + def test_detect_uv_manifest_requirements_txt(self): + with tempfile.TemporaryDirectory() as temp_dir: + req_path = os.path.join(temp_dir, "requirements.txt") + with open(req_path, "w") as f: + f.write("requests==2.28.0\n") + + result = detect_uv_manifest(temp_dir) + self.assertEqual(result, req_path) + + def test_detect_uv_manifest_pyproject_toml(self): + with tempfile.TemporaryDirectory() as temp_dir: + pyproject_path = os.path.join(temp_dir, "pyproject.toml") + with open(pyproject_path, "w") as f: + f.write("[project]\nname = 'test'\n") + + result = detect_uv_manifest(temp_dir) + self.assertEqual(result, pyproject_path) + + def test_detect_uv_manifest_pyproject_priority(self): + with tempfile.TemporaryDirectory() as temp_dir: + 
# Create multiple manifest files + pyproject_path = os.path.join(temp_dir, "pyproject.toml") + req_path = os.path.join(temp_dir, "requirements.txt") + + with open(pyproject_path, "w") as f: + f.write("[project]\nname = 'test'\n") + with open(req_path, "w") as f: + f.write("requests==2.28.0\n") + + result = detect_uv_manifest(temp_dir) + # pyproject.toml should have priority over requirements.txt + self.assertEqual(result, pyproject_path) + + def test_detect_uv_manifest_requirements_variants(self): + with tempfile.TemporaryDirectory() as temp_dir: + # Create requirements-dev.txt + req_dev_path = os.path.join(temp_dir, "requirements-dev.txt") + with open(req_dev_path, "w") as f: + f.write("pytest==7.0.0\n") + + result = detect_uv_manifest(temp_dir) + self.assertEqual(result, req_dev_path) + + +class TestGetUvVersion(TestCase): + def test_get_uv_version_success(self): + osutils_mock = Mock() + osutils_mock.run_subprocess.return_value = (0, "uv 0.6.16", "") + + result = get_uv_version("uv", osutils_mock) + self.assertEqual(result, "0.6.16") + + def test_get_uv_version_failure(self): + osutils_mock = Mock() + osutils_mock.run_subprocess.return_value = (1, "", "command not found") + + result = get_uv_version("uv", osutils_mock) + self.assertIsNone(result) + + +class TestUvConfig(TestCase): + def test_uv_config_defaults(self): + config = UvConfig() + args = config.to_uv_args() + self.assertEqual(args, []) + + def test_uv_config_with_index_url(self): + config = UvConfig(index_url="https://pypi.org/simple/") + args = config.to_uv_args() + self.assertIn("--index-url", args) + self.assertIn("https://pypi.org/simple/", args) + + def test_uv_config_with_extra_index_urls(self): + config = UvConfig(extra_index_urls=["https://extra1.com", "https://extra2.com"]) + args = config.to_uv_args() + self.assertIn("--extra-index-url", args) + self.assertIn("https://extra1.com", args) + self.assertIn("https://extra2.com", args) + + def test_uv_config_with_cache_dir(self): + config = UvConfig(cache_dir="/tmp/uv-cache") + args = config.to_uv_args() + self.assertIn("--cache-dir", args) + self.assertIn("/tmp/uv-cache", args) + + def test_uv_config_no_cache(self): + config = UvConfig(no_cache=True) + args = config.to_uv_args() + self.assertIn("--no-cache", args) + + def test_uv_config_prerelease(self): + config = UvConfig(prerelease="allow") + args = config.to_uv_args() + self.assertIn("--prerelease", args) + self.assertIn("allow", args) + + def test_uv_config_resolution(self): + config = UvConfig(resolution="lowest") + args = config.to_uv_args() + self.assertIn("--resolution", args) + self.assertIn("lowest", args) + + def test_uv_config_exclude_newer(self): + config = UvConfig(exclude_newer="2023-01-01") + args = config.to_uv_args() + self.assertIn("--exclude-newer", args) + self.assertIn("2023-01-01", args) + + def test_uv_config_generate_hashes(self): + config = UvConfig(generate_hashes=True) + args = config.to_uv_args() + self.assertIn("--generate-hashes", args) diff --git a/tests/unit/workflows/python_uv/test_workflow.py b/tests/unit/workflows/python_uv/test_workflow.py new file mode 100644 index 000000000..3be8585d6 --- /dev/null +++ b/tests/unit/workflows/python_uv/test_workflow.py @@ -0,0 +1,185 @@ +from unittest import TestCase +from unittest.mock import patch, ANY, Mock + +from parameterized import parameterized_class + +from aws_lambda_builders.actions import CopySourceAction, CleanUpAction, LinkSourceAction +from aws_lambda_builders.path_resolver import PathResolver +from 
aws_lambda_builders.workflows.python_uv.utils import OSUtils, EXPERIMENTAL_FLAG_BUILD_PERFORMANCE +from aws_lambda_builders.workflows.python_uv.workflow import PythonUvWorkflow +from aws_lambda_builders.workflows.python_uv.actions import PythonUvBuildAction, CopyDependenciesAction + + +@parameterized_class( + ("experimental_flags",), + [ + ([]), + ([EXPERIMENTAL_FLAG_BUILD_PERFORMANCE]), + ], +) +class TestPythonUvWorkflow(TestCase): + experimental_flags = [] + + def setUp(self): + self.osutils = OSUtils() + self.osutils_mock = Mock(spec=self.osutils) + self.osutils_mock.file_exists.return_value = True + self.workflow = PythonUvWorkflow( + "source", + "artifacts", + "scratch_dir", + "manifest", + runtime="python3.9", + osutils=self.osutils_mock, + experimental_flags=self.experimental_flags, + ) + self.python_major_version = "3" + self.python_minor_version = "9" + self.language = "python" + + def test_workflow_sets_up_actions(self): + self.assertEqual(len(self.workflow.actions), 2) + self.assertIsInstance(self.workflow.actions[0], PythonUvBuildAction) + self.assertIsInstance(self.workflow.actions[1], CopySourceAction) + + def test_workflow_sets_up_actions_without_requirements(self): + self.osutils_mock.file_exists.return_value = False + self.workflow = PythonUvWorkflow( + "source", + "artifacts", + "scratch_dir", + "manifest", + runtime="python3.9", + osutils=self.osutils_mock, + experimental_flags=self.experimental_flags, + ) + + self.assertEqual(len(self.workflow.actions), 1) + self.assertIsInstance(self.workflow.actions[0], CopySourceAction) + + def test_workflow_sets_up_actions_with_dependencies_dir(self): + self.workflow = PythonUvWorkflow( + "source", + "artifacts", + "scratch_dir", + "manifest", + runtime="python3.9", + osutils=self.osutils_mock, + dependencies_dir="deps", + experimental_flags=self.experimental_flags, + ) + + self.assertEqual(len(self.workflow.actions), 4) + self.assertIsInstance(self.workflow.actions[0], CleanUpAction) + self.assertIsInstance(self.workflow.actions[1], PythonUvBuildAction) + self.assertIsInstance(self.workflow.actions[2], CopyDependenciesAction) + self.assertIsInstance(self.workflow.actions[3], CopySourceAction) + + def test_workflow_sets_up_actions_without_download_dependencies_and_dependencies_dir(self): + self.workflow = PythonUvWorkflow( + "source", + "artifacts", + "scratch_dir", + "manifest", + runtime="python3.9", + osutils=self.osutils_mock, + download_dependencies=False, + experimental_flags=self.experimental_flags, + ) + + self.assertEqual(len(self.workflow.actions), 1) + self.assertIsInstance(self.workflow.actions[0], CopySourceAction) + + def test_workflow_sets_up_actions_without_combine_dependencies(self): + self.workflow = PythonUvWorkflow( + "source", + "artifacts", + "scratch_dir", + "manifest", + runtime="python3.9", + osutils=self.osutils_mock, + dependencies_dir="deps", + combine_dependencies=False, + experimental_flags=self.experimental_flags, + ) + + self.assertEqual(len(self.workflow.actions), 3) + self.assertIsInstance(self.workflow.actions[0], CleanUpAction) + self.assertIsInstance(self.workflow.actions[1], PythonUvBuildAction) + self.assertIsInstance(self.workflow.actions[2], CopySourceAction) + + def test_workflow_name(self): + self.assertEqual(self.workflow.NAME, "PythonUvBuilder") + + def test_workflow_capability(self): + self.assertEqual(self.workflow.CAPABILITY.language, "python") + self.assertEqual(self.workflow.CAPABILITY.dependency_manager, "uv") + self.assertEqual(self.workflow.CAPABILITY.application_framework, None) 
+ + def test_workflow_excluded_files(self): + expected_excluded_files = ( + ".aws-sam", + ".chalice", + ".git", + ".gitignore", + "*.pyc", + "__pycache__", + "*.so", + ".Python", + "*.egg-info", + "*.egg", + "pip-log.txt", + "pip-delete-this-directory.txt", + "htmlcov", + ".tox", + ".nox", + ".coverage", + ".cache", + ".pytest_cache", + ".python-version", + ".mypy_cache", + ".dmypy.json", + ".pyre", + ".env", + ".venv", + "venv", + "venv.bak", + "env.bak", + "ENV", + "env", + ".uv-cache", + "uv.lock.bak", + ".vscode", + ".idea", + ) + self.assertEqual(self.workflow.EXCLUDED_FILES, expected_excluded_files) + + def test_get_resolvers(self): + resolvers = self.workflow.get_resolvers() + self.assertEqual(len(resolvers), 1) + self.assertIsInstance(resolvers[0], PathResolver) + + def test_get_validators(self): + validators = self.workflow.get_validators() + # UV has built-in Python version handling, no external validators needed + self.assertEqual(len(validators), 0) + + @patch("aws_lambda_builders.workflows.python_uv.workflow.detect_uv_manifest") + def test_workflow_auto_detects_manifest(self, mock_detect): + mock_detect.return_value = "/path/to/pyproject.toml" + self.osutils_mock.file_exists.return_value = False # Original manifest doesn't exist + + workflow = PythonUvWorkflow( + "source", + "artifacts", + "scratch_dir", + "nonexistent_manifest", + runtime="python3.9", + osutils=self.osutils_mock, + experimental_flags=self.experimental_flags, + ) + + mock_detect.assert_called_once_with("source") + # Should have UV build action since manifest was detected + self.assertEqual(len(workflow.actions), 2) + self.assertIsInstance(workflow.actions[0], PythonUvBuildAction)
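
For reviewers who want to exercise the new `python_uv` workflow end to end outside the test suite, here is a minimal sketch mirroring the integration tests above. It assumes this branch of `aws_lambda_builders` is installed and that `uv` is available on `PATH`; the temporary directories, the `six==1.16.0` pin, and the `python3.13` runtime are placeholders taken from the test data, not requirements of the workflow.

```python
import os
import tempfile

from aws_lambda_builders.builder import LambdaBuilder

# Scratch layout mirroring the integration test fixtures.
source_dir = tempfile.mkdtemp()
artifacts_dir = tempfile.mkdtemp()
scratch_dir = tempfile.mkdtemp()

# Same manifest content as testdata/requirements-simple.txt.
manifest_path = os.path.join(source_dir, "requirements.txt")
with open(manifest_path, "w") as f:
    f.write("six==1.16.0\n")

# Minimal handler, equivalent to testdata/main.py.
with open(os.path.join(source_dir, "main.py"), "w") as f:
    f.write(
        "import six\n\n\n"
        "def lambda_handler(event, context):\n"
        "    return {\"statusCode\": 200, \"body\": six.__version__}\n"
    )

# Capability registered by the new workflow: language "python", dependency manager "uv".
builder = LambdaBuilder(language="python", dependency_manager="uv", application_framework=None)
builder.build(
    source_dir,
    artifacts_dir,
    scratch_dir,
    manifest_path,
    runtime="python3.13",
)

# The artifacts dir should now contain main.py plus six.py and its dist-info directory.
print(sorted(os.listdir(artifacts_dir)))
```

With a `pyproject.toml` (and optionally a `uv.lock`) in `source_dir` instead of `requirements.txt`, the same call path is taken, but the builder routes through `uv sync` / the lock-aware branch covered by `test_build_dependencies_pyproject_with_uv_lock` above.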