diff --git a/py-rattler-build/rattler_build/__init__.py b/py-rattler-build/rattler_build/__init__.py index 1c788a726..a49a9efc8 100644 --- a/py-rattler-build/rattler_build/__init__.py +++ b/py-rattler-build/rattler_build/__init__.py @@ -22,6 +22,36 @@ SelectorConfig, ) +from .variant_config import ( + Pin, + VariantConfig, +) + +from .sandbox_config import ( + SandboxConfig, +) + +from .directories import ( + Directories, +) + +from .packaging_settings import ( + PackagingConfig, + ArchiveType, +) + +from .debug import ( + Debug, +) + +from .test_configuration import ( + TestConfig, +) + +from .build_configuration import ( + BuildConfig, +) + from . import recipe_generation @@ -49,6 +79,15 @@ "TestType", "TestTypeEnum", "SelectorConfig", + "Pin", + "VariantConfig", + "SandboxConfig", + "Directories", + "PackagingConfig", + "ArchiveType", + "Debug", + "TestConfig", + "BuildConfig", ] diff --git a/py-rattler-build/rattler_build/build_configuration.py b/py-rattler-build/rattler_build/build_configuration.py new file mode 100644 index 000000000..e154d4702 --- /dev/null +++ b/py-rattler-build/rattler_build/build_configuration.py @@ -0,0 +1,252 @@ +"""Python bindings for BuildConfig.""" + +from typing import Any, Dict, List, Optional +from .rattler_build import BuildConfig as _BuildConfig +from .debug import Debug +from .directories import Directories +from .packaging_settings import PackagingConfig +from .sandbox_config import SandboxConfig + + +class BuildConfig(_BuildConfig): + """ + Complete configuration for building a package. + + BuildConfig contains all settings needed to build a conda package, + including platforms, variants, channels, directories, and build settings. + + This is typically created internally during the build process and exposed + for inspection. + + Note: + This class is read-only. Properties cannot be modified from Python. + + Examples: + Access build configuration (from build context): + >>> config = get_build_config() # From build context + >>> print(f"Target: {config.target_platform}") + >>> print(f"Hash: {config.hash}") + >>> if config.cross_compilation(): + ... print("Cross-compiling!") + >>> print(f"Channels: {config.channels}") + """ + + @property + def target_platform(self) -> str: + """ + The target platform for the build. + + The platform for which the package is being built. + + Returns: + Target platform string (e.g., "linux-64", "osx-arm64") + """ + ... + + @property + def host_platform(self) -> Dict[str, Any]: + """ + The host platform with virtual packages. + + The platform where the package will run (usually same as target, + but different for noarch packages). + + Returns: + Dictionary with 'platform' (str) and 'virtual_packages' (list) keys + """ + ... + + @property + def build_platform(self) -> Dict[str, Any]: + """ + The build platform with virtual packages. + + The platform on which the build is running. + + Returns: + Dictionary with 'platform' (str) and 'virtual_packages' (list) keys + """ + ... + + @property + def variant(self) -> Dict[str, Any]: + """ + The variant configuration for this build. + + The selected variant (e.g., python version, numpy version). + + Returns: + Dictionary mapping variant keys to their values + """ + ... + + @property + def hash(self) -> str: + """ + The computed hash of the variant configuration. + + Returns: + Hash string (e.g., "h1234567_0") + """ + ... + + @property + def directories(self) -> Directories: + """ + The build directories. + + Returns: + Directories instance with all build paths + """ + ... 
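The read-only surface of `BuildConfig` is easiest to see in one place. Below is a minimal inspection sketch; `summarize` is a hypothetical helper, and (as in the class docstring) `get_build_config()` is illustrative only — a `BuildConfig` is handed to you by the build machinery rather than constructed from Python:

```python
# Sketch only: `config` would come from a build context; rattler-build does
# not expose a BuildConfig constructor from Python.
from rattler_build import BuildConfig


def summarize(config: BuildConfig) -> str:
    lines = [
        f"target platform : {config.target_platform}",
        f"build platform  : {config.build_platform_name()}",
        f"variant hash    : {config.hash}",
        f"channels        : {', '.join(config.channels)}",
    ]
    if config.cross_compilation():
        # Target differs from the build platform, e.g. building an
        # osx-arm64 package on an osx-64 machine.
        lines.append("cross-compiling : yes")
    return "\n".join(lines)
```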
+ + @property + def channels(self) -> List[str]: + """ + The channels used for resolving dependencies. + + Returns: + List of channel URLs as strings + """ + ... + + @property + def channel_priority(self) -> str: + """ + The channel priority strategy. + + Returns: + Channel priority as a string (e.g., "Strict", "Flexible") + """ + ... + + @property + def solve_strategy(self) -> str: + """ + The solver strategy for resolving dependencies. + + Returns: + Solve strategy as a string + """ + ... + + @property + def timestamp(self) -> str: + """ + The build timestamp. + + Returns: + ISO 8601 timestamp string + """ + ... + + @property + def subpackages(self) -> Dict[str, Dict[str, Any]]: + """ + All subpackages from this output or other outputs from the same recipe. + + Returns: + Dictionary mapping package names to their identifiers + """ + ... + + @property + def packaging_settings(self) -> PackagingConfig: + """ + The packaging settings for this build. + + Returns: + PackagingConfig instance + """ + ... + + @property + def store_recipe(self) -> bool: + """ + Whether the recipe should be stored in the package. + + Returns: + True if recipe is stored, False otherwise + """ + ... + + @property + def force_colors(self) -> bool: + """ + Whether colors are forced in build script output. + + Returns: + True if colors are forced + """ + ... + + @property + def sandbox_config(self) -> Optional[SandboxConfig]: + """ + The sandbox configuration for this build. + + Returns: + SandboxConfig instance, or None if not configured + """ + ... + + @property + def debug(self) -> Debug: + """ + The debug configuration. + + Returns: + Debug instance + """ + ... + + @property + def exclude_newer(self) -> Optional[str]: + """ + Timestamp for excluding newer packages. + + Packages newer than this date are excluded from the solver. + + Returns: + ISO 8601 timestamp string, or None if not set + """ + ... + + def cross_compilation(self) -> bool: + """ + Check if this is a cross-compilation build. + + Returns: + True if target platform differs from build platform + """ + ... + + def target_platform_name(self) -> str: + """ + Get the target platform name only (without virtual packages). + + Returns: + Platform string + """ + ... + + def host_platform_name(self) -> str: + """ + Get the host platform name only (without virtual packages). + + Returns: + Platform string + """ + ... + + def build_platform_name(self) -> str: + """ + Get the build platform name only (without virtual packages). + + Returns: + Platform string + """ + ... + + +__all__ = ["BuildConfig"] diff --git a/py-rattler-build/rattler_build/debug.py b/py-rattler-build/rattler_build/debug.py new file mode 100644 index 000000000..186affd12 --- /dev/null +++ b/py-rattler-build/rattler_build/debug.py @@ -0,0 +1,108 @@ +"""Python bindings for Debug.""" + +from .rattler_build import Debug as _Debug + + +class Debug(_Debug): + """ + Control debug output during builds. + + Debug is a simple wrapper around a boolean that enables or disables + debug output during package builds. 
+ + Examples: + Create with debug enabled: + >>> debug = Debug(True) + >>> assert debug.is_enabled() + + Create with debug disabled: + >>> debug = Debug(False) + >>> assert not debug.is_enabled() + + Use factory methods: + >>> debug = Debug.enabled() + >>> debug = Debug.disabled() + + Toggle debug mode: + >>> debug = Debug(False) + >>> debug.enable() + >>> assert debug.is_enabled() + >>> debug.toggle() + >>> assert not debug.is_enabled() + + Use as boolean: + >>> debug = Debug(True) + >>> if debug: + ... print("Debug is enabled") + """ + + def __init__(self, enabled: bool = False) -> None: + """ + Create a new Debug instance. + + Args: + enabled: Whether debug output is enabled (default: False) + """ + ... + + @staticmethod + def enabled() -> "Debug": + """ + Create a Debug instance with debug enabled. + + Returns: + Debug instance with debug enabled + """ + ... + + @staticmethod + def disabled() -> "Debug": + """ + Create a Debug instance with debug disabled. + + Returns: + Debug instance with debug disabled + """ + ... + + def is_enabled(self) -> bool: + """ + Check if debug output is enabled. + + Returns: + True if debug output is enabled, False otherwise + """ + ... + + def set_enabled(self, enabled: bool) -> None: + """ + Set whether debug output is enabled. + + Args: + enabled: Whether to enable debug output + """ + ... + + def enable(self) -> None: + """Enable debug output.""" + ... + + def disable(self) -> None: + """Disable debug output.""" + ... + + def toggle(self) -> None: + """Toggle debug output (enabled ↔ disabled).""" + ... + + def __bool__(self) -> bool: + """ + Boolean conversion. + + Returns: + True if debug is enabled, False otherwise + """ + ... + + +__all__ = ["Debug"] diff --git a/py-rattler-build/rattler_build/directories.py b/py-rattler-build/rattler_build/directories.py new file mode 100644 index 000000000..96662c2e2 --- /dev/null +++ b/py-rattler-build/rattler_build/directories.py @@ -0,0 +1,113 @@ +"""Python bindings for Directories.""" + +from pathlib import Path +from .rattler_build import Directories as _Directories + + +class Directories(_Directories): + """ + Directory structure used during package builds. + + Represents the various paths and directories used during the conda package + build process, including recipe, cache, work, host and build directories. + + Note: + This class is typically created internally during the build process. + The properties are read-only from Python. + + Examples: + Access directory information (from a build context): + >>> dirs = get_build_directories() # From a build + >>> print(f"Recipe: {dirs.recipe_dir}") + >>> print(f"Work: {dirs.work_dir}") + >>> print(f"Host prefix: {dirs.host_prefix}") + >>> print(f"Output: {dirs.output_dir}") + """ + + @property + def recipe_dir(self) -> Path: + """ + The directory where the recipe is located. + + Returns: + Path to the recipe directory + """ + ... + + @property + def recipe_path(self) -> Path: + """ + The path to the recipe file itself. + + Returns: + Path to the recipe file + """ + ... + + @property + def cache_dir(self) -> Path: + """ + The folder where the build cache is located. + + Returns: + Path to the cache directory + """ + ... + + @property + def host_prefix(self) -> Path: + """ + The directory where host dependencies are installed. + + This is exposed as $PREFIX (or %PREFIX% on Windows) in the build script. + + Returns: + Path to the host prefix directory + """ + ... 
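Since every property on `Directories` is a `pathlib.Path`, normal path operations apply directly. A small sketch — the `dirs` instance is assumed to come from a build context, as in the class docstring, and the remaining path properties are documented below:

```python
from rattler_build import Directories


def describe(dirs: Directories) -> None:
    # Each property is a pathlib.Path, so .name, .parent, "/" etc. all work.
    print(f"recipe : {dirs.recipe_dir} (file: {dirs.recipe_path.name})")
    print(f"cache  : {dirs.cache_dir}")
    print(f"work   : {dirs.work_dir}")
    # $PREFIX in the build script points at the host prefix:
    print(f"PREFIX = {dirs.host_prefix}")
```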
+ + @property + def build_prefix(self) -> Path: + """ + The directory where build dependencies are installed. + + This is exposed as $BUILD_PREFIX (or %BUILD_PREFIX% on Windows) in the + build script. + + Returns: + Path to the build prefix directory + """ + ... + + @property + def work_dir(self) -> Path: + """ + The directory where the source code is copied to and built from. + + Returns: + Path to the work directory + """ + ... + + @property + def build_dir(self) -> Path: + """ + The parent directory of host, build and work directories. + + Returns: + Path to the build directory + """ + ... + + @property + def output_dir(self) -> Path: + """ + The output directory or local channel directory where packages are written. + + Returns: + Path to the output directory + """ + ... + + +__all__ = ["Directories"] diff --git a/py-rattler-build/rattler_build/packaging_settings.py b/py-rattler-build/rattler_build/packaging_settings.py new file mode 100644 index 000000000..3d6eaf9c9 --- /dev/null +++ b/py-rattler-build/rattler_build/packaging_settings.py @@ -0,0 +1,169 @@ +"""Python bindings for PackagingConfig.""" + +from enum import Enum +from .rattler_build import ( + PackagingConfig as _PackagingConfig, + ArchiveType as _ArchiveType, +) + + +class ArchiveType(Enum): + """ + Archive format for conda packages. + + Attributes: + TarBz2: Traditional .tar.bz2 format + Conda: Modern .conda format (recommended) + """ + + TarBz2 = _ArchiveType.TarBz2 + Conda = _ArchiveType.Conda + + +class PackagingConfig(_PackagingConfig): + """ + Configuration for package format and compression. + + Controls the archive format (.tar.bz2 or .conda) and compression level + when creating conda packages. + + Examples: + Create with defaults: + >>> settings = PackagingConfig.conda() + >>> settings = PackagingConfig.tar_bz2() + + Create with custom compression: + >>> settings = PackagingConfig(ArchiveType.Conda, compression_level=15) + >>> settings = PackagingConfig.conda(compression_level=10) + + Modify settings: + >>> settings = PackagingConfig.conda() + >>> settings.compression_level = 18 + >>> settings.archive_type = ArchiveType.TarBz2 + + Check format: + >>> if settings.is_conda(): + ... print(f"Using {settings.extension()} format") + """ + + def __init__( + self, + archive_type: ArchiveType, + compression_level: int | None = None, + ) -> None: + """ + Create a new PackagingConfig. + + Args: + archive_type: The archive format (TarBz2 or Conda) + compression_level: Compression level + - For tar.bz2: 1-9 (default 9) + - For conda: -7 to 22 (default 22) + - Higher values = better compression but slower + + Raises: + ValueError: If compression_level is out of valid range + """ + ... + + @staticmethod + def tar_bz2(compression_level: int = 9) -> "PackagingConfig": + """ + Create PackagingConfig for tar.bz2 format. + + Args: + compression_level: Compression level (1-9, default 9) + + Returns: + PackagingConfig configured for tar.bz2 + + Raises: + ValueError: If compression_level is not 1-9 + """ + ... + + @staticmethod + def conda(compression_level: int = 22) -> "PackagingConfig": + """ + Create PackagingConfig for conda format (recommended). + + The .conda format is faster to extract and provides better compression + than tar.bz2. It is the recommended format for new packages. 
+ + Args: + compression_level: Compression level (-7 to 22, default 22) + - Negative values: faster compression, larger files + - Positive values: slower compression, smaller files + - 22: maximum compression (recommended) + + Returns: + PackagingConfig configured for .conda format + + Raises: + ValueError: If compression_level is not -7 to 22 + """ + ... + + @property + def archive_type(self) -> ArchiveType: + """The archive format (TarBz2 or Conda).""" + ... + + @archive_type.setter + def archive_type(self, value: ArchiveType) -> None: + """Set the archive format.""" + ... + + @property + def compression_level(self) -> int: + """ + The compression level. + + Valid ranges: + - tar.bz2: 1-9 + - conda: -7 to 22 + """ + ... + + @compression_level.setter + def compression_level(self, value: int) -> None: + """ + Set the compression level. + + Args: + value: Compression level (validated based on archive_type) + + Raises: + ValueError: If value is out of range for the current archive type + """ + ... + + def extension(self) -> str: + """ + Get the file extension for the current archive type. + + Returns: + ".tar.bz2" or ".conda" + """ + ... + + def is_tar_bz2(self) -> bool: + """ + Check if this is using the tar.bz2 format. + + Returns: + True if using tar.bz2 format + """ + ... + + def is_conda(self) -> bool: + """ + Check if this is using the conda format. + + Returns: + True if using conda format + """ + ... + + +__all__ = ["PackagingConfig", "ArchiveType"] diff --git a/py-rattler-build/rattler_build/sandbox_config.py b/py-rattler-build/rattler_build/sandbox_config.py new file mode 100644 index 000000000..82d2e1e4b --- /dev/null +++ b/py-rattler-build/rattler_build/sandbox_config.py @@ -0,0 +1,151 @@ +"""Python bindings for SandboxConfig.""" + +from pathlib import Path +from typing import List, Optional +from .rattler_build import SandboxConfig as _SandboxConfig + + +class SandboxConfig(_SandboxConfig): + """ + Configuration for build sandboxing and isolation. + + Controls network access and filesystem permissions during package builds. + + Examples: + Create a basic sandbox configuration: + >>> config = SandboxConfig( + ... allow_network=False, + ... read=["/usr", "/etc"], + ... read_execute=["/bin", "/usr/bin"], + ... read_write=["/tmp"] + ... ) + + Use platform defaults: + >>> macos_config = SandboxConfig.for_macos() + >>> linux_config = SandboxConfig.for_linux() + + Modify permissions: + >>> config = SandboxConfig.for_linux() + >>> config.allow_network = True + >>> config.add_read_write(Path("/my/custom/path")) + """ + + def __init__( + self, + allow_network: bool = False, + read: Optional[List[Path]] = None, + read_execute: Optional[List[Path]] = None, + read_write: Optional[List[Path]] = None, + ) -> None: + """ + Create a new SandboxConfiguration. + + Args: + allow_network: Whether to allow network access during the build + read: List of paths that can be read + read_execute: List of paths that can be read and executed + read_write: List of paths that can be read and written + """ + ... + + @property + def allow_network(self) -> bool: + """Whether network access is allowed.""" + ... + + @allow_network.setter + def allow_network(self, value: bool) -> None: + """Set whether network access is allowed.""" + ... + + @property + def read(self) -> List[Path]: + """List of read-only paths.""" + ... + + @read.setter + def read(self, value: List[Path]) -> None: + """Set the list of read-only paths.""" + ... 
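A hedged construction sketch for the class above — the paths are illustrative placeholders, not required values; pick ones that match your build environment:

```python
from pathlib import Path

from rattler_build import SandboxConfig

# Offline build: no network, read-only system dirs, writable scratch space.
sandbox = SandboxConfig(
    allow_network=False,
    read=[Path("/usr"), Path("/etc")],
    read_execute=[Path("/bin"), Path("/usr/bin")],
    read_write=[Path("/tmp")],
)

# Property setters replace the whole list; append by rebuilding it.
sandbox.read = sandbox.read + [Path("/opt/data")]
```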
+ + @property + def read_execute(self) -> List[Path]: + """List of read-execute paths.""" + ... + + @read_execute.setter + def read_execute(self, value: List[Path]) -> None: + """Set the list of read-execute paths.""" + ... + + @property + def read_write(self) -> List[Path]: + """List of read-write paths.""" + ... + + @read_write.setter + def read_write(self, value: List[Path]) -> None: + """Set the list of read-write paths.""" + ... + + @staticmethod + def for_macos() -> "SandboxConfig": + """ + Create a default sandbox configuration for macOS. + + This configuration includes: + - Network access: disabled + - Read access: entire filesystem + - Read-execute: /bin, /usr/bin + - Read-write: /tmp, /var/tmp, $TMPDIR + + Returns: + A SandboxConfig configured for macOS + """ + ... + + @staticmethod + def for_linux() -> "SandboxConfig": + """ + Create a default sandbox configuration for Linux. + + This configuration includes: + - Network access: disabled + - Read access: entire filesystem + - Read-execute: /bin, /usr/bin, /lib*, /usr/lib* + - Read-write: /tmp, /dev/shm, $TMPDIR + + Returns: + A SandboxConfig configured for Linux + """ + ... + + def add_read(self, path: Path) -> None: + """ + Add a path to the read-only list. + + Args: + path: Path to add to the read-only list + """ + ... + + def add_read_execute(self, path: Path) -> None: + """ + Add a path to the read-execute list. + + Args: + path: Path to add to the read-execute list + """ + ... + + def add_read_write(self, path: Path) -> None: + """ + Add a path to the read-write list. + + Args: + path: Path to add to the read-write list + """ + ... + + +__all__ = ["SandboxConfig"] diff --git a/py-rattler-build/rattler_build/test_configuration.py b/py-rattler-build/rattler_build/test_configuration.py new file mode 100644 index 000000000..d319b6dde --- /dev/null +++ b/py-rattler-build/rattler_build/test_configuration.py @@ -0,0 +1,162 @@ +"""Python bindings for TestConfig.""" + +from pathlib import Path +from typing import List, Optional +from .rattler_build import TestConfig as _TestConfig +from .debug import Debug + + +class TestConfig(_TestConfig): + """ + Configuration for package testing. + + TestConfig controls the settings for testing conda packages, + including test environment location, platforms, channels, and solver settings. + + This is typically created internally during test runs and exposed for + inspection. + + Note: + This class is read-only. Properties cannot be modified from Python. + + Examples: + Access test configuration (from test context): + >>> config = get_test_config() # From test run + >>> print(f"Testing in: {config.test_prefix}") + >>> print(f"Target: {config.target_platform}") + >>> print(f"Channels: {config.channels}") + >>> if config.debug: + ... print("Debug mode enabled") + """ + + @property + def test_prefix(self) -> Path: + """ + The test prefix directory path. + + The directory where the test environment is created. + + Returns: + Path to the test prefix directory + """ + ... + + @property + def target_platform(self) -> Optional[str]: + """ + The target platform for the package. + + Returns: + Target platform string (e.g., "linux-64"), or None if not set + """ + ... + + @property + def host_platform(self) -> Optional[str]: + """ + The host platform for runtime dependencies. + + Returns: + Host platform string, or None if not set + """ + ... + + @property + def current_platform(self) -> str: + """ + The current platform running the tests. + + Returns: + Current platform string + """ + ... 
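As with `BuildConfig`, the read-only properties are easiest to digest in a single inspection helper. A sketch, assuming a `TestConfig` handed over by a test run (`get_test_config()` in the docstring above is illustrative, not a shipped function):

```python
from rattler_build import TestConfig


def describe_test(config: TestConfig) -> None:
    # target_platform and host_platform may be None when not explicitly set,
    # so fall back to the platform the tests are actually running on.
    target = config.target_platform or config.current_platform
    print(f"test prefix : {config.test_prefix}")
    print(f"platform    : {target}")
    print(f"channels    : {config.channels}")
```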
+ + @property + def keep_test_prefix(self) -> bool: + """ + Whether to keep the test prefix after the test completes. + + If True, the test environment directory is preserved for debugging. + If False, it's deleted after the test. + + Returns: + True if test prefix is kept, False if deleted + """ + ... + + @property + def test_index(self) -> Optional[int]: + """ + The index of the specific test to execute. + + If set, only this test will be run. If None, all tests are executed. + + Returns: + Test index (0-based), or None for all tests + """ + ... + + @property + def channels(self) -> List[str]: + """ + The channels used for resolving test dependencies. + + Returns: + List of channel URLs as strings + """ + ... + + @property + def channel_priority(self) -> str: + """ + The channel priority strategy. + + Returns: + Channel priority as a string (e.g., "Strict", "Flexible") + """ + ... + + @property + def solve_strategy(self) -> str: + """ + The solver strategy for resolving dependencies. + + Returns: + Solve strategy as a string + """ + ... + + @property + def output_dir(self) -> Path: + """ + The output directory for test artifacts. + + Returns: + Path to the output directory + """ + ... + + @property + def debug(self) -> Debug: + """ + The debug configuration. + + Returns: + Debug instance indicating if debug mode is enabled + """ + ... + + @property + def exclude_newer(self) -> Optional[str]: + """ + Timestamp for excluding newer packages. + + Packages released after this timestamp are excluded from the solver. + + Returns: + ISO 8601 timestamp string, or None if not set + """ + ... + + +__all__ = ["TestConfig"] diff --git a/py-rattler-build/rattler_build/variant_config.py b/py-rattler-build/rattler_build/variant_config.py new file mode 100644 index 000000000..2ab7124b5 --- /dev/null +++ b/py-rattler-build/rattler_build/variant_config.py @@ -0,0 +1,550 @@ +"""Object-oriented interface for variant configuration. + +This module provides Python wrappers around the Rust implementation of variant +configuration handling in rattler-build. Variant configurations allow you to +define build matrices for packages. + +Examples: + Create a simple variant configuration: + + >>> from rattler_build import Pin, VariantConfig + >>> pin = Pin(max_pin="x.x", min_pin="x.x.x") + >>> config = VariantConfig( + ... variants={ + ... "python": ["3.9", "3.10", "3.11"], + ... "numpy": ["1.21", "1.22"] + ... } + ... ) + >>> len(config.variants["python"]) + 3 + + Using zip_keys to create specific variant combinations: + + >>> config = VariantConfig( + ... zip_keys=[["python", "numpy"]], + ... variants={ + ... "python": ["3.9", "3.10", "3.11"], + ... "numpy": ["1.21", "1.22", "1.23"] + ... } + ... ) + >>> config.zip_keys + [['python', 'numpy']] + + Adding pin_run_as_build constraints: + + >>> config = VariantConfig( + ... pin_run_as_build={ + ... "python": Pin(max_pin="x.x"), + ... "numpy": Pin(max_pin="x.x", min_pin="x.x.x.x") + ... }, + ... variants={ + ... "python": ["3.9", "3.10"], + ... "numpy": ["1.21"] + ... } + ... ) + >>> "python" in config.pin_run_as_build + True +""" + +from typing import Dict, List, Optional, Any, Union +from pathlib import Path +from .rattler_build import PyPin, PyVariantConfig +from .recipe import SelectorConfig + + +class Pin: + """Pin configuration for a package version constraint. + + A Pin defines version constraints for packages using a pin syntax + (e.g., "x.x" means pin to the major.minor version). 
+ + Args: + max_pin: Maximum version pin pattern (e.g., "x.x" for major.minor) + min_pin: Minimum version pin pattern (e.g., "x.x.x" for major.minor.patch) + + Examples: + >>> pin = Pin(max_pin="x.x") + >>> pin.max_pin + 'x.x' + + >>> pin = Pin(max_pin="x.x", min_pin="x.x.x.x") + >>> pin.min_pin + 'x.x.x.x' + + >>> pin = Pin() + >>> pin.max_pin is None + True + + Modify pin after creation: + + >>> pin = Pin(max_pin="x.x") + >>> pin.min_pin = "x.x.x" + >>> pin.min_pin + 'x.x.x' + """ + + def __init__(self, max_pin: Optional[str] = None, min_pin: Optional[str] = None): + """Initialize a Pin with optional max and min pin patterns.""" + self._inner = PyPin(max_pin=max_pin, min_pin=min_pin) + + @property + def max_pin(self) -> Optional[str]: + """Get the maximum pin pattern. + + Returns: + The maximum pin pattern or None if not set. + + Examples: + >>> pin = Pin(max_pin="x.x") + >>> pin.max_pin + 'x.x' + """ + return self._inner.max_pin + + @max_pin.setter + def max_pin(self, value: Optional[str]) -> None: + """Set the maximum pin pattern. + + Args: + value: The maximum pin pattern or None. + + Examples: + >>> pin = Pin() + >>> pin.max_pin = "x.x.x" + >>> pin.max_pin + 'x.x.x' + """ + self._inner.max_pin = value + + @property + def min_pin(self) -> Optional[str]: + """Get the minimum pin pattern. + + Returns: + The minimum pin pattern or None if not set. + + Examples: + >>> pin = Pin(min_pin="x.x.x.x") + >>> pin.min_pin + 'x.x.x.x' + """ + return self._inner.min_pin + + @min_pin.setter + def min_pin(self, value: Optional[str]) -> None: + """Set the minimum pin pattern. + + Args: + value: The minimum pin pattern or None. + + Examples: + >>> pin = Pin() + >>> pin.min_pin = "x.x" + >>> pin.min_pin + 'x.x' + """ + self._inner.min_pin = value + + def __repr__(self) -> str: + """Return a string representation of the Pin. + + Examples: + >>> pin = Pin(max_pin="x.x", min_pin="x.x.x") + >>> repr(pin) + "Pin(max_pin='x.x', min_pin='x.x.x')" + """ + return f"Pin(max_pin={self.max_pin!r}, min_pin={self.min_pin!r})" + + def __eq__(self, other: object) -> bool: + """Check equality with another Pin. + + Examples: + >>> pin1 = Pin(max_pin="x.x") + >>> pin2 = Pin(max_pin="x.x") + >>> pin1 == pin2 + True + >>> pin3 = Pin(max_pin="x.x.x") + >>> pin1 == pin3 + False + """ + if not isinstance(other, Pin): + return NotImplemented + return self.max_pin == other.max_pin and self.min_pin == other.min_pin + + +class VariantConfig: + """Variant configuration for package builds. + + A VariantConfig defines the build matrix for a package, including: + - Variable variants (e.g., different Python versions) + - Pin configurations for run-time dependencies + - Zip keys to create specific variant combinations + + Args: + pin_run_as_build: Mapping of package names to Pin configurations + zip_keys: List of lists defining which variants should be zipped together + variants: Mapping of variant names to lists of possible values + + Examples: + Create a basic variant configuration: + + >>> config = VariantConfig( + ... variants={ + ... "python": ["3.9", "3.10", "3.11"] + ... } + ... ) + >>> config.variants["python"] + ['3.9', '3.10', '3.11'] + + Use pin_run_as_build: + + >>> config = VariantConfig( + ... pin_run_as_build={ + ... "python": Pin(max_pin="x.x") + ... }, + ... variants={ + ... "python": ["3.9", "3.10"] + ... } + ... ) + >>> config.pin_run_as_build["python"].max_pin + 'x.x' + + Use zip_keys to control variant combinations: + + >>> config = VariantConfig( + ... zip_keys=[["python", "numpy"]], + ... variants={ + ... 
"python": ["3.9", "3.10"], + ... "numpy": ["1.21", "1.22"] + ... } + ... ) + >>> config.zip_keys + [['python', 'numpy']] + + Modify configuration after creation: + + >>> config = VariantConfig() + >>> config.variants = {"cuda": ["11.8", "12.0"]} + >>> config.variants["cuda"] + ['11.8', '12.0'] + """ + + def __init__( + self, + pin_run_as_build: Optional[Dict[str, Pin]] = None, + zip_keys: Optional[List[List[str]]] = None, + variants: Optional[Dict[str, List[Any]]] = None, + ): + """Initialize a VariantConfig with optional parameters.""" + # Convert Pin wrappers to PyPin + py_pin_run_as_build = None + if pin_run_as_build is not None: + py_pin_run_as_build = {k: v._inner for k, v in pin_run_as_build.items()} + + self._inner = PyVariantConfig( + pin_run_as_build=py_pin_run_as_build, + zip_keys=zip_keys, + variants=variants, + ) + + @property + def pin_run_as_build(self) -> Optional[Dict[str, Pin]]: + """Get the pin_run_as_build mapping. + + Returns: + Dictionary mapping package names to Pin objects, or None. + + Examples: + >>> config = VariantConfig( + ... pin_run_as_build={"python": Pin(max_pin="x.x")} + ... ) + >>> config.pin_run_as_build["python"].max_pin + 'x.x' + """ + inner_pins = self._inner.pin_run_as_build + if inner_pins is None: + return None + return {k: Pin(max_pin=v.max_pin, min_pin=v.min_pin) for k, v in inner_pins.items()} + + @pin_run_as_build.setter + def pin_run_as_build(self, value: Optional[Dict[str, Pin]]) -> None: + """Set the pin_run_as_build mapping. + + Args: + value: Dictionary mapping package names to Pin objects, or None. + + Examples: + >>> config = VariantConfig() + >>> config.pin_run_as_build = {"numpy": Pin(max_pin="x.x")} + >>> config.pin_run_as_build["numpy"].max_pin + 'x.x' + """ + if value is None: + self._inner.pin_run_as_build = None + else: + self._inner.pin_run_as_build = {k: v._inner for k, v in value.items()} + + @property + def zip_keys(self) -> Optional[List[List[str]]]: + """Get the zip_keys configuration. + + Zip keys are used to "zip" together variants to create specific + combinations. For example, if you have python=[3.9, 3.10] and + numpy=[1.21, 1.22], and zip_keys=[["python", "numpy"]], then + the variants will be (3.9, 1.21) and (3.10, 1.22) instead of + all four combinations. + + Returns: + List of lists of variant names to zip together, or None. + + Examples: + >>> config = VariantConfig(zip_keys=[["python", "numpy"]]) + >>> config.zip_keys + [['python', 'numpy']] + """ + return self._inner.zip_keys + + @zip_keys.setter + def zip_keys(self, value: Optional[List[List[str]]]) -> None: + """Set the zip_keys configuration. + + Args: + value: List of lists of variant names to zip together, or None. + + Examples: + >>> config = VariantConfig() + >>> config.zip_keys = [["cuda", "cudnn"]] + >>> config.zip_keys + [['cuda', 'cudnn']] + """ + self._inner.zip_keys = value + + @property + def variants(self) -> Dict[str, List[Any]]: + """Get the variants mapping. + + Returns: + Dictionary mapping variant names to lists of possible values. + + Examples: + >>> config = VariantConfig(variants={"python": ["3.9", "3.10"]}) + >>> config.variants["python"] + ['3.9', '3.10'] + + Variants can contain different types: + + >>> config = VariantConfig(variants={ + ... "python": ["3.9", "3.10"], + ... "cuda_enabled": [True, False], + ... "cuda_version": [11, 12] + ... 
}) + >>> config.variants["cuda_enabled"] + [True, False] + """ + return self._inner.variants + + @variants.setter + def variants(self, value: Dict[str, List[Any]]) -> None: + """Set the variants mapping. + + Args: + value: Dictionary mapping variant names to lists of possible values. + + Examples: + >>> config = VariantConfig() + >>> config.variants = {"rust": ["1.70", "1.71"]} + >>> config.variants["rust"] + ['1.70', '1.71'] + """ + self._inner.variants = value + + def __repr__(self) -> str: + """Return a string representation of the VariantConfig. + + Examples: + >>> config = VariantConfig(variants={"python": ["3.9"]}) + >>> "VariantConfig" in repr(config) + True + """ + pin_keys = list(self.pin_run_as_build.keys()) if self.pin_run_as_build else [] + variant_keys = list(self.variants.keys()) + return ( + f"VariantConfig(" f"pin_run_as_build={pin_keys}, " f"zip_keys={self.zip_keys}, " f"variants={variant_keys})" + ) + + def __eq__(self, other: object) -> bool: + """Check equality with another VariantConfig. + + Examples: + >>> config1 = VariantConfig(variants={"python": ["3.9"]}) + >>> config2 = VariantConfig(variants={"python": ["3.9"]}) + >>> config1 == config2 + True + """ + if not isinstance(other, VariantConfig): + return NotImplemented + return ( + self.pin_run_as_build == other.pin_run_as_build + and self.zip_keys == other.zip_keys + and self.variants == other.variants + ) + + def merge(self, other: "VariantConfig") -> None: + """Merge another VariantConfig into this one. + + This modifies the current config in-place by merging values from `other`: + - Variants are extended (keys from `other` replace keys in `self`) + - pin_run_as_build entries are extended + - zip_keys are replaced (not merged) + + Args: + other: Another VariantConfig to merge into this one + + Examples: + >>> config1 = VariantConfig(variants={"python": ["3.9"]}) + >>> config2 = VariantConfig(variants={"numpy": ["1.21"]}) + >>> config1.merge(config2) + >>> sorted(config1.variants.keys()) + ['numpy', 'python'] + + Merging replaces existing keys: + + >>> config1 = VariantConfig(variants={"python": ["3.9"]}) + >>> config2 = VariantConfig(variants={"python": ["3.10"]}) + >>> config1.merge(config2) + >>> config1.variants["python"] + ['3.10'] + + zip_keys are replaced, not merged: + + >>> config1 = VariantConfig( + ... zip_keys=[["python", "numpy"]], + ... variants={"python": ["3.9"]} + ... ) + >>> config2 = VariantConfig( + ... zip_keys=[["cuda", "cudnn"]], + ... variants={"cuda": ["11.8"]} + ... ) + >>> config1.merge(config2) + >>> config1.zip_keys + [['cuda', 'cudnn']] + """ + self._inner.merge(other._inner) + + @staticmethod + def from_file(file: Union[str, Path], selector_config: Optional[SelectorConfig] = None) -> "VariantConfig": + """Load a VariantConfig from a single YAML file. + + This function loads a single variant configuration file. The file can be + either a variant config file (e.g., variants.yaml) or a conda-build config + file (conda_build_config.yaml). + + Note: The target_platform and build_platform are automatically inserted + into the variants based on the selector_config. + + Args: + file: Path to variant configuration file + selector_config: Optional SelectorConfig for platform selection and rendering. + If not provided, uses current platform defaults. + + Returns: + A new VariantConfig with the configuration from the file. 
+ + Raises: + RattlerBuildError: If file cannot be loaded or parsed + + Examples: + Load a single variant config file: + + >>> # config = VariantConfig.from_file("variants.yaml") + >>> # config.variants["python"] + >>> # ['3.9', '3.10', '3.11'] + + Load with specific platform: + + >>> from rattler_build import SelectorConfig + >>> selector = SelectorConfig(target_platform="linux-64") + >>> # config = VariantConfig.from_file( + >>> # "variants.yaml", + >>> # selector_config=selector + >>> # ) + """ + # Convert string path to Path object + path = Path(file) if isinstance(file, str) else file + + # Create default selector config if not provided + if selector_config is None: + selector_config = SelectorConfig() + + # Load from Rust + rust_config = PyVariantConfig.from_file(path, selector_config._config) + + # Wrap in Python class + result = VariantConfig() + result._inner = rust_config + return result + + @staticmethod + def from_files(files: List[Union[str, Path]], selector_config: Optional[SelectorConfig] = None) -> "VariantConfig": + """Load a VariantConfig from one or more YAML files. + + This function loads and merges multiple variant configuration files. + Files can be either: + - Variant config files (e.g., variants.yaml) + - Conda-build config files (conda_build_config.yaml) + + Files are processed in order, with later files overriding earlier ones + for the same keys (values are not merged, only replaced). + + Args: + files: List of paths to variant configuration files + selector_config: Optional SelectorConfig for platform selection and rendering. + If not provided, uses current platform defaults. + + Returns: + A new VariantConfig with the merged configuration from all files. + + Raises: + RattlerBuildError: If files cannot be loaded or parsed + + Examples: + Load a single variant config file: + + >>> # Assuming variants.yaml exists with python versions + >>> # config = VariantConfig.from_files(["variants.yaml"]) + >>> # config.variants["python"] + >>> # ['3.9', '3.10', '3.11'] + + Load and merge multiple config files: + + >>> # config = VariantConfig.from_files([ + >>> # "variants.yaml", + >>> # "conda_build_config.yaml" + >>> # ]) + + Load with specific platform: + + >>> from rattler_build import SelectorConfig + >>> selector = SelectorConfig(target_platform="linux-64") + >>> # config = VariantConfig.from_files( + >>> # ["variants.yaml"], + >>> # selector_config=selector + >>> # ) + """ + # Convert string paths to Path objects + path_list = [Path(f) if isinstance(f, str) else f for f in files] + + # Create default selector config if not provided + if selector_config is None: + selector_config = SelectorConfig() + + # Load from Rust + rust_config = PyVariantConfig.from_files(path_list, selector_config._config) + + # Wrap in Python class + result = VariantConfig() + result._inner = rust_config + return result + + +__all__ = ["Pin", "VariantConfig"] diff --git a/py-rattler-build/src/build_configuration.rs b/py-rattler-build/src/build_configuration.rs new file mode 100644 index 000000000..115754f1d --- /dev/null +++ b/py-rattler-build/src/build_configuration.rs @@ -0,0 +1,310 @@ +//! Python bindings for BuildConfig +//! +//! This module provides Python wrappers for the Rust BuildConfiguration struct, +//! which contains all configuration for building a package. 
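Before moving to the Rust side: the `zip_keys` semantics documented in `variant_config.py` above determine how large the build matrix gets. A self-contained sketch of the expansion rule (the zip/product helpers here are illustrative; rattler-build performs the real expansion internally):

```python
from itertools import product

from rattler_build import VariantConfig

config = VariantConfig(
    zip_keys=[["python", "numpy"]],
    variants={"python": ["3.9", "3.10"], "numpy": ["1.21", "1.22"]},
)

# Zipped keys vary together instead of forming a full cross product:
zipped = list(zip(config.variants["python"], config.variants["numpy"]))
assert zipped == [("3.9", "1.21"), ("3.10", "1.22")]

# Without zip_keys, the same values would yield the full 2x2 matrix:
full = list(product(config.variants["python"], config.variants["numpy"]))
assert len(full) == 4
```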
+
+use crate::{PyDebug, PyDirectories, PyPackagingConfig, PySandboxConfig};
+use ::rattler_build::metadata::BuildConfiguration as RustBuildConfiguration;
+use pyo3::prelude::*;
+use std::collections::HashMap;
+
+/// Python wrapper for BuildConfig struct.
+///
+/// BuildConfig contains the complete configuration for building a package,
+/// including platforms, variants, channels, directories, and all build settings.
+///
+/// This is a read-only wrapper that exposes all configuration properties for
+/// inspection. Typically created internally during the build process.
+///
+/// Examples:
+///     Access build configuration (from build context):
+///     >>> config = get_build_config()
+///     >>> print(f"Target: {config.target_platform}")
+///     >>> print(f"Hash: {config.hash}")
+///     >>> if config.cross_compilation():
+///     ...     print("Cross-compiling!")
+#[pyclass(name = "BuildConfig")]
+#[derive(Clone)]
+pub struct PyBuildConfig {
+    pub(crate) inner: RustBuildConfiguration,
+}
+
+#[pymethods]
+impl PyBuildConfig {
+    /// Get the target platform.
+    ///
+    /// The platform for which the package is being built.
+    ///
+    /// Returns:
+    ///     Target platform string (e.g., "linux-64", "osx-arm64")
+    #[getter]
+    fn target_platform(&self) -> String {
+        self.inner.target_platform.to_string()
+    }
+
+    /// Get the host platform.
+    ///
+    /// The platform where the package will run (usually same as target,
+    /// but different for noarch packages).
+    ///
+    /// Returns:
+    ///     Dictionary with 'platform' (str) and 'virtual_packages' (list) keys
+    #[getter]
+    fn host_platform(&self, py: Python<'_>) -> PyResult<Py<pyo3::types::PyDict>> {
+        let platform_dict = pyo3::types::PyDict::new(py);
+        platform_dict.set_item("platform", self.inner.host_platform.platform.to_string())?;
+
+        let virt_packages: Vec<String> = self
+            .inner
+            .host_platform
+            .virtual_packages
+            .iter()
+            .map(|vp| format!("{}", vp))
+            .collect();
+        platform_dict.set_item("virtual_packages", virt_packages)?;
+
+        Ok(platform_dict.into())
+    }
+
+    /// Get the build platform.
+    ///
+    /// The platform on which the build is running.
+    ///
+    /// Returns:
+    ///     Dictionary with 'platform' (str) and 'virtual_packages' (list) keys
+    #[getter]
+    fn build_platform(&self, py: Python<'_>) -> PyResult<Py<pyo3::types::PyDict>> {
+        let platform_dict = pyo3::types::PyDict::new(py);
+        platform_dict.set_item("platform", self.inner.build_platform.platform.to_string())?;
+
+        let virt_packages: Vec<String> = self
+            .inner
+            .build_platform
+            .virtual_packages
+            .iter()
+            .map(|vp| format!("{}", vp))
+            .collect();
+        platform_dict.set_item("virtual_packages", virt_packages)?;
+
+        Ok(platform_dict.into())
+    }
+
+    /// Get the variant configuration.
+    ///
+    /// The selected variant for this build (e.g., python version, numpy version).
+    ///
+    /// Returns:
+    ///     Dictionary mapping variant keys to their values
+    #[getter]
+    fn variant(&self, py: Python<'_>) -> PyResult<PyObject> {
+        let mut variant_dict = HashMap::new();
+        for (key, value) in &self.inner.variant {
+            let json_value = serde_json::to_value(value)
+                .map_err(|e| PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(e.to_string()))?;
+            variant_dict.insert(key.normalize(), json_value);
+        }
+
+        pythonize::pythonize(py, &variant_dict)
+            .map(|obj| obj.into())
+            .map_err(|e| PyErr::new::<pyo3::exceptions::PyRuntimeError, _>(e.to_string()))
+    }
+
+    /// Get the variant hash.
+    ///
+    /// The computed hash of the variant configuration.
+    ///
+    /// Returns:
+    ///     Hash string (e.g., "h1234567_0")
+    #[getter]
+    fn hash(&self) -> String {
+        self.inner.hash.hash.clone()
+    }
+
+    /// Get the build directories.
+    ///
+    /// Returns:
+    ///     Directories instance with all build paths
+    #[getter]
+    fn directories(&self) -> PyDirectories {
+        PyDirectories {
+            inner: self.inner.directories.clone(),
+        }
+    }
+
+    /// Get the channels.
+    ///
+    /// The channels used for resolving dependencies.
+    ///
+    /// Returns:
+    ///     List of channel URLs as strings
+    #[getter]
+    fn channels(&self) -> Vec<String> {
+        self.inner.channels.iter().map(|c| c.to_string()).collect()
+    }
+
+    /// Get the channel priority.
+    ///
+    /// Returns:
+    ///     Channel priority as a string (e.g., "Strict", "Flexible")
+    #[getter]
+    fn channel_priority(&self) -> String {
+        format!("{:?}", self.inner.channel_priority)
+    }
+
+    /// Get the solve strategy.
+    ///
+    /// Returns:
+    ///     Solve strategy as a string
+    #[getter]
+    fn solve_strategy(&self) -> String {
+        format!("{:?}", self.inner.solve_strategy)
+    }
+
+    /// Get the build timestamp.
+    ///
+    /// Returns:
+    ///     ISO 8601 timestamp string
+    #[getter]
+    fn timestamp(&self) -> String {
+        self.inner.timestamp.to_rfc3339()
+    }
+
+    /// Get the subpackages.
+    ///
+    /// All subpackages from this output or other outputs from the same recipe.
+    ///
+    /// Returns:
+    ///     Dictionary mapping package names to their identifiers
+    #[getter]
+    fn subpackages(&self, py: Python<'_>) -> PyResult<Py<pyo3::types::PyDict>> {
+        let subpackages_dict = pyo3::types::PyDict::new(py);
+        for (name, identifier) in &self.inner.subpackages {
+            let pkg_dict = pyo3::types::PyDict::new(py);
+            pkg_dict.set_item("name", name.as_normalized())?;
+            pkg_dict.set_item("version", identifier.version.to_string())?;
+            pkg_dict.set_item("build_string", identifier.build_string.clone())?;
+            subpackages_dict.set_item(name.as_normalized(), pkg_dict)?;
+        }
+
+        Ok(subpackages_dict.into())
+    }
+
+    /// Get the packaging settings.
+    ///
+    /// Returns:
+    ///     PackagingConfig instance
+    #[getter]
+    fn packaging_settings(&self) -> PyPackagingConfig {
+        PyPackagingConfig {
+            inner: self.inner.packaging_settings.clone(),
+        }
+    }
+
+    /// Check if recipe should be stored in the package.
+    ///
+    /// Returns:
+    ///     True if recipe is stored, False otherwise
+    #[getter]
+    fn store_recipe(&self) -> bool {
+        self.inner.store_recipe
+    }
+
+    /// Check if forced colors are enabled.
+    ///
+    /// Returns:
+    ///     True if colors are forced in build script
+    #[getter]
+    fn force_colors(&self) -> bool {
+        self.inner.force_colors
+    }
+
+    /// Get the sandbox configuration.
+    ///
+    /// Returns:
+    ///     SandboxConfig instance, or None if not configured
+    #[getter]
+    fn sandbox_config(&self) -> Option<PySandboxConfig> {
+        self.inner
+            .sandbox_config
+            .as_ref()
+            .map(|sc| PySandboxConfig { inner: sc.clone() })
+    }
+
+    /// Get the debug configuration.
+    ///
+    /// Returns:
+    ///     Debug instance
+    #[getter]
+    fn debug(&self) -> PyDebug {
+        PyDebug {
+            inner: self.inner.debug,
+        }
+    }
+
+    /// Get the exclude_newer timestamp.
+    ///
+    /// Packages newer than this date are excluded from the solver.
+    ///
+    /// Returns:
+    ///     ISO 8601 timestamp string, or None if not set
+    #[getter]
+    fn exclude_newer(&self) -> Option<String> {
+        self.inner.exclude_newer.map(|dt| dt.to_rfc3339())
+    }
+
+    /// Check if this is a cross-compilation build.
+    ///
+    /// Returns:
+    ///     True if target platform differs from build platform
+    fn cross_compilation(&self) -> bool {
+        self.inner.cross_compilation()
+    }
+
+    /// Get the target platform name only (without virtual packages).
+ /// + /// Returns: + /// Platform string + fn target_platform_name(&self) -> String { + self.inner.target_platform.to_string() + } + + /// Get the host platform name only (without virtual packages). + /// + /// Returns: + /// Platform string + fn host_platform_name(&self) -> String { + self.inner.host_platform.platform.to_string() + } + + /// Get the build platform name only (without virtual packages). + /// + /// Returns: + /// Platform string + fn build_platform_name(&self) -> String { + self.inner.build_platform.platform.to_string() + } + + /// String representation of the BuildConfig. + fn __repr__(&self) -> String { + format!( + "BuildConfig(target_platform='{}', hash='{}', cross_compilation={})", + self.inner.target_platform, + self.inner.hash.hash, + self.cross_compilation() + ) + } + + /// Detailed string representation. + fn __str__(&self) -> String { + format!( + "BuildConfig:\n Target: {}\n Host: {}\n Build: {}\n Hash: {}\n Cross-compilation: {}\n Channels: {}\n Debug: {}", + self.inner.target_platform, + self.inner.host_platform.platform, + self.inner.build_platform.platform, + self.inner.hash.hash, + self.cross_compilation(), + self.inner.channels.len(), + self.inner.debug.is_enabled() + ) + } +} diff --git a/py-rattler-build/src/debug.rs b/py-rattler-build/src/debug.rs new file mode 100644 index 000000000..f9b02ceae --- /dev/null +++ b/py-rattler-build/src/debug.rs @@ -0,0 +1,120 @@ +//! Python bindings for Debug +//! +//! This module provides Python wrappers for the Rust Debug struct, +//! which controls debug output during builds. + +use ::rattler_build::metadata::Debug as RustDebug; +use pyo3::prelude::*; + +/// Python wrapper for Debug struct. +/// +/// Debug is a simple wrapper around a boolean that controls whether +/// debug output is enabled during package builds. +/// +/// Examples: +/// Enable debug mode: +/// >>> debug = Debug(True) +/// >>> assert debug.is_enabled() +/// +/// Disable debug mode: +/// >>> debug = Debug(False) +/// >>> assert not debug.is_enabled() +/// +/// Toggle debug mode: +/// >>> debug = Debug.enabled() +/// >>> debug.set_enabled(False) +#[pyclass(name = "Debug")] +#[derive(Clone, Debug)] +pub struct PyDebug { + pub(crate) inner: RustDebug, +} + +#[pymethods] +impl PyDebug { + /// Create a new Debug instance. + /// + /// Args: + /// enabled: Whether debug output is enabled (default: False) + /// + /// Returns: + /// A new Debug instance + #[new] + #[pyo3(signature = (enabled=false))] + fn new(enabled: bool) -> Self { + PyDebug { + inner: RustDebug::new(enabled), + } + } + + /// Create a Debug instance with debug enabled. + /// + /// Returns: + /// Debug instance with debug enabled + #[staticmethod] + fn enabled() -> Self { + PyDebug { + inner: RustDebug::new(true), + } + } + + /// Create a Debug instance with debug disabled. + /// + /// Returns: + /// Debug instance with debug disabled + #[staticmethod] + fn disabled() -> Self { + PyDebug { + inner: RustDebug::new(false), + } + } + + /// Check if debug output is enabled. + /// + /// Returns: + /// True if debug output is enabled, False otherwise + fn is_enabled(&self) -> bool { + self.inner.is_enabled() + } + + /// Set whether debug output is enabled. + /// + /// Args: + /// enabled: Whether to enable debug output + fn set_enabled(&mut self, enabled: bool) { + self.inner = RustDebug::new(enabled); + } + + /// Enable debug output. + fn enable(&mut self) { + self.inner = RustDebug::new(true); + } + + /// Disable debug output. 
+    fn disable(&mut self) {
+        self.inner = RustDebug::new(false);
+    }
+
+    /// Toggle debug output.
+    fn toggle(&mut self) {
+        self.inner = RustDebug::new(!self.inner.is_enabled());
+    }
+
+    /// String representation of the Debug instance.
+    fn __repr__(&self) -> String {
+        format!("Debug(enabled={})", self.inner.is_enabled())
+    }
+
+    /// String representation.
+    fn __str__(&self) -> &'static str {
+        if self.inner.is_enabled() {
+            "Debug enabled"
+        } else {
+            "Debug disabled"
+        }
+    }
+
+    /// Boolean conversion.
+    fn __bool__(&self) -> bool {
+        self.inner.is_enabled()
+    }
+}
diff --git a/py-rattler-build/src/directories.rs b/py-rattler-build/src/directories.rs
new file mode 100644
index 000000000..fec007ecb
--- /dev/null
+++ b/py-rattler-build/src/directories.rs
@@ -0,0 +1,211 @@
+//! Python bindings for Directories
+//!
+//! This module provides Python wrappers for the Rust Directories struct,
+//! which represents the directory structure used during package builds.
+
+use crate::error::RattlerBuildError;
+use ::rattler_build::metadata::Directories as RustDirectories;
+use pyo3::prelude::*;
+use std::path::PathBuf;
+
+/// Python wrapper for Directories struct.
+///
+/// Directories represents the various paths and directories used during
+/// the conda package build process, including recipe, cache, work, host
+/// and build directories.
+///
+/// Examples:
+///     Access directory information:
+///     >>> dirs = get_build_directories()  # From a build context
+///     >>> print(dirs.recipe_dir)
+///     >>> print(dirs.work_dir)
+///     >>> print(dirs.host_prefix)
+#[pyclass(name = "Directories")]
+#[derive(Clone, Debug)]
+pub struct PyDirectories {
+    pub(crate) inner: RustDirectories,
+}
+
+#[pymethods]
+impl PyDirectories {
+    /// Get the recipe directory path.
+    ///
+    /// The directory where the recipe is located.
+    ///
+    /// Returns:
+    ///     Path to the recipe directory
+    #[getter]
+    fn recipe_dir(&self) -> PyResult<PathBuf> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let path: PathBuf = serde_json::from_value(
+            json.get("recipe_dir")
+                .cloned()
+                .unwrap_or(serde_json::Value::String(String::new())),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(path)
+    }
+
+    /// Get the recipe file path.
+    ///
+    /// The path to the recipe file itself.
+    ///
+    /// Returns:
+    ///     Path to the recipe file
+    #[getter]
+    fn recipe_path(&self) -> PyResult<PathBuf> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let path: PathBuf = serde_json::from_value(
+            json.get("recipe_path")
+                .cloned()
+                .unwrap_or(serde_json::Value::String(String::new())),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(path)
+    }
+
+    /// Get the cache directory path.
+    ///
+    /// The folder where the build cache is located.
+    ///
+    /// Returns:
+    ///     Path to the cache directory
+    #[getter]
+    fn cache_dir(&self) -> PyResult<PathBuf> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let path: PathBuf = serde_json::from_value(
+            json.get("cache_dir")
+                .cloned()
+                .unwrap_or(serde_json::Value::String(String::new())),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(path)
+    }
+
+    /// Get the host prefix path.
+    ///
+    /// The directory where host dependencies are installed.
+    /// Exposed as $PREFIX (or %PREFIX% on Windows) in the build script.
+    ///
+    /// Returns:
+    ///     Path to the host prefix directory
+    #[getter]
+    fn host_prefix(&self) -> PyResult<PathBuf> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let path: PathBuf = serde_json::from_value(
+            json.get("host_prefix")
+                .cloned()
+                .unwrap_or(serde_json::Value::String(String::new())),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(path)
+    }
+
+    /// Get the build prefix path.
+    ///
+    /// The directory where build dependencies are installed.
+    /// Exposed as $BUILD_PREFIX (or %BUILD_PREFIX% on Windows) in the build script.
+    ///
+    /// Returns:
+    ///     Path to the build prefix directory
+    #[getter]
+    fn build_prefix(&self) -> PyResult<PathBuf> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let path: PathBuf = serde_json::from_value(
+            json.get("build_prefix")
+                .cloned()
+                .unwrap_or(serde_json::Value::String(String::new())),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(path)
+    }
+
+    /// Get the work directory path.
+    ///
+    /// The directory where the source code is copied to and built from.
+    ///
+    /// Returns:
+    ///     Path to the work directory
+    #[getter]
+    fn work_dir(&self) -> PyResult<PathBuf> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let path: PathBuf = serde_json::from_value(
+            json.get("work_dir")
+                .cloned()
+                .unwrap_or(serde_json::Value::String(String::new())),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(path)
+    }
+
+    /// Get the build directory path.
+    ///
+    /// The parent directory of host, build and work directories.
+    ///
+    /// Returns:
+    ///     Path to the build directory
+    #[getter]
+    fn build_dir(&self) -> PyResult<PathBuf> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let path: PathBuf = serde_json::from_value(
+            json.get("build_dir")
+                .cloned()
+                .unwrap_or(serde_json::Value::String(String::new())),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(path)
+    }
+
+    /// Get the output directory path.
+    ///
+    /// The output directory or local channel directory where packages are written.
+    ///
+    /// Returns:
+    ///     Path to the output directory
+    #[getter]
+    fn output_dir(&self) -> PyResult<PathBuf> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let path: PathBuf = serde_json::from_value(
+            json.get("output_dir")
+                .cloned()
+                .unwrap_or(serde_json::Value::String(String::new())),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(path)
+    }
+
+    /// String representation of the Directories.
+    fn __repr__(&self) -> PyResult<String> {
+        Ok(format!(
+            "Directories(recipe_dir={:?}, work_dir={:?}, host_prefix={:?}, build_prefix={:?}, output_dir={:?})",
+            self.recipe_dir()?,
+            self.work_dir()?,
+            self.host_prefix()?,
+            self.build_prefix()?,
+            self.output_dir()?
+        ))
+    }
+
+    /// Detailed string representation showing all paths.
+    fn __str__(&self) -> PyResult<String> {
+        Ok(format!(
+            "Directories:\n  Recipe dir: {:?}\n  Recipe path: {:?}\n  Cache dir: {:?}\n  Work dir: {:?}\n  Host prefix: {:?}\n  Build prefix: {:?}\n  Build dir: {:?}\n  Output dir: {:?}",
+            self.recipe_dir()?,
+            self.recipe_path()?,
+            self.cache_dir()?,
+            self.work_dir()?,
+            self.host_prefix()?,
+            self.build_prefix()?,
+            self.build_dir()?,
+            self.output_dir()?
+        ))
+    }
+}
diff --git a/py-rattler-build/src/lib.rs b/py-rattler-build/src/lib.rs
index d4893c3e7..635d5d262 100644
--- a/py-rattler-build/src/lib.rs
+++ b/py-rattler-build/src/lib.rs
@@ -33,6 +33,27 @@ use url::Url;
 mod error;
 use error::RattlerBuildError;
 
+mod variant_config;
+use variant_config::{PyPin, PyVariantConfig};
+
+mod sandbox_config;
+use sandbox_config::PySandboxConfig;
+
+mod directories;
+use directories::PyDirectories;
+
+mod packaging_settings;
+use packaging_settings::{PyArchiveType, PyPackagingConfig};
+
+mod debug;
+use debug::PyDebug;
+
+mod test_configuration;
+use test_configuration::PyTestConfig;
+
+mod build_configuration;
+use build_configuration::PyBuildConfig;
+
 /// Execute async tasks in Python bindings with proper error handling
 fn run_async_task<F, T>(future: F) -> PyResult<T>
 where
@@ -726,6 +747,15 @@ fn rattler_build<'py>(_py: Python<'py>, m: Bound<'py, PyModule>) -> PyResult<()>
     m.add_function(wrap_pyfunction!(upload_package_to_anaconda_py, &m).unwrap())?;
     m.add_function(wrap_pyfunction!(upload_packages_to_conda_forge_py, &m).unwrap())?;
     m.add_class::()?;
+    m.add_class::<PyPin>()?;
+    m.add_class::<PyVariantConfig>()?;
+    m.add_class::<PySandboxConfig>()?;
+    m.add_class::<PyDirectories>()?;
+    m.add_class::<PyArchiveType>()?;
+    m.add_class::<PyPackagingConfig>()?;
+    m.add_class::<PyDebug>()?;
+    m.add_class::<PyTestConfig>()?;
+    m.add_class::<PyBuildConfig>()?;
 
     Ok(())
 }
diff --git a/py-rattler-build/src/packaging_settings.rs b/py-rattler-build/src/packaging_settings.rs
new file mode 100644
index 000000000..240350bdb
--- /dev/null
+++ b/py-rattler-build/src/packaging_settings.rs
@@ -0,0 +1,293 @@
+//! Python bindings for PackagingConfig
+//!
+//! This module provides Python wrappers for the Rust PackagingSettings struct,
+//! which controls package format and compression settings.
+
+use crate::error::RattlerBuildError;
+use ::rattler_build::metadata::PackagingSettings as RustPackagingSettings;
+use pyo3::prelude::*;
+use rattler_conda_types::package::ArchiveType;
+
+/// Python wrapper for ArchiveType enum.
+///
+/// Represents the package archive format.
+///
+/// Variants:
+///     TarBz2: Traditional .tar.bz2 format
+///     Conda: Modern .conda format (recommended)
+#[pyclass(name = "ArchiveType")]
+#[derive(Clone, Debug)]
+pub enum PyArchiveType {
+    /// Traditional tar.bz2 format (.tar.bz2)
+    TarBz2,
+    /// Modern conda format (.conda) - recommended
+    Conda,
+}
+
+impl PyArchiveType {
+    /// Convert Python wrapper to Rust type
+    pub(crate) fn to_rust(&self) -> ArchiveType {
+        match self {
+            PyArchiveType::TarBz2 => ArchiveType::TarBz2,
+            PyArchiveType::Conda => ArchiveType::Conda,
+        }
+    }
+
+    /// Convert Rust type to Python wrapper
+    pub(crate) fn from_rust(archive_type: ArchiveType) -> Self {
+        match archive_type {
+            ArchiveType::TarBz2 => PyArchiveType::TarBz2,
+            ArchiveType::Conda => PyArchiveType::Conda,
+        }
+    }
+}
+
+#[pymethods]
+impl PyArchiveType {
+    /// Get the file extension for this archive type.
+    ///
+    /// Returns:
+    ///     ".tar.bz2" for TarBz2, ".conda" for Conda
+    fn extension(&self) -> &'static str {
+        match self {
+            PyArchiveType::TarBz2 => ".tar.bz2",
+            PyArchiveType::Conda => ".conda",
+        }
+    }
+
+    /// String representation of the ArchiveType.
+    fn extension(&self) -> &'static str {
+        match self {
+            PyArchiveType::TarBz2 => ".tar.bz2",
+            PyArchiveType::Conda => ".conda",
+        }
+    }
+
+    /// String representation of the ArchiveType.
+    fn __repr__(&self) -> String {
+        match self {
+            PyArchiveType::TarBz2 => "ArchiveType.TarBz2".to_string(),
+            PyArchiveType::Conda => "ArchiveType.Conda".to_string(),
+        }
+    }
+
+    /// String representation.
+    fn __str__(&self) -> &'static str {
+        match self {
+            PyArchiveType::TarBz2 => "tar.bz2",
+            PyArchiveType::Conda => "conda",
+        }
+    }
+}
+
+/// Python wrapper for the PackagingConfig struct.
+///
+/// PackagingConfig controls the package format and compression level
+/// used when creating conda packages.
+///
+/// Examples:
+///     Create with default compression:
+///     >>> settings = PackagingConfig.tar_bz2()
+///     >>> settings = PackagingConfig.conda()
+///
+///     Create with custom compression:
+///     >>> settings = PackagingConfig(ArchiveType.Conda, compression_level=10)
+///     >>> settings = PackagingConfig.tar_bz2(compression_level=9)
+#[pyclass(name = "PackagingConfig")]
+#[derive(Clone, Debug)]
+pub struct PyPackagingConfig {
+    pub(crate) inner: RustPackagingSettings,
+}
+
+#[pymethods]
+impl PyPackagingConfig {
+    /// Create a new PackagingConfig.
+    ///
+    /// Args:
+    ///     archive_type: The archive format (TarBz2 or Conda)
+    ///     compression_level: Compression level (1-9 for tar.bz2, -7 to 22 for conda)
+    ///
+    /// Returns:
+    ///     A new PackagingConfig instance
+    ///
+    /// Note:
+    ///     - For tar.bz2: compression_level should be 1-9 (default 9)
+    ///     - For conda: compression_level should be -7 to 22 (default 22)
+    ///     - Higher values = better compression but slower
+    #[new]
+    #[pyo3(signature = (archive_type, compression_level=None))]
+    fn new(archive_type: PyArchiveType, compression_level: Option<i32>) -> PyResult<Self> {
+        let rust_archive_type = archive_type.to_rust();
+
+        // Set appropriate defaults based on archive type
+        let compression_level = if let Some(level) = compression_level {
+            level
+        } else {
+            match rust_archive_type {
+                ArchiveType::TarBz2 => 9,  // Max compression for bzip2
+                ArchiveType::Conda => 22,  // Max compression for zstd
+            }
+        };
+
+        // Validate compression levels
+        match rust_archive_type {
+            ArchiveType::TarBz2 => {
+                if !(1..=9).contains(&compression_level) {
+                    return Err(RattlerBuildError::Other(format!(
+                        "Invalid compression level {} for tar.bz2. Must be 1-9.",
+                        compression_level
+                    ))
+                    .into());
+                }
+            }
+            ArchiveType::Conda => {
+                if !(-7..=22).contains(&compression_level) {
+                    return Err(RattlerBuildError::Other(format!(
+                        "Invalid compression level {} for conda. Must be -7 to 22.",
+                        compression_level
+                    ))
+                    .into());
+                }
+            }
+        }
+
+        Ok(PyPackagingConfig {
+            inner: RustPackagingSettings {
+                archive_type: rust_archive_type,
+                compression_level,
+            },
+        })
+    }
+
+    /// Create a PackagingConfig for the tar.bz2 format.
+    ///
+    /// Args:
+    ///     compression_level: Compression level (1-9, default 9)
+    ///
+    /// Returns:
+    ///     A PackagingConfig configured for tar.bz2
+    #[staticmethod]
+    #[pyo3(signature = (compression_level=9))]
+    fn tar_bz2(compression_level: i32) -> PyResult<Self> {
+        Self::new(PyArchiveType::TarBz2, Some(compression_level))
+    }
+
+    /// Create a PackagingConfig for the conda format (recommended).
+    ///
+    /// Args:
+    ///     compression_level: Compression level (-7 to 22, default 22)
+    ///
+    /// Returns:
+    ///     A PackagingConfig configured for the .conda format
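+    ///
+    /// Example (illustrative):
+    ///     >>> PackagingConfig.conda(compression_level=10).extension()
+    ///     '.conda'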
+    #[staticmethod]
+    #[pyo3(signature = (compression_level=22))]
+    fn conda(compression_level: i32) -> PyResult<Self> {
+        Self::new(PyArchiveType::Conda, Some(compression_level))
+    }
+
+    /// Get the archive type.
+    ///
+    /// Returns:
+    ///     The archive type (TarBz2 or Conda)
+    #[getter]
+    fn archive_type(&self) -> PyArchiveType {
+        PyArchiveType::from_rust(self.inner.archive_type)
+    }
+
+    /// Set the archive type.
+    ///
+    /// Args:
+    ///     value: The archive type to set
+    #[setter]
+    fn set_archive_type(&mut self, value: PyArchiveType) {
+        self.inner.archive_type = value.to_rust();
+    }
+
+    /// Get the compression level.
+    ///
+    /// Returns:
+    ///     The compression level (1-9 for tar.bz2, -7 to 22 for conda)
+    #[getter]
+    fn compression_level(&self) -> i32 {
+        self.inner.compression_level
+    }
+
+    /// Set the compression level.
+    ///
+    /// Args:
+    ///     value: The compression level
+    ///
+    /// Note:
+    ///     - For tar.bz2: must be 1-9
+    ///     - For conda: must be -7 to 22
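+    ///
+    /// Example (illustrative; the second assignment raises an error because
+    /// the level is validated against the current archive type):
+    ///     >>> settings = PackagingConfig.conda()
+    ///     >>> settings.compression_level = 10
+    ///     >>> settings.compression_level = 99  # invalid: must be -7 to 22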
+    #[setter]
+    fn set_compression_level(&mut self, value: i32) -> PyResult<()> {
+        // Validate based on the current archive type
+        match self.inner.archive_type {
+            ArchiveType::TarBz2 => {
+                if !(1..=9).contains(&value) {
+                    return Err(RattlerBuildError::Other(format!(
+                        "Invalid compression level {} for tar.bz2. Must be 1-9.",
+                        value
+                    ))
+                    .into());
+                }
+            }
+            ArchiveType::Conda => {
+                if !(-7..=22).contains(&value) {
+                    return Err(RattlerBuildError::Other(format!(
+                        "Invalid compression level {} for conda. Must be -7 to 22.",
+                        value
+                    ))
+                    .into());
+                }
+            }
+        }
+
+        self.inner.compression_level = value;
+        Ok(())
+    }
+
+    /// Get the file extension for the current archive type.
+    ///
+    /// Returns:
+    ///     ".tar.bz2" or ".conda"
+    fn extension(&self) -> &'static str {
+        match self.inner.archive_type {
+            ArchiveType::TarBz2 => ".tar.bz2",
+            ArchiveType::Conda => ".conda",
+        }
+    }
+
+    /// Check if this is using the tar.bz2 format.
+    ///
+    /// Returns:
+    ///     True if using the tar.bz2 format
+    fn is_tar_bz2(&self) -> bool {
+        matches!(self.inner.archive_type, ArchiveType::TarBz2)
+    }
+
+    /// Check if this is using the conda format.
+    ///
+    /// Returns:
+    ///     True if using the conda format
+    fn is_conda(&self) -> bool {
+        matches!(self.inner.archive_type, ArchiveType::Conda)
+    }
+
+    /// String representation of the PackagingConfig.
+    fn __repr__(&self) -> String {
+        format!(
+            "PackagingConfig(archive_type={}, compression_level={})",
+            match self.inner.archive_type {
+                ArchiveType::TarBz2 => "TarBz2",
+                ArchiveType::Conda => "Conda",
+            },
+            self.inner.compression_level
+        )
+    }
+
+    /// Detailed string representation.
+    fn __str__(&self) -> String {
+        format!(
+            "{} format with compression level {}",
+            match self.inner.archive_type {
+                ArchiveType::TarBz2 => "tar.bz2",
+                ArchiveType::Conda => "conda",
+            },
+            self.inner.compression_level
+        )
+    }
+}
diff --git a/py-rattler-build/src/sandbox_config.rs b/py-rattler-build/src/sandbox_config.rs
new file mode 100644
index 000000000..f642fc601
--- /dev/null
+++ b/py-rattler-build/src/sandbox_config.rs
@@ -0,0 +1,271 @@
+//! Python bindings for SandboxConfig
+//!
+//! This module provides Python wrappers for the Rust SandboxConfiguration struct,
+//! which controls build sandboxing and isolation settings.
+
+use crate::error::RattlerBuildError;
+use ::rattler_build::script::SandboxConfiguration as RustSandboxConfiguration;
+use pyo3::prelude::*;
+use std::path::PathBuf;
+
+/// Python wrapper for the SandboxConfig struct.
+///
+/// SandboxConfig controls the sandboxing/isolation settings for builds,
+/// including network access and filesystem permissions.
+///
+/// Examples:
+///     Create a basic sandbox configuration:
+///     >>> config = SandboxConfig(
+///     ...     allow_network=False,
+///     ...     read=["/usr", "/etc"],
+///     ...     read_execute=["/bin", "/usr/bin"],
+///     ...     read_write=["/tmp"]
+///     ... )
+///
+///     Use platform defaults:
+///     >>> macos_config = SandboxConfig.for_macos()
+///     >>> linux_config = SandboxConfig.for_linux()
+#[pyclass(name = "SandboxConfig")]
+#[derive(Clone, Debug)]
+pub struct PySandboxConfig {
+    pub(crate) inner: RustSandboxConfiguration,
+}
+
+#[pymethods]
+impl PySandboxConfig {
+    /// Create a new SandboxConfig.
+    ///
+    /// Args:
+    ///     allow_network: Whether to allow network access during the build
+    ///     read: List of paths that can be read
+    ///     read_execute: List of paths that can be read and executed
+    ///     read_write: List of paths that can be read and written
+    ///
+    /// Returns:
+    ///     A new SandboxConfig instance
+    #[new]
+    #[pyo3(signature = (allow_network=false, read=None, read_execute=None, read_write=None))]
+    fn new(
+        allow_network: bool,
+        read: Option<Vec<PathBuf>>,
+        read_execute: Option<Vec<PathBuf>>,
+        read_write: Option<Vec<PathBuf>>,
+    ) -> Self {
+        // Since the RustSandboxConfiguration fields are private, we use serde
+        let config = serde_json::json!({
+            "allow_network": allow_network,
+            "read": read.unwrap_or_default(),
+            "read_execute": read_execute.unwrap_or_default(),
+            "read_write": read_write.unwrap_or_default(),
+        });
+
+        let inner: RustSandboxConfiguration =
+            serde_json::from_value(config).expect("Failed to create SandboxConfiguration");
+
+        PySandboxConfig { inner }
+    }
+
+    /// Get the allow_network setting.
+    ///
+    /// Returns:
+    ///     True if network access is allowed, False otherwise
+    #[getter]
+    fn allow_network(&self) -> PyResult<bool> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        Ok(json
+            .get("allow_network")
+            .and_then(|v| v.as_bool())
+            .unwrap_or(false))
+    }
+
+    /// Set the allow_network setting.
+    ///
+    /// Args:
+    ///     value: Whether to allow network access
+    #[setter]
+    fn set_allow_network(&mut self, value: bool) -> PyResult<()> {
+        let mut json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        json["allow_network"] = serde_json::Value::Bool(value);
+        self.inner = serde_json::from_value(json)
+            .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(())
+    }
+
+    /// Get the list of read-only paths.
+    ///
+    /// Returns:
+    ///     List of paths that can be read
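+    ///
+    /// Example (illustrative):
+    ///     >>> config = SandboxConfig(read=["/usr", "/etc"])
+    ///     >>> [str(p) for p in config.read]
+    ///     ['/usr', '/etc']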
+    #[getter]
+    fn read(&self) -> PyResult<Vec<PathBuf>> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let paths: Vec<PathBuf> = serde_json::from_value(
+            json.get("read")
+                .cloned()
+                .unwrap_or(serde_json::Value::Array(vec![])),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(paths)
+    }
+
+    /// Set the list of read-only paths.
+    ///
+    /// Args:
+    ///     value: List of paths that can be read
+    #[setter]
+    fn set_read(&mut self, value: Vec<PathBuf>) -> PyResult<()> {
+        let mut json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        json["read"] = serde_json::to_value(&value)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        self.inner = serde_json::from_value(json)
+            .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(())
+    }
+
+    /// Get the list of read-execute paths.
+    ///
+    /// Returns:
+    ///     List of paths that can be read and executed
+    #[getter]
+    fn read_execute(&self) -> PyResult<Vec<PathBuf>> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let paths: Vec<PathBuf> = serde_json::from_value(
+            json.get("read_execute")
+                .cloned()
+                .unwrap_or(serde_json::Value::Array(vec![])),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(paths)
+    }
+
+    /// Set the list of read-execute paths.
+    ///
+    /// Args:
+    ///     value: List of paths that can be read and executed
+    #[setter]
+    fn set_read_execute(&mut self, value: Vec<PathBuf>) -> PyResult<()> {
+        let mut json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        json["read_execute"] = serde_json::to_value(&value)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        self.inner = serde_json::from_value(json)
+            .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(())
+    }
+
+    /// Get the list of read-write paths.
+    ///
+    /// Returns:
+    ///     List of paths that can be read and written
+    #[getter]
+    fn read_write(&self) -> PyResult<Vec<PathBuf>> {
+        let json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        let paths: Vec<PathBuf> = serde_json::from_value(
+            json.get("read_write")
+                .cloned()
+                .unwrap_or(serde_json::Value::Array(vec![])),
+        )
+        .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(paths)
+    }
+
+    /// Set the list of read-write paths.
+    ///
+    /// Args:
+    ///     value: List of paths that can be read and written
+    #[setter]
+    fn set_read_write(&mut self, value: Vec<PathBuf>) -> PyResult<()> {
+        let mut json = serde_json::to_value(&self.inner)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        json["read_write"] = serde_json::to_value(&value)
+            .map_err(|e| RattlerBuildError::Other(format!("Serialization failed: {}", e)))?;
+        self.inner = serde_json::from_value(json)
+            .map_err(|e| RattlerBuildError::Other(format!("Deserialization failed: {}", e)))?;
+        Ok(())
+    }
+
+    /// Create a default sandbox configuration for macOS.
+    ///
+    /// This configuration includes:
+    ///     - Network access: disabled
+    ///     - Read access: entire filesystem
+    ///     - Read-execute: /bin, /usr/bin
+    ///     - Read-write: /tmp, /var/tmp, $TMPDIR
+    ///
+    /// Returns:
+    ///     A SandboxConfig configured for macOS
+    #[staticmethod]
+    fn for_macos() -> Self {
+        PySandboxConfig {
+            inner: RustSandboxConfiguration::for_macos(),
+        }
+    }
+
+    /// Create a default sandbox configuration for Linux.
+    ///
+    /// This configuration includes:
+    ///     - Network access: disabled
+    ///     - Read access: entire filesystem
+    ///     - Read-execute: /bin, /usr/bin, /lib*, /usr/lib*
+    ///     - Read-write: /tmp, /dev/shm, $TMPDIR
+    ///
+    /// Returns:
+    ///     A SandboxConfig configured for Linux
+    #[staticmethod]
+    fn for_linux() -> Self {
+        PySandboxConfig {
+            inner: RustSandboxConfiguration::for_linux(),
+        }
+    }
+
+    /// Add a path to the read-only list.
+    ///
+    /// Args:
+    ///     path: Path to add to the read-only list
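+    ///
+    /// Example (illustrative):
+    ///     >>> config = SandboxConfig.for_linux()
+    ///     >>> config.add_read("/opt/shared-data")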
+    fn add_read(&mut self, path: PathBuf) -> PyResult<()> {
+        let mut paths = self.read()?;
+        paths.push(path);
+        self.set_read(paths)
+    }
+
+    /// Add a path to the read-execute list.
+    ///
+    /// Args:
+    ///     path: Path to add to the read-execute list
+    fn add_read_execute(&mut self, path: PathBuf) -> PyResult<()> {
+        let mut paths = self.read_execute()?;
+        paths.push(path);
+        self.set_read_execute(paths)
+    }
+
+    /// Add a path to the read-write list.
+    ///
+    /// Args:
+    ///     path: Path to add to the read-write list
+    fn add_read_write(&mut self, path: PathBuf) -> PyResult<()> {
+        let mut paths = self.read_write()?;
+        paths.push(path);
+        self.set_read_write(paths)
+    }
+
+    /// String representation of the SandboxConfig.
+    fn __repr__(&self) -> PyResult<String> {
+        Ok(format!(
+            "SandboxConfig(allow_network={}, read={} paths, read_execute={} paths, read_write={} paths)",
+            self.allow_network()?,
+            self.read()?.len(),
+            self.read_execute()?.len(),
+            self.read_write()?.len()
+        ))
+    }
+
+    /// Detailed string representation.
+    fn __str__(&self) -> String {
+        format!("{}", self.inner)
+    }
+}
diff --git a/py-rattler-build/src/test_configuration.rs b/py-rattler-build/src/test_configuration.rs
new file mode 100644
index 000000000..0ceb78581
--- /dev/null
+++ b/py-rattler-build/src/test_configuration.rs
@@ -0,0 +1,184 @@
+//! Python bindings for TestConfig
+//!
+//! This module provides Python wrappers for the Rust TestConfiguration struct,
+//! which controls package testing settings.
+
+use crate::PyDebug;
+use ::rattler_build::package_test::TestConfiguration as RustTestConfiguration;
+use pyo3::prelude::*;
+use std::path::PathBuf;
+
+/// Python wrapper for the TestConfig struct.
+///
+/// TestConfig controls the settings for testing conda packages.
+/// This is a read-only wrapper that exposes configuration properties for
+/// inspection. It is typically created internally during test runs.
+///
+/// Examples:
+///     Access test configuration (from a test context):
+///     >>> config = get_test_config()  # From test context
+///     >>> print(f"Test prefix: {config.test_prefix}")
+///     >>> print(f"Keep prefix: {config.keep_test_prefix}")
+///     >>> print(f"Target platform: {config.target_platform}")
+#[pyclass(name = "TestConfig")]
+#[derive(Clone)]
+pub struct PyTestConfig {
+    pub(crate) inner: RustTestConfiguration,
+}
+
+#[pymethods]
+impl PyTestConfig {
+    /// Get the test prefix directory path.
+    ///
+    /// The directory where the test environment is created.
+    ///
+    /// Returns:
+    ///     Path to the test prefix directory
+    #[getter]
+    fn test_prefix(&self) -> PathBuf {
+        self.inner.test_prefix.clone()
+    }
+
+    /// Get the target platform.
+    ///
+    /// The platform for which the package was built.
+    ///
+    /// Returns:
+    ///     Target platform string, or None if not set
+    #[getter]
+    fn target_platform(&self) -> Option<String> {
+        self.inner.target_platform.map(|p| p.to_string())
+    }
+
+    /// Get the host platform.
+    ///
+    /// The platform for runtime dependencies.
+    ///
+    /// Returns:
+    ///     Host platform string, or None if not set
+    #[getter]
+    fn host_platform(&self) -> Option<String> {
+        self.inner
+            .host_platform
+            .as_ref()
+            .map(|p| p.platform.to_string())
+    }
+
+    /// Get the current platform.
+    ///
+    /// The platform running the tests.
+    ///
+    /// Returns:
+    ///     Current platform string
+    #[getter]
+    fn current_platform(&self) -> String {
+        self.inner.current_platform.platform.to_string()
+    }
+
+    /// Check whether the test prefix should be kept after the test.
+    ///
+    /// Returns:
+    ///     True if the test prefix is kept, False if it is deleted
+    #[getter]
+    fn keep_test_prefix(&self) -> bool {
+        self.inner.keep_test_prefix
+    }
+
+    /// Get the test index to execute.
+    ///
+    /// If set, only this specific test will be run.
+    ///
+    /// Returns:
+    ///     Test index, or None to run all tests
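+    ///
+    /// Example (illustrative; `config` comes from a test run):
+    ///     >>> if config.test_index is not None:
+    ///     ...     print(f"Running only test #{config.test_index}")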
+    #[getter]
+    fn test_index(&self) -> Option<usize> {
+        self.inner.test_index
+    }
+
+    /// Get the channels used for testing.
+    ///
+    /// Returns:
+    ///     List of channel URLs as strings
+    #[getter]
+    fn channels(&self) -> Vec<String> {
+        self.inner.channels.iter().map(|c| c.to_string()).collect()
+    }
+
+    /// Get the channel priority.
+    ///
+    /// Returns:
+    ///     Channel priority as a string
+    #[getter]
+    fn channel_priority(&self) -> String {
+        format!("{:?}", self.inner.channel_priority)
+    }
+
+    /// Get the solve strategy.
+    ///
+    /// Returns:
+    ///     Solve strategy as a string
+    #[getter]
+    fn solve_strategy(&self) -> String {
+        format!("{:?}", self.inner.solve_strategy)
+    }
+
+    /// Get the output directory.
+    ///
+    /// The directory where test artifacts are created.
+    ///
+    /// Returns:
+    ///     Path to the output directory
+    #[getter]
+    fn output_dir(&self) -> PathBuf {
+        self.inner.output_dir.clone()
+    }
+
+    /// Get the debug configuration.
+    ///
+    /// Returns:
+    ///     Debug instance indicating whether debug mode is enabled
+    #[getter]
+    fn debug(&self) -> PyDebug {
+        PyDebug {
+            inner: self.inner.debug,
+        }
+    }
+
+    /// Get the exclude_newer timestamp.
+    ///
+    /// Packages newer than this date are excluded from the solver.
+    ///
+    /// Returns:
+    ///     ISO 8601 timestamp string, or None if not set
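+    ///
+    /// Example (illustrative; the exact timestamp depends on the run):
+    ///     >>> config.exclude_newer
+    ///     '2024-06-01T00:00:00+00:00'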
+    #[getter]
+    fn exclude_newer(&self) -> Option<String> {
+        self.inner.exclude_newer.map(|dt| dt.to_rfc3339())
+    }
+
+    /// String representation of the TestConfig.
+    fn __repr__(&self) -> String {
+        format!(
+            "TestConfig(test_prefix={:?}, target_platform={:?}, keep_test_prefix={})",
+            self.inner.test_prefix,
+            self.inner.target_platform.map(|p| p.to_string()),
+            self.inner.keep_test_prefix
+        )
+    }
+
+    /// Detailed string representation.
+    fn __str__(&self) -> String {
+        format!(
+            "TestConfig:\n  Test prefix: {:?}\n  Target platform: {:?}\n  Host platform: {:?}\n  Keep prefix: {}\n  Test index: {:?}\n  Output dir: {:?}\n  Debug: {}",
+            self.inner.test_prefix,
+            self.inner.target_platform.map(|p| p.to_string()),
+            self.inner
+                .host_platform
+                .as_ref()
+                .map(|p| p.platform.to_string()),
+            self.inner.keep_test_prefix,
+            self.inner.test_index,
+            self.inner.output_dir,
+            self.inner.debug.is_enabled()
+        )
+    }
+}
diff --git a/py-rattler-build/src/variant_config.rs b/py-rattler-build/src/variant_config.rs
new file mode 100644
index 000000000..1c3bcaeed
--- /dev/null
+++ b/py-rattler-build/src/variant_config.rs
@@ -0,0 +1,303 @@
+use crate::PySelectorConfig;
+use crate::error::RattlerBuildError;
+use ::rattler_build::{
+    NormalizedKey,
+    recipe::variable::Variable,
+    variant_config::{Pin as RustPin, VariantConfig as RustVariantConfig},
+};
+use pyo3::prelude::*;
+use std::collections::BTreeMap;
+use std::path::PathBuf;
+
+/// Python wrapper for the Pin struct
+#[pyclass(name = "PyPin")]
+#[derive(Clone, Debug)]
+pub struct PyPin {
+    pub(crate) inner: RustPin,
+}
+
+#[pymethods]
+impl PyPin {
+    #[new]
+    #[pyo3(signature = (max_pin=None, min_pin=None))]
+    fn new(max_pin: Option<String>, min_pin: Option<String>) -> Self {
+        PyPin {
+            inner: RustPin { max_pin, min_pin },
+        }
+    }
+
+    #[getter]
+    fn max_pin(&self) -> Option<String> {
+        self.inner.max_pin.clone()
+    }
+
+    #[setter]
+    fn set_max_pin(&mut self, value: Option<String>) {
+        self.inner.max_pin = value;
+    }
+
+    #[getter]
+    fn min_pin(&self) -> Option<String> {
+        self.inner.min_pin.clone()
+    }
+
+    #[setter]
+    fn set_min_pin(&mut self, value: Option<String>) {
+        self.inner.min_pin = value;
+    }
+
+    fn __repr__(&self) -> String {
+        format!(
+            "Pin(max_pin={:?}, min_pin={:?})",
+            self.inner.max_pin, self.inner.min_pin
+        )
+    }
+}
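+
+// Example (illustrative) of using the pin wrapper from Python, via the
+// `Pin` class re-exported by the package:
+//     >>> pin = Pin(max_pin="x.x", min_pin="x.x.x")
+//     >>> pin.max_pin
+//     'x.x'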
+
+/// Python wrapper for the VariantConfig struct
+#[pyclass(name = "PyVariantConfig")]
+#[derive(Clone, Debug)]
+pub struct PyVariantConfig {
+    pub(crate) inner: RustVariantConfig,
+}
+
+#[pymethods]
+impl PyVariantConfig {
+    #[new]
+    #[pyo3(signature = (pin_run_as_build=None, zip_keys=None, variants=None))]
+    fn new(
+        py: Python<'_>,
+        pin_run_as_build: Option<BTreeMap<String, PyPin>>,
+        zip_keys: Option<Vec<Vec<String>>>,
+        variants: Option<BTreeMap<String, Vec<Py<PyAny>>>>,
+    ) -> PyResult<Self> {
+        // Convert pin_run_as_build from the Python wrapper to Rust
+        let pin_run_as_build = pin_run_as_build
+            .map(|map| map.into_iter().map(|(k, v)| (k, v.inner.clone())).collect());
+
+        // Convert zip_keys from String to NormalizedKey
+        let zip_keys = zip_keys.map(|keys| {
+            keys.into_iter()
+                .map(|inner_vec| inner_vec.into_iter().map(NormalizedKey::from).collect())
+                .collect()
+        });
+
+        // Convert variants from Python to Rust
+        let variants = if let Some(variant_dict) = variants {
+            let mut map = BTreeMap::new();
+            for (key, value_list) in variant_dict {
+                let normalized_key = NormalizedKey::from(key);
+                let mut variables = Vec::new();
+
+                for py_value in value_list {
+                    let json_val: serde_json::Value = pythonize::depythonize(py_value.bind(py))
+                        .map_err(|e| {
+                            RattlerBuildError::Variant(format!(
+                                "Failed to convert variant value: {}",
+                                e
+                            ))
+                        })?;
+
+                    let variable = match &json_val {
+                        serde_json::Value::String(s) => Variable::from_string(s),
+                        serde_json::Value::Bool(b) => Variable::from(*b),
+                        serde_json::Value::Number(n) => {
+                            if let Some(i) = n.as_i64() {
+                                Variable::from(i)
+                            } else {
+                                Variable::from_string(&n.to_string())
+                            }
+                        }
+                        _ => {
+                            return Err(RattlerBuildError::Variant(
+                                "Variant values must be string, bool, or number".to_string(),
+                            )
+                            .into());
+                        }
+                    };
+                    variables.push(variable);
+                }
+                map.insert(normalized_key, variables);
+            }
+            map
+        } else {
+            BTreeMap::new()
+        };
+
+        Ok(PyVariantConfig {
+            inner: RustVariantConfig {
+                pin_run_as_build,
+                zip_keys,
+                variants,
+            },
+        })
+    }
+
+    #[getter]
+    fn pin_run_as_build(&self) -> Option<BTreeMap<String, PyPin>> {
+        self.inner.pin_run_as_build.as_ref().map(|map| {
+            map.iter()
+                .map(|(k, v)| (k.clone(), PyPin { inner: v.clone() }))
+                .collect()
+        })
+    }
+
+    #[setter]
+    fn set_pin_run_as_build(&mut self, value: Option<BTreeMap<String, PyPin>>) {
+        self.inner.pin_run_as_build =
+            value.map(|map| map.into_iter().map(|(k, v)| (k, v.inner.clone())).collect());
+    }
+
+    #[getter]
+    fn zip_keys(&self) -> Option<Vec<Vec<String>>> {
+        self.inner.zip_keys.as_ref().map(|keys| {
+            keys.iter()
+                .map(|inner_vec| inner_vec.iter().map(|nk| nk.normalize()).collect())
+                .collect()
+        })
+    }
+
+    #[setter]
+    fn set_zip_keys(&mut self, value: Option<Vec<Vec<String>>>) {
+        self.inner.zip_keys = value.map(|keys| {
+            keys.into_iter()
+                .map(|inner_vec| inner_vec.into_iter().map(NormalizedKey::from).collect())
+                .collect()
+        });
+    }
+
+    #[getter]
+    fn variants(&self, py: Python<'_>) -> PyResult<Py<PyAny>> {
+        let json_variants: BTreeMap<String, Vec<serde_json::Value>> = self
+            .inner
+            .variants
+            .iter()
+            .map(|(k, v)| {
+                let values: Result<Vec<serde_json::Value>, RattlerBuildError> = v
+                    .iter()
+                    .map(|var| serde_json::to_value(var).map_err(RattlerBuildError::from))
+                    .collect();
+                values.map(|vals| (k.normalize(), vals))
+            })
+            .collect::<Result<BTreeMap<_, _>, RattlerBuildError>>()?;
+
+        Ok(pythonize::pythonize(py, &json_variants)
+            .map(|obj| obj.into())
+            .map_err(|e| {
+                RattlerBuildError::Variant(format!("Failed to convert variants to Python: {}", e))
+            })?)
+    }
+
+    #[setter]
+    fn set_variants(
+        &mut self,
+        py: Python<'_>,
+        value: BTreeMap<String, Vec<Py<PyAny>>>,
+    ) -> PyResult<()> {
+        let mut map = BTreeMap::new();
+        for (key, value_list) in value {
+            let normalized_key = NormalizedKey::from(key);
+            let mut variables = Vec::new();
+
+            for py_value in value_list {
+                let json_val: serde_json::Value = pythonize::depythonize(py_value.bind(py))
+                    .map_err(|e| {
+                        RattlerBuildError::Variant(format!(
+                            "Failed to convert variant value: {}",
+                            e
+                        ))
+                    })?;
+
+                let variable = match &json_val {
+                    serde_json::Value::String(s) => Variable::from_string(s),
+                    serde_json::Value::Bool(b) => Variable::from(*b),
+                    serde_json::Value::Number(n) => {
+                        if let Some(i) = n.as_i64() {
+                            Variable::from(i)
+                        } else {
+                            Variable::from_string(&n.to_string())
+                        }
+                    }
+                    _ => {
+                        return Err(RattlerBuildError::Variant(
+                            "Variant values must be string, bool, or number".to_string(),
+                        )
+                        .into());
+                    }
+                };
+                variables.push(variable);
+            }
+            map.insert(normalized_key, variables);
+        }
+        self.inner.variants = map;
+        Ok(())
+    }
+
+    fn __repr__(&self) -> String {
+        format!(
+            "VariantConfig(pin_run_as_build={:?}, zip_keys={:?}, variants_keys={:?})",
+            self.inner
+                .pin_run_as_build
+                .as_ref()
+                .map(|m| m.keys().collect::<Vec<_>>()),
+            self.inner.zip_keys,
+            self.inner.variants.keys().collect::<Vec<_>>()
+        )
+    }
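+
+    // Example (illustrative) of constructing a variant config from Python via
+    // the `VariantConfig` wrapper re-exported by the package:
+    //     >>> cfg = VariantConfig(variants={"python": ["3.11", "3.12"]})
+    //     >>> cfg.variants
+    //     {'python': ['3.11', '3.12']}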
+    /// Load a VariantConfig from a single file.
+    ///
+    /// This function loads a single variant configuration file. The file can be
+    /// either a variant config file (variants.yaml) or a conda-build config file
+    /// (conda_build_config.yaml). The target_platform and build_platform are
+    /// automatically inserted into the variants.
+    ///
+    /// Args:
+    ///     path: Path to the variant configuration file
+    ///     selector_config: SelectorConfig to use for rendering
+    ///
+    /// Returns:
+    ///     A new PyVariantConfig with the configuration from the file
+    #[staticmethod]
+    fn from_file(path: PathBuf, selector_config: &PySelectorConfig) -> PyResult<Self> {
+        let config = RustVariantConfig::from_file(&path, &selector_config.inner).map_err(|e| {
+            RattlerBuildError::Variant(format!("Failed to load variant config: {:?}", e))
+        })?;
+
+        Ok(PyVariantConfig { inner: config })
+    }
+
+    /// Load a VariantConfig from a list of files.
+    ///
+    /// This function loads multiple variant configuration files and merges them
+    /// into a single configuration. Files can be either variant config files
+    /// (variants.yaml) or conda-build config files (conda_build_config.yaml).
+    ///
+    /// Args:
+    ///     files: List of paths to variant configuration files
+    ///     selector_config: SelectorConfig to use for rendering
+    ///
+    /// Returns:
+    ///     A new PyVariantConfig with the merged configuration
+    #[staticmethod]
+    fn from_files(files: Vec<PathBuf>, selector_config: &PySelectorConfig) -> PyResult<Self> {
+        let config =
+            RustVariantConfig::from_files(&files, &selector_config.inner).map_err(|e| {
+                RattlerBuildError::Variant(format!("Failed to load variant config: {:?}", e))
+            })?;
+
+        Ok(PyVariantConfig { inner: config })
+    }
+
+    /// Merge another VariantConfig into this one.
+    ///
+    /// - Variants are extended (keys from `other` replace keys in `self`)
+    /// - pin_run_as_build entries are extended
+    /// - zip_keys are replaced (not merged)
+    ///
+    /// Args:
+    ///     other: Another PyVariantConfig to merge into this one
+    fn merge(&mut self, other: &PyVariantConfig) {
+        self.inner.merge(other.inner.clone());
+    }
+}
diff --git a/py-rattler-build/tests/data/variant_configs/conda_build_config.yaml b/py-rattler-build/tests/data/variant_configs/conda_build_config.yaml
new file mode 100644
index 000000000..2216581ef
--- /dev/null
+++ b/py-rattler-build/tests/data/variant_configs/conda_build_config.yaml
@@ -0,0 +1,13 @@
+python:
+  - "3.9"
+  - "3.10"
+
+cuda_compiler_version:
+  - "11.8"
+  - "12.0"
+
+pin_run_as_build:
+  python:
+    max_pin: x.x
+  numpy:
+    max_pin: x.x
diff --git a/py-rattler-build/tests/data/variant_configs/override_variants.yaml b/py-rattler-build/tests/data/variant_configs/override_variants.yaml
new file mode 100644
index 000000000..f0e5119bd
--- /dev/null
+++ b/py-rattler-build/tests/data/variant_configs/override_variants.yaml
@@ -0,0 +1,6 @@
+python:
+  - "3.12"
+
+rust:
+  - "1.70"
+  - "1.71"
diff --git a/py-rattler-build/tests/data/variant_configs/simple_variants.yaml b/py-rattler-build/tests/data/variant_configs/simple_variants.yaml
new file mode 100644
index 000000000..7d976b791
--- /dev/null
+++ b/py-rattler-build/tests/data/variant_configs/simple_variants.yaml
@@ -0,0 +1,9 @@
+python:
+  - "3.9"
+  - "3.10"
+  - "3.11"
+
+numpy:
+  - "1.21"
+  - "1.22"
+  - "1.23"
diff --git a/py-rattler-build/tests/data/variant_configs/with_zip_keys.yaml b/py-rattler-build/tests/data/variant_configs/with_zip_keys.yaml
new file mode 100644
index 000000000..271ddcb7f
--- /dev/null
+++ b/py-rattler-build/tests/data/variant_configs/with_zip_keys.yaml
@@ -0,0 +1,12 @@
+python:
+  - "3.9"
+  - "3.10"
+  - "3.11"
+
+numpy:
+  - "1.21"
+  - "1.22"
+  - "1.23"
+
+zip_keys:
+  - [python, numpy]
diff --git a/py-rattler-build/tests/unit/test_build_configuration.py b/py-rattler-build/tests/unit/test_build_configuration.py
new file mode 100644
index 000000000..51a8fb96d
--- /dev/null
+++ 
b/py-rattler-build/tests/unit/test_build_configuration.py @@ -0,0 +1,384 @@ +"""Test suite for BuildConfig. + +Note: BuildConfig is typically created internally during builds, +so these tests document the expected interface rather than test construction. +""" + + +class TestBuildConfigInterface: + """Test suite for BuildConfig interface. + + Note: Since BuildConfig objects are created internally during builds, + we can't directly instantiate them in tests. These tests document the expected + interface and can be run when BuildConfig objects are available from actual builds. + """ + + def test_build_configuration_interface(self) -> None: + """Document the expected interface for BuildConfig.""" + # This is a documentation test showing the expected interface + # In actual usage, you would get a BuildConfig object from a build + + # Expected properties (all read-only): + expected_properties = [ + "target_platform", # str + "host_platform", # Dict[str, Any] + "build_platform", # Dict[str, Any] + "variant", # Dict[str, Any] + "hash", # str + "directories", # Directories + "channels", # List[str] + "channel_priority", # str + "solve_strategy", # str + "timestamp", # str (ISO 8601) + "subpackages", # Dict[str, Dict[str, Any]] + "packaging_settings", # PackagingConfig + "store_recipe", # bool + "force_colors", # bool + "sandbox_config", # Optional[SandboxConfig] + "debug", # Debug + "exclude_newer", # Optional[str] + ] + + # Expected methods: + expected_methods = [ + "cross_compilation", # () -> bool + "target_platform_name", # () -> str + "host_platform_name", # () -> str + "build_platform_name", # () -> str + ] + + assert len(expected_properties) == 17 + assert len(expected_methods) == 4 + + def test_build_configuration_documentation(self) -> None: + """Document how BuildConfig is used in practice.""" + # This test documents usage patterns + + # Example usage (pseudo-code): + # During a package build, you might receive a BuildConfig object: + # config = build_context.configuration + + # Access various properties: + # target = config.target_platform # "linux-64" + # hash_str = config.hash # "h1234567_0" + # dirs = config.directories # Directories object + # channels = config.channels # List of channel URLs + # variant = config.variant # {"python": "3.11", "numpy": "1.21"} + + # Check cross-compilation: + # if config.cross_compilation(): + # print(f"Cross-compiling from {config.build_platform_name()} to {config.target_platform_name()}") + + # Access nested configuration: + # debug = config.debug # Debug object + # if debug: + # print("Debug mode enabled") + + # packaging = config.packaging_settings # PackagingConfig object + # print(f"Using {packaging.extension()} format") + pass + + +class TestBuildConfigSemantics: + """Test the semantic meaning of BuildConfig properties.""" + + def test_property_purposes(self) -> None: + """Document the purpose of each property.""" + purposes = { + "target_platform": "Platform the package is being built for", + "host_platform": "Platform where the package will run (with virtual packages)", + "build_platform": "Platform on which the build is running (with virtual packages)", + "variant": "Selected variant configuration (e.g., python version, numpy version)", + "hash": "Computed hash of the variant configuration", + "directories": "Build directory structure (work_dir, host_prefix, etc.)", + "channels": "Channels used for resolving dependencies", + "channel_priority": "Strategy for channel priority (Strict/Flexible)", + "solve_strategy": "Strategy for dependency 
resolution", + "timestamp": "Build timestamp in ISO 8601 format", + "subpackages": "All subpackages from this output or other outputs", + "packaging_settings": "Package format and compression settings", + "store_recipe": "Whether to store recipe in the package", + "force_colors": "Whether to force colors in build script output", + "sandbox_config": "Sandbox configuration (if sandboxing is enabled)", + "debug": "Debug mode configuration", + "exclude_newer": "Exclude packages newer than this timestamp", + } + + assert len(purposes) == 17 + + def test_platform_relationships(self) -> None: + """Document relationships between platform properties.""" + # target_platform: The platform the package is being built for (simple string) + # host_platform: The platform for runtime dependencies (dict with virtual packages) + # build_platform: The platform running the build (dict with virtual packages) + + # For cross-compilation: + # - target_platform might be "linux-aarch64" + # - host_platform might be {"platform": "linux-aarch64", "virtual_packages": [...]} + # - build_platform might be {"platform": "linux-x86_64", "virtual_packages": [...]} + # - cross_compilation() would return True + + # For native builds: + # - All three platforms would typically be the same + # - cross_compilation() would return False + + # Platform name methods: + # - target_platform_name() returns the string directly + # - host_platform_name() extracts just the platform string + # - build_platform_name() extracts just the platform string + pass + + def test_variant_and_hash(self) -> None: + """Document variant and hash relationship.""" + # variant: Dictionary of variant values + # Example: {"python": "3.11", "numpy": "1.21", "c_compiler": "gcc"} + + # hash: Computed hash of the variant + # Example: "h1234567_0" + # - Used in package build string + # - Ensures different variants produce different packages + pass + + def test_directory_structure(self) -> None: + """Document the directories property.""" + # directories: Directories object with all build paths + # - recipe_dir: Directory containing the recipe + # - recipe_path: Path to the recipe file + # - cache_dir: Build cache directory + # - host_prefix: Installation prefix for host dependencies ($PREFIX) + # - build_prefix: Installation prefix for build dependencies ($BUILD_PREFIX) + # - work_dir: Source/build working directory + # - build_dir: Parent directory for the build + # - output_dir: Directory for package output + pass + + +class TestBuildConfigUsageExamples: + """Examples of how BuildConfig would be used in real scenarios.""" + + def test_inspecting_build_configuration(self) -> None: + """Example: Inspecting build configuration.""" + # def inspect_build_config(config: BuildConfig): + # print(f"Build Configuration:") + # print(f" Target: {config.target_platform}") + # print(f" Hash: {config.hash}") + # print(f" Channels: {', '.join(config.channels)}") + # print(f" Cross-compilation: {config.cross_compilation()}") + # print(f" Store recipe: {config.store_recipe}") + pass + + def test_conditional_build_logic(self) -> None: + """Example: Conditional logic based on build configuration.""" + # def run_platform_specific_build(config: BuildConfig): + # if config.target_platform == "linux-64": + # # Linux-specific build steps + # pass + # elif config.target_platform == "osx-arm64": + # # macOS ARM build steps + # pass + # + # if config.cross_compilation(): + # # Set up cross-compilation toolchain + # pass + pass + + def test_accessing_variant_values(self) -> None: + 
"""Example: Using variant values in build.""" + # def build_with_variants(config: BuildConfig): + # variant = config.variant + # python_version = variant.get("python", "3.11") + # numpy_version = variant.get("numpy", "1.21") + # + # print(f"Building for Python {python_version}, NumPy {numpy_version}") + # + # # Use variant values in build commands + # # build_command = f"python{python_version} setup.py build" + pass + + def test_working_with_directories(self) -> None: + """Example: Using directories in build.""" + # def custom_build_step(config: BuildConfig): + # dirs = config.directories + # + # # Access work directory + # work_dir = dirs.work_dir + # print(f"Building in: {work_dir}") + # + # # Access install prefix + # prefix = dirs.host_prefix + # print(f"Installing to: {prefix}") + # + # # Access output directory + # output = dirs.output_dir + # print(f"Package will be in: {output}") + pass + + def test_sandbox_configuration(self) -> None: + """Example: Working with sandbox configuration.""" + # def check_sandbox(config: BuildConfig): + # if config.sandbox_config: + # sandbox = config.sandbox_config + # if sandbox.allow_network: + # print("Network access allowed in sandbox") + # else: + # print("Network access restricted") + # else: + # print("Sandboxing not configured") + pass + + def test_packaging_settings(self) -> None: + """Example: Inspecting packaging settings.""" + # def check_packaging(config: BuildConfig): + # packaging = config.packaging_settings + # + # if packaging.is_conda(): + # print(f"Using conda format with compression level {packaging.compression_level}") + # else: + # print(f"Using tar.bz2 format with compression level {packaging.compression_level}") + # + # print(f"Output extension: {packaging.extension()}") + pass + + +class TestBuildConfigIntegration: + """Integration scenarios with BuildConfig.""" + + def test_full_build_workflow(self) -> None: + """Document a typical build workflow using BuildConfig.""" + # 1. BuildConfig is created internally during package build + # 2. Build script has access to config via environment or build context + # 3. Script can query platforms, variants, directories + # 4. Build proceeds with appropriate settings + # 5. Package is created using packaging_settings + # 6. 
Recipe is optionally stored if store_recipe is True + pass + + def test_cross_platform_build(self) -> None: + """Document cross-platform build with BuildConfig.""" + # For cross-platform builds: + # - config.build_platform is the current machine + # - config.target_platform is the target architecture + # - config.host_platform may differ from target (for noarch) + # - config.cross_compilation() returns True + # - Appropriate cross-compilation tools are used + pass + + def test_variant_matrix_build(self) -> None: + """Document building with variant matrix.""" + # When building with multiple variants: + # - Each variant combination gets its own BuildConfig + # - config.variant contains the specific combination + # - config.hash uniquely identifies the variant + # - Multiple packages are produced, one per variant + pass + + def test_multi_output_build(self) -> None: + """Document multi-output builds with subpackages.""" + # For recipes with multiple outputs: + # - Each output has its own BuildConfig + # - config.subpackages contains info about all outputs + # - Subpackages can depend on each other + # - All outputs share the same variant configuration + pass + + +class TestBuildConfigStringRepresentation: + """Test string representation methods.""" + + def test_repr_format(self) -> None: + """Document expected __repr__ format.""" + # Expected format: + # BuildConfig(target_platform='linux-64', hash='h1234567_0', cross_compilation=False) + pass + + def test_str_format(self) -> None: + """Document expected __str__ format.""" + # Expected format (detailed): + # BuildConfig: + # Target: linux-64 + # Host: linux-64 + # Build: linux-64 + # Hash: h1234567_0 + # Cross-compilation: False + # Channels: 3 + # Debug: False + pass + + +class TestBuildConfigPlatformMethods: + """Test platform-related methods.""" + + def test_cross_compilation_method(self) -> None: + """Document cross_compilation() method.""" + # cross_compilation() returns True if target != build platform + # + # Examples: + # - Building on linux-64 for linux-64: False + # - Building on linux-64 for linux-aarch64: True + # - Building on osx-arm64 for osx-64: True + pass + + def test_platform_name_methods(self) -> None: + """Document platform name extraction methods.""" + # target_platform_name(): Returns target platform as string + # host_platform_name(): Extracts platform from host_platform dict + # build_platform_name(): Extracts platform from build_platform dict + # + # These are convenient when you only need the platform string, + # not the full platform info with virtual packages + pass + + +# Note: To test actual BuildConfig objects, you would need to: +# 1. Create a test recipe +# 2. Run the build +# 3. Access the BuildConfig from the build context +# 4. 
Test property access and values +# +# Example test that would work with an actual BuildConfig object: +# +# def test_with_real_build_configuration(build_config: BuildConfig): +# """Test with an actual BuildConfig object.""" +# # Test that properties are accessible +# assert isinstance(build_config.target_platform, str) +# assert build_config.target_platform in ["linux-64", "osx-64", "osx-arm64", "win-64"] +# +# # Test platform properties +# assert isinstance(build_config.host_platform, dict) +# assert "platform" in build_config.host_platform +# assert "virtual_packages" in build_config.host_platform +# +# # Test hash +# assert isinstance(build_config.hash, str) +# assert len(build_config.hash) > 0 +# +# # Test variant +# assert isinstance(build_config.variant, dict) +# +# # Test channels +# assert isinstance(build_config.channels, list) +# assert all(isinstance(c, str) for c in build_config.channels) +# +# # Test directories +# from rattler_build import Directories +# assert isinstance(build_config.directories, Directories) +# +# # Test cross_compilation method +# assert isinstance(build_config.cross_compilation(), bool) +# +# # Test platform name methods +# assert isinstance(build_config.target_platform_name(), str) +# assert isinstance(build_config.host_platform_name(), str) +# assert isinstance(build_config.build_platform_name(), str) +# +# # Test packaging settings +# from rattler_build import PackagingConfig +# assert isinstance(build_config.packaging_settings, PackagingConfig) +# +# # Test debug +# from rattler_build import Debug +# assert isinstance(build_config.debug, Debug) +# +# # Test boolean properties +# assert isinstance(build_config.store_recipe, bool) +# assert isinstance(build_config.force_colors, bool) diff --git a/py-rattler-build/tests/unit/test_debug.py b/py-rattler-build/tests/unit/test_debug.py new file mode 100644 index 000000000..4b039573e --- /dev/null +++ b/py-rattler-build/tests/unit/test_debug.py @@ -0,0 +1,253 @@ +"""Test suite for Debug.""" + +from rattler_build import Debug + + +class TestDebugCreation: + """Test suite for Debug creation.""" + + def test_create_default(self) -> None: + """Test creating Debug with default value (disabled).""" + debug = Debug() + assert not debug.is_enabled() + + def test_create_enabled(self) -> None: + """Test creating Debug with enabled=True.""" + debug = Debug(True) + assert debug.is_enabled() + + def test_create_disabled(self) -> None: + """Test creating Debug with enabled=False.""" + debug = Debug(False) + assert not debug.is_enabled() + + def test_factory_enabled(self) -> None: + """Test Debug.enabled() factory method.""" + debug = Debug.enabled() + assert debug.is_enabled() + + def test_factory_disabled(self) -> None: + """Test Debug.disabled() factory method.""" + debug = Debug.disabled() + assert not debug.is_enabled() + + +class TestDebugModification: + """Test suite for modifying Debug state.""" + + def test_set_enabled_true(self) -> None: + """Test setting debug to enabled.""" + debug = Debug(False) + assert not debug.is_enabled() + debug.set_enabled(True) + assert debug.is_enabled() + + def test_set_enabled_false(self) -> None: + """Test setting debug to disabled.""" + debug = Debug(True) + assert debug.is_enabled() + debug.set_enabled(False) + assert not debug.is_enabled() + + def test_enable(self) -> None: + """Test enable() method.""" + debug = Debug(False) + debug.enable() + assert debug.is_enabled() + + def test_disable(self) -> None: + """Test disable() method.""" + debug = Debug(True) + debug.disable() + 
assert not debug.is_enabled() + + def test_toggle_from_disabled(self) -> None: + """Test toggle() from disabled to enabled.""" + debug = Debug(False) + debug.toggle() + assert debug.is_enabled() + + def test_toggle_from_enabled(self) -> None: + """Test toggle() from enabled to disabled.""" + debug = Debug(True) + debug.toggle() + assert not debug.is_enabled() + + def test_toggle_twice(self) -> None: + """Test toggle() twice returns to original state.""" + debug = Debug(True) + debug.toggle() + debug.toggle() + assert debug.is_enabled() + + +class TestDebugBooleanBehavior: + """Test suite for boolean behavior of Debug.""" + + def test_bool_when_enabled(self) -> None: + """Test that enabled Debug evaluates to True.""" + debug = Debug(True) + assert bool(debug) + assert debug # Direct boolean check + + def test_bool_when_disabled(self) -> None: + """Test that disabled Debug evaluates to False.""" + debug = Debug(False) + assert not bool(debug) + assert not debug # Direct boolean check + + def test_if_statement_enabled(self) -> None: + """Test using Debug in if statement when enabled.""" + debug = Debug(True) + result = "enabled" if debug else "disabled" + assert result == "enabled" + + def test_if_statement_disabled(self) -> None: + """Test using Debug in if statement when disabled.""" + debug = Debug(False) + result = "enabled" if debug else "disabled" + assert result == "disabled" + + +class TestDebugStringRepresentation: + """Test suite for string representations.""" + + def test_repr_enabled(self) -> None: + """Test __repr__ when debug is enabled.""" + debug = Debug(True) + repr_str = repr(debug) + assert "Debug" in repr_str + assert "True" in repr_str or "enabled=True" in repr_str + + def test_repr_disabled(self) -> None: + """Test __repr__ when debug is disabled.""" + debug = Debug(False) + repr_str = repr(debug) + assert "Debug" in repr_str + assert "False" in repr_str or "enabled=False" in repr_str + + def test_str_enabled(self) -> None: + """Test __str__ when debug is enabled.""" + debug = Debug(True) + str_repr = str(debug) + assert "enabled" in str_repr.lower() + + def test_str_disabled(self) -> None: + """Test __str__ when debug is disabled.""" + debug = Debug(False) + str_repr = str(debug) + assert "disabled" in str_repr.lower() + + +class TestDebugIntegration: + """Integration tests for Debug.""" + + def test_workflow_enable_disable(self) -> None: + """Test a workflow of enabling and disabling debug.""" + # Start disabled + debug = Debug() + assert not debug.is_enabled() + + # Enable for debugging + debug.enable() + assert debug.is_enabled() + + # Disable after debugging + debug.disable() + assert not debug.is_enabled() + + def test_workflow_toggle(self) -> None: + """Test a workflow using toggle.""" + debug = Debug.disabled() + + # Toggle to enable + debug.toggle() + assert debug.is_enabled() + + # Toggle to disable + debug.toggle() + assert not debug.is_enabled() + + def test_conditional_workflow(self) -> None: + """Test using Debug in conditional workflow.""" + debug = Debug(True) + + messages = [] + if debug: + messages.append("Debug output enabled") + + assert len(messages) == 1 + assert messages[0] == "Debug output enabled" + + def test_state_persistence(self) -> None: + """Test that Debug state persists correctly.""" + debug = Debug(True) + assert debug.is_enabled() + + # State should persist + assert debug.is_enabled() + assert debug.is_enabled() + + debug.disable() + assert not debug.is_enabled() + assert not debug.is_enabled() + + +class TestDebugEdgeCases: + """Test 
edge cases for Debug.""" + + def test_multiple_enables(self) -> None: + """Test that multiple enables don't cause issues.""" + debug = Debug(False) + debug.enable() + debug.enable() + debug.enable() + assert debug.is_enabled() + + def test_multiple_disables(self) -> None: + """Test that multiple disables don't cause issues.""" + debug = Debug(True) + debug.disable() + debug.disable() + debug.disable() + assert not debug.is_enabled() + + def test_set_same_value(self) -> None: + """Test setting Debug to its current value.""" + debug = Debug(True) + debug.set_enabled(True) + assert debug.is_enabled() + + debug = Debug(False) + debug.set_enabled(False) + assert not debug.is_enabled() + + def test_many_toggles(self) -> None: + """Test many consecutive toggles.""" + debug = Debug(False) + + for i in range(100): + debug.toggle() + expected = (i + 1) % 2 == 1 # Odd iterations should be enabled + assert debug.is_enabled() == expected + + +class TestDebugComparison: + """Test comparison and equality.""" + + def test_boolean_comparison(self) -> None: + """Test comparing Debug with boolean values.""" + enabled = Debug(True) + disabled = Debug(False) + + assert enabled # Truthy + assert not disabled # Falsy + + def test_state_comparison(self) -> None: + """Test comparing Debug states.""" + debug1 = Debug(True) + debug2 = Debug(True) + debug3 = Debug(False) + + assert debug1.is_enabled() == debug2.is_enabled() + assert debug1.is_enabled() != debug3.is_enabled() diff --git a/py-rattler-build/tests/unit/test_directories.py b/py-rattler-build/tests/unit/test_directories.py new file mode 100644 index 000000000..8b0354f1d --- /dev/null +++ b/py-rattler-build/tests/unit/test_directories.py @@ -0,0 +1,223 @@ +"""Test suite for Directories. + +Note: Directories is typically created internally during builds, so these tests +focus on property access rather than creation. +""" + + +class TestDirectoriesProperties: + """Test suite for Directories property access. + + Note: Since Directories objects are created internally during the build process, + we can't directly instantiate them in tests. These tests document the expected + interface and can be run when Directories objects are available from actual builds. 
+ """ + + def test_directories_interface(self) -> None: + """Document the expected interface for Directories.""" + # This is a documentation test showing the expected interface + # In actual usage, you would get a Directories object from a build context + + # Expected properties: + expected_properties = [ + "recipe_dir", + "recipe_path", + "cache_dir", + "host_prefix", + "build_prefix", + "work_dir", + "build_dir", + "output_dir", + ] + + # All properties should return Path objects + assert all(isinstance(prop, str) for prop in expected_properties) + + def test_directories_documentation(self) -> None: + """Document how Directories is used in practice.""" + # This test documents usage patterns + + # Example usage (pseudo-code): + # During a build, you might receive a Directories object: + # dirs = build_context.directories + + # Access various paths: + # recipe_dir = dirs.recipe_dir # Path to recipe directory + # work_dir = dirs.work_dir # Path to work directory + # host_prefix = dirs.host_prefix # Path to host prefix ($PREFIX) + # build_prefix = dirs.build_prefix # Path to build prefix ($BUILD_PREFIX) + # output_dir = dirs.output_dir # Path to output directory + + # All paths are PathBuf (pathlib.Path in Python) + pass + + +class TestDirectoriesSemantics: + """Test the semantic meaning of each directory.""" + + def test_directory_purposes(self) -> None: + """Document the purpose of each directory.""" + purposes = { + "recipe_dir": "Directory containing the recipe file", + "recipe_path": "Full path to the recipe file itself", + "cache_dir": "Build cache directory for downloaded sources, etc.", + "host_prefix": "Installation prefix for host dependencies ($PREFIX)", + "build_prefix": "Installation prefix for build dependencies ($BUILD_PREFIX)", + "work_dir": "Working directory where source is extracted and built", + "build_dir": "Parent directory containing host, build, and work dirs", + "output_dir": "Output directory where final packages are written", + } + + assert len(purposes) == 8 + assert "host_prefix" in purposes + assert "build_prefix" in purposes + assert "work_dir" in purposes + + def test_environment_variable_mapping(self) -> None: + """Document which directories map to environment variables.""" + env_mappings = { + "host_prefix": "$PREFIX or %PREFIX%", + "build_prefix": "$BUILD_PREFIX or %BUILD_PREFIX%", + } + + assert len(env_mappings) == 2 + + def test_directory_relationships(self) -> None: + """Document relationships between directories.""" + # build_dir is the parent of: + # - host_prefix + # - build_prefix + # - work_dir + + # output_dir is independent and contains the final packages + + # This is a documentation test + assert True + + +class TestDirectoriesUsageExamples: + """Examples of how Directories would be used in real scenarios.""" + + def test_accessing_paths_example(self) -> None: + """Example: Accessing paths from a Directories object.""" + # In a real build scenario: + # def my_build_function(directories: Directories): + # # Get the work directory + # work_dir = directories.work_dir + # print(f"Building in: {work_dir}") + # + # # Get the installation prefix + # prefix = directories.host_prefix + # print(f"Installing to: {prefix}") + # + # # Get the output directory + # output = directories.output_dir + # print(f"Package will be written to: {output}") + pass + + def test_path_manipulation_example(self) -> None: + """Example: Using paths for file operations.""" + # In a real build scenario: + # def install_files(directories: Directories): + # # Install to the host 
prefix + # install_dir = directories.host_prefix / "lib" / "mypackage" + # install_dir.mkdir(parents=True, exist_ok=True) + # + # # Copy from work directory + # source = directories.work_dir / "output" + # shutil.copytree(source, install_dir) + pass + + def test_build_script_environment_example(self) -> None: + """Example: How directories relate to build script environment.""" + # In the build script environment: + # - $PREFIX or %PREFIX% corresponds to directories.host_prefix + # - $BUILD_PREFIX or %BUILD_PREFIX% corresponds to directories.build_prefix + # - The build script runs in directories.work_dir + # - Sources are extracted to directories.work_dir + # - The final package is created from directories.host_prefix + # - The package file is written to directories.output_dir + pass + + +class TestDirectoriesIntegration: + """Integration scenarios with Directories.""" + + def test_typical_build_flow(self) -> None: + """Document a typical build flow using Directories.""" + # 1. Recipe is parsed from directories.recipe_path + # 2. Sources are downloaded and cached in directories.cache_dir + # 3. Sources are extracted to directories.work_dir + # 4. Build dependencies are installed to directories.build_prefix + # 5. Host dependencies are installed to directories.host_prefix + # 6. Build script runs in directories.work_dir + # 7. Build artifacts are installed to directories.host_prefix + # 8. Package is created from directories.host_prefix + # 9. Package file is written to directories.output_dir + pass + + def test_cross_compilation_scenario(self) -> None: + """Document cross-compilation with build and host prefixes.""" + # In cross-compilation: + # - directories.build_prefix contains tools that run on build platform + # - directories.host_prefix contains libraries for the target platform + # - The build script uses tools from $BUILD_PREFIX to build for $PREFIX + pass + + def test_noarch_build_scenario(self) -> None: + """Document noarch builds.""" + # For noarch packages: + # - directories.host_prefix and directories.build_prefix may be merged + # - The build is platform-independent + # - Python noarch packages install pure Python files to $PREFIX + pass + + +class TestDirectoriesStringRepresentation: + """Test string representation methods.""" + + def test_repr_format(self) -> None: + """Document expected __repr__ format.""" + # Expected format: + # Directories(recipe_dir=..., work_dir=..., host_prefix=..., build_prefix=..., output_dir=...) + pass + + def test_str_format(self) -> None: + """Document expected __str__ format.""" + # Expected format (detailed): + # Directories: + # Recipe dir: ... + # Recipe path: ... + # Cache dir: ... + # Work dir: ... + # Host prefix: ... + # Build prefix: ... + # Build dir: ... + # Output dir: ... + pass + + +# Note: To test actual Directories objects, you would need to: +# 1. Create a minimal build setup +# 2. Extract the Directories object from the build context +# 3. 
Test property access and values
+#
+# Example test that would work with an actual Directories object:
+#
+# def test_with_real_directories(build_directories: Directories):
+#     """Test with an actual Directories object from a build."""
+#     # Test that all paths are absolute
+#     assert build_directories.recipe_dir.is_absolute()
+#     assert build_directories.work_dir.is_absolute()
+#     assert build_directories.host_prefix.is_absolute()
+#     assert build_directories.build_prefix.is_absolute()
+#     assert build_directories.output_dir.is_absolute()
+#
+#     # Test that certain paths exist or are created
+#     assert build_directories.recipe_dir.exists()
+#     # work_dir, host_prefix, build_prefix created during build
+#
+#     # Test path relationships
+#     assert build_directories.host_prefix.parent == build_directories.build_dir
+#     assert build_directories.build_prefix.parent == build_directories.build_dir
+#     assert build_directories.work_dir.parent == build_directories.build_dir
diff --git a/py-rattler-build/tests/unit/test_packaging_settings.py b/py-rattler-build/tests/unit/test_packaging_settings.py
new file mode 100644
index 000000000..ba5a24858
--- /dev/null
+++ b/py-rattler-build/tests/unit/test_packaging_settings.py
@@ -0,0 +1,339 @@
+"""Test suite for PackagingConfig."""
+
+import pytest
+from rattler_build import PackagingConfig, ArchiveType
+
+
+class TestArchiveType:
+    """Test suite for ArchiveType enum."""
+
+    def test_archive_type_values(self) -> None:
+        """Test that ArchiveType has expected values."""
+        assert hasattr(ArchiveType, "TarBz2")
+        assert hasattr(ArchiveType, "Conda")
+
+    def test_archive_type_extension(self) -> None:
+        """Test archive type extensions."""
+        # ArchiveType is a Python enum whose members wrap the underlying
+        # PyO3 values; unwrap them via `.value`.
+        tar_bz2 = ArchiveType.TarBz2.value
+        conda = ArchiveType.Conda.value
+
+        assert tar_bz2.extension() == ".tar.bz2"
+        assert conda.extension() == ".conda"
+
+    def test_archive_type_str(self) -> None:
+        """Test archive type string representation."""
+        tar_bz2 = ArchiveType.TarBz2.value
+        conda = ArchiveType.Conda.value
+
+        assert str(tar_bz2) == "tar.bz2"
+        assert str(conda) == "conda"
+
+    def test_archive_type_repr(self) -> None:
+        """Test archive type repr."""
+        tar_bz2 = ArchiveType.TarBz2.value
+        conda = ArchiveType.Conda.value
+
+        assert "TarBz2" in repr(tar_bz2)
+        assert "Conda" in repr(conda)
+
+
+class TestPackagingConfigCreation:
+    """Test suite for PackagingConfig creation."""
+
+    def test_create_tar_bz2_default(self) -> None:
+        """Test creating tar.bz2 settings with default compression."""
+        settings = PackagingConfig.tar_bz2()
+        assert settings.is_tar_bz2()
+        assert not settings.is_conda()
+        assert settings.compression_level == 9
+        assert settings.extension() == ".tar.bz2"
+
+    def test_create_conda_default(self) -> None:
+        """Test creating conda settings with default compression."""
+        settings = PackagingConfig.conda()
+        assert settings.is_conda()
+        assert not settings.is_tar_bz2()
+        assert settings.compression_level == 22
+        assert settings.extension() == ".conda"
+
+    def test_create_tar_bz2_custom_compression(self) -> None:
+        """Test creating tar.bz2 settings with custom compression."""
+        settings = PackagingConfig.tar_bz2(compression_level=5)
+        assert settings.is_tar_bz2()
+        assert settings.compression_level == 5
+
+    def test_create_conda_custom_compression(self) -> None:
+        """Test creating conda settings with custom compression."""
+        settings = PackagingConfig.conda(compression_level=10)
+        assert settings.is_conda()
+        assert settings.compression_level == 10
+
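+    # A minimal sketch of the two construction styles exercised in this class;
+    # the factory/constructor equivalence below is inferred from these tests
+    # rather than from documented API:
+    #
+    #     via_factory = PackagingConfig.conda(compression_level=1)
+    #     via_ctor = PackagingConfig(ArchiveType.Conda.value, compression_level=1)
+    #     assert via_factory.extension() == via_ctor.extension() == ".conda"
+
+    def 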
test_create_with_constructor_tar_bz2(self) -> None: + """Test creating settings with constructor for tar.bz2.""" + settings = PackagingConfig(ArchiveType.TarBz2.value, compression_level=7) + assert settings.is_tar_bz2() + assert settings.compression_level == 7 + + def test_create_with_constructor_conda(self) -> None: + """Test creating settings with constructor for conda.""" + settings = PackagingConfig(ArchiveType.Conda.value, compression_level=15) + assert settings.is_conda() + assert settings.compression_level == 15 + + def test_create_with_default_compression(self) -> None: + """Test that None compression_level uses appropriate defaults.""" + tar_settings = PackagingConfig(ArchiveType.TarBz2.value) + assert tar_settings.compression_level == 9 + + conda_settings = PackagingConfig(ArchiveType.Conda.value) + assert conda_settings.compression_level == 22 + + +class TestPackagingConfigValidation: + """Test suite for compression level validation.""" + + def test_tar_bz2_valid_range(self) -> None: + """Test valid compression levels for tar.bz2.""" + for level in range(1, 10): # 1-9 inclusive + settings = PackagingConfig.tar_bz2(compression_level=level) + assert settings.compression_level == level + + def test_tar_bz2_invalid_low(self) -> None: + """Test invalid low compression level for tar.bz2.""" + with pytest.raises(Exception): # RattlerBuildError + PackagingConfig.tar_bz2(compression_level=0) + + def test_tar_bz2_invalid_high(self) -> None: + """Test invalid high compression level for tar.bz2.""" + with pytest.raises(Exception): # RattlerBuildError + PackagingConfig.tar_bz2(compression_level=10) + + def test_conda_valid_range(self) -> None: + """Test valid compression levels for conda.""" + # Test some values in the range -7 to 22 + for level in [-7, -1, 0, 1, 10, 15, 20, 22]: + settings = PackagingConfig.conda(compression_level=level) + assert settings.compression_level == level + + def test_conda_invalid_low(self) -> None: + """Test invalid low compression level for conda.""" + with pytest.raises(Exception): # RattlerBuildError + PackagingConfig.conda(compression_level=-8) + + def test_conda_invalid_high(self) -> None: + """Test invalid high compression level for conda.""" + with pytest.raises(Exception): # RattlerBuildError + PackagingConfig.conda(compression_level=23) + + +class TestPackagingConfigModification: + """Test suite for modifying PackagingConfig.""" + + def test_modify_compression_level_tar_bz2(self) -> None: + """Test modifying compression level for tar.bz2.""" + settings = PackagingConfig.tar_bz2() + settings.compression_level = 5 + assert settings.compression_level == 5 + + def test_modify_compression_level_conda(self) -> None: + """Test modifying compression level for conda.""" + settings = PackagingConfig.conda() + settings.compression_level = 10 + assert settings.compression_level == 10 + + def test_modify_compression_level_validates_tar_bz2(self) -> None: + """Test that setting compression level validates for tar.bz2.""" + settings = PackagingConfig.tar_bz2() + with pytest.raises(Exception): # RattlerBuildError + settings.compression_level = 10 + + def test_modify_compression_level_validates_conda(self) -> None: + """Test that setting compression level validates for conda.""" + settings = PackagingConfig.conda() + with pytest.raises(Exception): # RattlerBuildError + settings.compression_level = 23 + + def test_change_archive_type(self) -> None: + """Test changing archive type.""" + settings = PackagingConfig.tar_bz2() + assert settings.is_tar_bz2() + + 
settings.archive_type = ArchiveType.Conda.value + assert settings.is_conda() + assert settings.extension() == ".conda" + + def test_change_archive_type_validates_compression(self) -> None: + """Test that changing archive type doesn't auto-validate compression.""" + # Start with conda format with compression level 15 + settings = PackagingConfig.conda(compression_level=15) + assert settings.compression_level == 15 + + # Change to tar.bz2 - compression level 15 is invalid for tar.bz2 + settings.archive_type = ArchiveType.TarBz2.value + + # The compression level should still be 15 (no auto-adjustment) + # But trying to set it again should fail + with pytest.raises(Exception): # RattlerBuildError + settings.compression_level = 15 + + +class TestPackagingConfigProperties: + """Test suite for PackagingConfig properties.""" + + def test_archive_type_property(self) -> None: + """Test archive_type property.""" + tar_settings = PackagingConfig.tar_bz2() + assert tar_settings.archive_type.extension() == ".tar.bz2" + + conda_settings = PackagingConfig.conda() + assert conda_settings.archive_type.extension() == ".conda" + + def test_compression_level_property(self) -> None: + """Test compression_level property.""" + settings = PackagingConfig.tar_bz2(compression_level=5) + assert settings.compression_level == 5 + + settings.compression_level = 7 + assert settings.compression_level == 7 + + def test_extension_method(self) -> None: + """Test extension() method.""" + assert PackagingConfig.tar_bz2().extension() == ".tar.bz2" + assert PackagingConfig.conda().extension() == ".conda" + + def test_is_tar_bz2_method(self) -> None: + """Test is_tar_bz2() method.""" + assert PackagingConfig.tar_bz2().is_tar_bz2() + assert not PackagingConfig.conda().is_tar_bz2() + + def test_is_conda_method(self) -> None: + """Test is_conda() method.""" + assert PackagingConfig.conda().is_conda() + assert not PackagingConfig.tar_bz2().is_conda() + + +class TestPackagingConfigStringRepresentation: + """Test suite for string representations.""" + + def test_repr_tar_bz2(self) -> None: + """Test __repr__ for tar.bz2.""" + settings = PackagingConfig.tar_bz2(compression_level=5) + repr_str = repr(settings) + assert "PackagingConfig" in repr_str + assert "TarBz2" in repr_str + assert "5" in repr_str + + def test_repr_conda(self) -> None: + """Test __repr__ for conda.""" + settings = PackagingConfig.conda(compression_level=15) + repr_str = repr(settings) + assert "PackagingConfig" in repr_str + assert "Conda" in repr_str + assert "15" in repr_str + + def test_str_tar_bz2(self) -> None: + """Test __str__ for tar.bz2.""" + settings = PackagingConfig.tar_bz2(compression_level=7) + str_repr = str(settings) + assert "tar.bz2" in str_repr + assert "7" in str_repr + + def test_str_conda(self) -> None: + """Test __str__ for conda.""" + settings = PackagingConfig.conda(compression_level=10) + str_repr = str(settings) + assert "conda" in str_repr + assert "10" in str_repr + + +class TestPackagingConfigIntegration: + """Integration tests for PackagingConfig.""" + + def test_fast_compression_workflow(self) -> None: + """Test workflow for fast compression.""" + # Use fast compression for development builds + settings = PackagingConfig.conda(compression_level=1) + assert settings.is_conda() + assert settings.compression_level == 1 + assert settings.extension() == ".conda" + + def test_max_compression_workflow(self) -> None: + """Test workflow for maximum compression.""" + # Use maximum compression for release builds + settings = 
PackagingConfig.conda(compression_level=22)
+        assert settings.is_conda()
+        assert settings.compression_level == 22
+
+    def test_legacy_format_workflow(self) -> None:
+        """Test workflow for legacy tar.bz2 format."""
+        # Use tar.bz2 for compatibility
+        settings = PackagingConfig.tar_bz2()
+        assert settings.is_tar_bz2()
+        assert settings.compression_level == 9
+
+    def test_modify_for_different_use_case(self) -> None:
+        """Test modifying settings for different use cases."""
+        # Start with fast development settings
+        settings = PackagingConfig.conda(compression_level=1)
+
+        # Switch to release settings
+        settings.compression_level = 22
+        assert settings.compression_level == 22
+
+    def test_format_switching(self) -> None:
+        """Test switching between formats."""
+        settings = PackagingConfig.conda()
+
+        # Switch to tar.bz2
+        settings.archive_type = ArchiveType.TarBz2.value
+        settings.compression_level = 9  # Valid for tar.bz2
+
+        assert settings.is_tar_bz2()
+        assert settings.compression_level == 9
+        assert settings.extension() == ".tar.bz2"
+
+        # Switch back to conda
+        settings.archive_type = ArchiveType.Conda.value
+        settings.compression_level = 15  # Valid for conda
+
+        assert settings.is_conda()
+        assert settings.compression_level == 15
+        assert settings.extension() == ".conda"
+
+
+class TestPackagingConfigEdgeCases:
+    """Test edge cases for PackagingConfig."""
+
+    def test_boundary_values_tar_bz2(self) -> None:
+        """Test boundary values for tar.bz2."""
+        # Minimum valid
+        min_settings = PackagingConfig.tar_bz2(compression_level=1)
+        assert min_settings.compression_level == 1
+
+        # Maximum valid
+        max_settings = PackagingConfig.tar_bz2(compression_level=9)
+        assert max_settings.compression_level == 9
+
+    def test_boundary_values_conda(self) -> None:
+        """Test boundary values for conda."""
+        # Minimum valid
+        min_settings = PackagingConfig.conda(compression_level=-7)
+        assert min_settings.compression_level == -7
+
+        # Maximum valid
+        max_settings = PackagingConfig.conda(compression_level=22)
+        assert max_settings.compression_level == 22
+
+    def test_negative_compression_conda(self) -> None:
+        """Test negative compression levels for conda."""
+        # Negative values are valid for conda (faster, less compression)
+        settings = PackagingConfig.conda(compression_level=-5)
+        assert settings.compression_level == -5
+
+    def test_recommended_settings(self) -> None:
+        """Test recommended settings for production use."""
+        # Recommended: conda format with high compression
+        settings = PackagingConfig.conda()  # Default is 22
+        assert settings.is_conda()
+        assert settings.compression_level == 22
diff --git a/py-rattler-build/tests/unit/test_sandbox_config.py b/py-rattler-build/tests/unit/test_sandbox_config.py
new file mode 100644
index 000000000..a9ac0dcb4
--- /dev/null
+++ b/py-rattler-build/tests/unit/test_sandbox_config.py
@@ -0,0 +1,244 @@
+"""Test suite for SandboxConfig."""
+
+from pathlib import Path
+from rattler_build import SandboxConfig
+
+
+class TestSandboxConfig:
+    """Test suite for SandboxConfig class."""
+
+    def test_create_default_config(self) -> None:
+        """Test creating a default SandboxConfig."""
+        config = SandboxConfig()
+        assert config.allow_network is False
+        assert config.read == []
+        assert config.read_execute == []
+        assert config.read_write == []
+
+    def test_create_config_with_network(self) -> None:
+        """Test creating a SandboxConfig with network access."""
+        config = SandboxConfig(allow_network=True)
+        assert config.allow_network is True
+
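+    # A minimal sketch of a locked-down sandbox, using only the keyword
+    # arguments exercised in this file (allow_network, read, read_execute,
+    # read_write); the exact path lists are illustrative, not defaults:
+    #
+    #     sandbox = SandboxConfig(
+    #         allow_network=False,
+    #         read=[Path("/usr"), Path("/etc")],
+    #         read_execute=[Path("/bin"), Path("/usr/bin")],
+    #         read_write=[Path("/tmp")],
+    #     )
+
+    def test_create_config_with_paths(self) -> 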
None: + """Test creating a SandboxConfig with paths.""" + config = SandboxConfig( + read=[Path("/usr"), Path("/etc")], + read_execute=[Path("/bin"), Path("/usr/bin")], + read_write=[Path("/tmp")], + ) + assert len(config.read) == 2 + assert Path("/usr") in config.read + assert Path("/etc") in config.read + assert len(config.read_execute) == 2 + assert Path("/bin") in config.read_execute + assert Path("/usr/bin") in config.read_execute + assert len(config.read_write) == 1 + assert Path("/tmp") in config.read_write + + def test_modify_allow_network(self) -> None: + """Test modifying allow_network after creation.""" + config = SandboxConfig() + assert config.allow_network is False + config.allow_network = True + assert config.allow_network is True + + def test_modify_read_paths(self) -> None: + """Test modifying read paths after creation.""" + config = SandboxConfig() + config.read = [Path("/usr/local")] + assert len(config.read) == 1 + assert Path("/usr/local") in config.read + + def test_modify_read_execute_paths(self) -> None: + """Test modifying read_execute paths after creation.""" + config = SandboxConfig() + config.read_execute = [Path("/usr/local/bin")] + assert len(config.read_execute) == 1 + assert Path("/usr/local/bin") in config.read_execute + + def test_modify_read_write_paths(self) -> None: + """Test modifying read_write paths after creation.""" + config = SandboxConfig() + config.read_write = [Path("/var/tmp")] + assert len(config.read_write) == 1 + assert Path("/var/tmp") in config.read_write + + def test_add_read_path(self) -> None: + """Test adding a read path.""" + config = SandboxConfig() + config.add_read(Path("/usr")) + assert Path("/usr") in config.read + + def test_add_read_execute_path(self) -> None: + """Test adding a read_execute path.""" + config = SandboxConfig() + config.add_read_execute(Path("/bin")) + assert Path("/bin") in config.read_execute + + def test_add_read_write_path(self) -> None: + """Test adding a read_write path.""" + config = SandboxConfig() + config.add_read_write(Path("/tmp")) + assert Path("/tmp") in config.read_write + + def test_add_multiple_paths(self) -> None: + """Test adding multiple paths.""" + config = SandboxConfig() + config.add_read(Path("/usr")) + config.add_read(Path("/etc")) + assert len(config.read) == 2 + assert Path("/usr") in config.read + assert Path("/etc") in config.read + + def test_repr(self) -> None: + """Test string representation.""" + config = SandboxConfig(allow_network=True) + repr_str = repr(config) + assert "SandboxConfig" in repr_str + assert "allow_network=True" in repr_str + + def test_str(self) -> None: + """Test detailed string representation.""" + config = SandboxConfig(allow_network=True) + str_repr = str(config) + assert "Sandbox Configuration" in str_repr or "SandboxConfig" in str_repr + + +class TestSandboxConfigPlatformDefaults: + """Test suite for platform-specific default configurations.""" + + def test_for_macos(self) -> None: + """Test macOS default configuration.""" + config = SandboxConfig.for_macos() + assert config.allow_network is False + assert len(config.read) > 0 + assert Path("/") in config.read + assert len(config.read_execute) > 0 + # macOS should have /bin and /usr/bin + read_execute_strs = [str(p) for p in config.read_execute] + assert any("/bin" in p for p in read_execute_strs) + assert len(config.read_write) > 0 + # Should have /tmp + read_write_strs = [str(p) for p in config.read_write] + assert any("/tmp" in p for p in read_write_strs) + + def test_for_linux(self) -> None: + """Test 
Linux default configuration.""" + config = SandboxConfig.for_linux() + assert config.allow_network is False + assert len(config.read) > 0 + assert Path("/") in config.read + assert len(config.read_execute) > 0 + # Linux should have /bin, /usr/bin, and lib directories + read_execute_strs = [str(p) for p in config.read_execute] + assert any("/bin" in p for p in read_execute_strs) + assert len(config.read_write) > 0 + # Should have /tmp + read_write_strs = [str(p) for p in config.read_write] + assert any("/tmp" in p for p in read_write_strs) + + def test_modify_platform_defaults(self) -> None: + """Test modifying platform default configurations.""" + config = SandboxConfig.for_linux() + config.allow_network = True + assert config.allow_network is True + + config.add_read_write(Path("/my/custom/path")) + assert Path("/my/custom/path") in config.read_write + + +class TestSandboxConfigIntegration: + """Integration tests for SandboxConfig.""" + + def test_full_workflow(self) -> None: + """Test a complete workflow of creating and modifying a config.""" + # Start with platform defaults + config = SandboxConfig.for_linux() + + # Enable network for this build + config.allow_network = True + + # Add custom paths + config.add_read(Path("/opt/custom")) + config.add_read_execute(Path("/opt/custom/bin")) + config.add_read_write(Path("/workspace")) + + # Verify everything + assert config.allow_network is True + assert Path("/opt/custom") in config.read + assert Path("/opt/custom/bin") in config.read_execute + assert Path("/workspace") in config.read_write + + def test_replace_paths_completely(self) -> None: + """Test completely replacing path lists.""" + config = SandboxConfig.for_linux() + + original_read_count = len(config.read) + assert original_read_count > 0 + + # Replace with custom paths + config.read = [Path("/custom/path")] + assert len(config.read) == 1 + assert Path("/custom/path") in config.read + + def test_clear_paths(self) -> None: + """Test clearing all paths.""" + config = SandboxConfig(read=[Path("/usr")], read_execute=[Path("/bin")], read_write=[Path("/tmp")]) + + config.read = [] + config.read_execute = [] + config.read_write = [] + + assert config.read == [] + assert config.read_execute == [] + assert config.read_write == [] + + def test_realistic_build_config(self) -> None: + """Test a realistic build configuration.""" + config = SandboxConfig.for_linux() + + # Disable network (security best practice) + config.allow_network = False + + # Add project-specific paths + project_root = Path("/home/user/project") + config.add_read(project_root) + config.add_read_execute(project_root / "scripts") + config.add_read_write(project_root / "build") + + assert Path("/home/user/project") in config.read + assert Path("/home/user/project/scripts") in config.read_execute + assert Path("/home/user/project/build") in config.read_write + + +class TestSandboxConfigEdgeCases: + """Test edge cases and error handling.""" + + def test_empty_path_lists(self) -> None: + """Test with empty path lists.""" + config = SandboxConfig(read=[], read_execute=[], read_write=[]) + assert config.read == [] + assert config.read_execute == [] + assert config.read_write == [] + + def test_duplicate_paths(self) -> None: + """Test adding duplicate paths.""" + config = SandboxConfig() + config.add_read(Path("/usr")) + config.add_read(Path("/usr")) + # Should have duplicates (no deduplication) + assert len(config.read) == 2 + + def test_relative_paths(self) -> None: + """Test with relative paths.""" + config = SandboxConfig() + 
config.add_read(Path("relative/path"))
+        assert Path("relative/path") in config.read
+
+    def test_windows_paths(self) -> None:
+        """Test with Windows-style paths."""
+        config = SandboxConfig()
+        # pathlib.Path normalizes Windows-style paths on Windows; elsewhere
+        # the value is stored as an ordinary relative path.
+        config.add_read(Path("C:/Users/test"))
+        assert len(config.read) == 1
diff --git a/py-rattler-build/tests/unit/test_test_configuration.py b/py-rattler-build/tests/unit/test_test_configuration.py
new file mode 100644
index 000000000..2545893d2
--- /dev/null
+++ b/py-rattler-build/tests/unit/test_test_configuration.py
@@ -0,0 +1,245 @@
+"""Test suite for TestConfig.

+Note: TestConfig is typically created internally during test runs,
+so these tests document the expected interface rather than test construction.
+"""
+
+
+class TestTestConfigInterface:
+    """Test suite for TestConfig interface.
+
+    Note: Since TestConfig objects are created internally during test runs,
+    we can't directly instantiate them in tests. These tests document the expected
+    interface and can be run when TestConfig objects are available from actual tests.
+    """
+
+    def test_test_configuration_interface(self) -> None:
+        """Document the expected interface for TestConfig."""
+        # This is a documentation test showing the expected interface.
+        # In actual usage, you would get a TestConfig object from a test run.
+
+        # Expected properties (all read-only):
+        expected_properties = [
+            "test_prefix",  # pathlib.Path
+            "target_platform",  # Optional[str]
+            "host_platform",  # Optional[str]
+            "current_platform",  # str
+            "keep_test_prefix",  # bool
+            "test_index",  # Optional[int]
+            "channels",  # list[str]
+            "channel_priority",  # str
+            "solve_strategy",  # str
+            "output_dir",  # pathlib.Path
+            "debug",  # Debug
+            "exclude_newer",  # Optional[str]
+        ]
+
+        assert len(expected_properties) == 12
+
+    def test_test_configuration_documentation(self) -> None:
+        """Document how TestConfig is used in practice."""
+        # This test documents usage patterns.
+
+        # Example usage (pseudo-code):
+        # During a package test, you might receive a TestConfig object:
+        # config = test_context.configuration
+
+        # Access various properties:
+        # test_prefix = config.test_prefix  # Path where test environment is created
+        # target = config.target_platform  # Platform package was built for
+        # host = config.host_platform  # Platform for runtime dependencies
+        # keep = config.keep_test_prefix  # Whether to preserve test environment
+        # channels = config.channels  # Channels used for test dependencies
+        # debug = config.debug  # Debug configuration
+
+        # Check if debug mode is enabled:
+        # if config.debug:
+        #     print(f"Testing in debug mode at {config.test_prefix}")
+
+        # Access solver settings:
+        # priority = config.channel_priority
+        # strategy = config.solve_strategy
+        pass
+
+
+class TestTestConfigSemantics:
+    """Test the semantic meaning of TestConfig properties."""
+
+    def test_property_purposes(self) -> None:
+        """Document the purpose of each property."""
+        purposes = {
+            "test_prefix": "Directory where test environment is created",
+            "target_platform": "Platform the package was built for",
+            "host_platform": "Platform for runtime dependencies",
+            "current_platform": "Platform running the tests",
+            "keep_test_prefix": "Whether to preserve test environment after test",
+            "test_index": "Index of specific test to run (None = all tests)",
+            "channels": "Channels for resolving test dependencies",
+            "channel_priority": "Strategy for channel priority",
+            "solve_strategy": "Strategy for dependency resolution",
+            "output_dir": "Directory for test artifacts",
+            "debug": "Debug 
mode configuration",
+            "exclude_newer": "Exclude packages newer than this timestamp",
+        }
+
+        assert len(purposes) == 12
+
+    def test_platform_relationships(self) -> None:
+        """Document relationships between platform properties."""
+        # target_platform: The platform the package was built for
+        # host_platform: The platform for runtime dependencies (often same as target)
+        # current_platform: The platform actually running the tests
+
+        # For cross-compilation testing:
+        # - target_platform might be "linux-aarch64"
+        # - host_platform might be "linux-aarch64"
+        # - current_platform might be "linux-64" (using emulation)
+
+        # For native testing:
+        # - All three platforms would typically be the same
+        pass
+
+    def test_directory_relationships(self) -> None:
+        """Document relationships between directory properties."""
+        # test_prefix: Where the test environment is created
+        # - Contains installed package and test dependencies
+        # - Deleted after test unless keep_test_prefix=True
+
+        # output_dir: Where test artifacts are created
+        # - Typically output_dir/test
+        # - Contains test logs and results
+        pass
+
+
+class TestTestConfigUsageExamples:
+    """Examples of how TestConfig would be used in real scenarios."""
+
+    def test_inspecting_test_environment(self) -> None:
+        """Example: Inspecting test environment configuration."""
+        # def inspect_test_config(config: TestConfig):
+        #     print("Test Environment:")
+        #     print(f"  Prefix: {config.test_prefix}")
+        #     print(f"  Target: {config.target_platform}")
+        #     print(f"  Channels: {', '.join(config.channels)}")
+        #     print(f"  Keep prefix: {config.keep_test_prefix}")
+        pass
+
+    def test_conditional_test_logic(self) -> None:
+        """Example: Conditional logic based on test configuration."""
+        # def run_platform_specific_test(config: TestConfig):
+        #     if config.target_platform == "linux-64":
+        #         # Run Linux-specific tests
+        #         pass
+        #     elif config.target_platform == "osx-arm64":
+        #         # Run macOS ARM tests
+        #         pass
+        pass
+
+    def test_debug_mode_workflow(self) -> None:
+        """Example: Using debug mode in tests."""
+        # def test_with_debug(config: TestConfig):
+        #     if config.debug:
+        #         # Enable verbose logging
+        #         import logging
+        #         logging.basicConfig(level=logging.DEBUG)
+        #
+        #         # Print test environment details
+        #         print(f"Testing at: {config.test_prefix}")
+        #         print(f"Channels: {config.channels}")
+        pass
+
+    def test_selective_test_execution(self) -> None:
+        """Example: Running specific tests."""
+        # def run_tests(config: TestConfig):
+        #     if config.test_index is not None:
+        #         # Run only the specified test
+        #         run_single_test(config.test_index)
+        #     else:
+        #         # Run all tests
+        #         run_all_tests()
+        pass
+
+
+class TestTestConfigIntegration:
+    """Integration scenarios with TestConfig."""
+
+    def test_test_workflow(self) -> None:
+        """Document a typical test workflow using TestConfig."""
+        # 1. TestConfig is created internally during package test
+        # 2. Test environment is set up at config.test_prefix
+        # 3. Package and dependencies are installed using config.channels
+        # 4. Test scripts are executed
+        # 5. If config.keep_test_prefix is False, environment is cleaned up
+        # 6. 
Test results are written to config.output_dir + pass + + def test_cross_platform_testing(self) -> None: + """Document cross-platform testing with TestConfig.""" + # For cross-platform testing: + # - config.target_platform is the target architecture + # - config.host_platform may differ from target + # - config.current_platform is the actual test platform + # - Tests may use emulation or skip if incompatible + pass + + def test_multi_channel_resolution(self) -> None: + """Document multi-channel dependency resolution.""" + # config.channels contains the ordered list of channels + # config.channel_priority determines how conflicts are resolved: + # - "Strict": Prefer packages from higher-priority channels + # - "Flexible": Allow packages from any channel if compatible + pass + + +class TestTestConfigStringRepresentation: + """Test string representation methods.""" + + def test_repr_format(self) -> None: + """Document expected __repr__ format.""" + # Expected format: + # TestConfig(test_prefix=..., target_platform=..., keep_test_prefix=...) + pass + + def test_str_format(self) -> None: + """Document expected __str__ format.""" + # Expected format (detailed): + # TestConfig: + # Test prefix: ... + # Target platform: ... + # Host platform: ... + # Keep prefix: ... + # Test index: ... + # Output dir: ... + # Debug: ... + pass + + +# Note: To test actual TestConfig objects, you would need to: +# 1. Create a test package +# 2. Run the test suite +# 3. Access the TestConfig from the test context +# 4. Test property access and values +# +# Example test that would work with an actual TestConfig object: +# +# def test_with_real_test_configuration(test_config: TestConfig): +# """Test with an actual TestConfig object.""" +# # Test that properties are accessible +# assert isinstance(test_config.test_prefix, Path) +# assert test_config.test_prefix.is_absolute() +# +# # Test platform properties +# if test_config.target_platform: +# assert isinstance(test_config.target_platform, str) +# assert test_config.target_platform in ["linux-64", "osx-64", "osx-arm64", "win-64"] +# +# # Test channel properties +# assert isinstance(test_config.channels, list) +# assert all(isinstance(c, str) for c in test_config.channels) +# +# # Test debug property +# assert hasattr(test_config.debug, 'is_enabled') +# assert isinstance(test_config.debug.is_enabled(), bool) +# +# # Test output directory +# assert isinstance(test_config.output_dir, Path) diff --git a/py-rattler-build/tests/unit/test_variant_config.py b/py-rattler-build/tests/unit/test_variant_config.py new file mode 100644 index 000000000..7a1194a73 --- /dev/null +++ b/py-rattler-build/tests/unit/test_variant_config.py @@ -0,0 +1,470 @@ +"""Test suite for variant_config module.""" + +from pathlib import Path +import pytest +from rattler_build import Pin, VariantConfig, SelectorConfig + + +class TestPin: + """Test suite for Pin class.""" + + def test_create_empty_pin(self) -> None: + """Test creating an empty Pin.""" + pin = Pin() + assert pin.max_pin is None + assert pin.min_pin is None + + def test_create_pin_with_max_only(self) -> None: + """Test creating a Pin with only max_pin.""" + pin = Pin(max_pin="x.x") + assert pin.max_pin == "x.x" + assert pin.min_pin is None + + def test_create_pin_with_min_only(self) -> None: + """Test creating a Pin with only min_pin.""" + pin = Pin(min_pin="x.x.x") + assert pin.max_pin is None + assert pin.min_pin == "x.x.x" + + def test_create_pin_with_both(self) -> None: + """Test creating a Pin with both max_pin and min_pin.""" + pin = 
Pin(max_pin="x.x", min_pin="x.x.x.x") + assert pin.max_pin == "x.x" + assert pin.min_pin == "x.x.x.x" + + def test_modify_max_pin(self) -> None: + """Test modifying max_pin after creation.""" + pin = Pin() + pin.max_pin = "x.x.x" + assert pin.max_pin == "x.x.x" + + def test_modify_min_pin(self) -> None: + """Test modifying min_pin after creation.""" + pin = Pin() + pin.min_pin = "x.x" + assert pin.min_pin == "x.x" + + def test_pin_equality(self) -> None: + """Test Pin equality comparison.""" + pin1 = Pin(max_pin="x.x", min_pin="x.x.x") + pin2 = Pin(max_pin="x.x", min_pin="x.x.x") + pin3 = Pin(max_pin="x.x.x", min_pin="x.x.x") + + assert pin1 == pin2 + assert pin1 != pin3 + + def test_pin_equality_with_none(self) -> None: + """Test Pin equality when some fields are None.""" + pin1 = Pin(max_pin="x.x") + pin2 = Pin(max_pin="x.x") + pin3 = Pin(min_pin="x.x") + + assert pin1 == pin2 + assert pin1 != pin3 + + def test_pin_repr(self) -> None: + """Test Pin string representation.""" + pin = Pin(max_pin="x.x", min_pin="x.x.x") + repr_str = repr(pin) + assert "Pin" in repr_str + assert "x.x" in repr_str + assert "x.x.x" in repr_str + + def test_pin_not_equal_to_other_types(self) -> None: + """Test that Pin is not equal to other types.""" + pin = Pin(max_pin="x.x") + assert pin != "x.x" + assert pin != 42 + assert pin != {"max_pin": "x.x"} + + +class TestVariantConfig: + """Test suite for VariantConfig class.""" + + def test_create_empty_config(self) -> None: + """Test creating an empty VariantConfig.""" + config = VariantConfig() + assert config.pin_run_as_build is None + assert config.zip_keys is None + assert config.variants == {} + + def test_create_config_with_variants(self) -> None: + """Test creating a VariantConfig with variants.""" + config = VariantConfig(variants={"python": ["3.9", "3.10", "3.11"], "numpy": ["1.21", "1.22"]}) + assert len(config.variants) == 2 + assert config.variants["python"] == ["3.9", "3.10", "3.11"] + assert config.variants["numpy"] == ["1.21", "1.22"] + + def test_create_config_with_zip_keys(self) -> None: + """Test creating a VariantConfig with zip_keys.""" + config = VariantConfig(zip_keys=[["python", "numpy"], ["cuda", "cudnn"]]) + assert config.zip_keys == [["python", "numpy"], ["cuda", "cudnn"]] + + def test_create_config_with_pin_run_as_build(self) -> None: + """Test creating a VariantConfig with pin_run_as_build.""" + config = VariantConfig( + pin_run_as_build={"python": Pin(max_pin="x.x"), "numpy": Pin(max_pin="x.x", min_pin="x.x.x.x")} + ) + assert config.pin_run_as_build is not None + assert "python" in config.pin_run_as_build + assert "numpy" in config.pin_run_as_build + assert config.pin_run_as_build["python"].max_pin == "x.x" + assert config.pin_run_as_build["numpy"].min_pin == "x.x.x.x" + + def test_modify_variants(self) -> None: + """Test modifying variants after creation.""" + config = VariantConfig() + config.variants = {"rust": ["1.70", "1.71"]} + assert config.variants["rust"] == ["1.70", "1.71"] + + def test_modify_zip_keys(self) -> None: + """Test modifying zip_keys after creation.""" + config = VariantConfig() + config.zip_keys = [["cuda", "cudnn"]] + assert config.zip_keys == [["cuda", "cudnn"]] + + def test_modify_pin_run_as_build(self) -> None: + """Test modifying pin_run_as_build after creation.""" + config = VariantConfig() + config.pin_run_as_build = {"go": Pin(max_pin="x.x")} + assert config.pin_run_as_build["go"].max_pin == "x.x" + + def test_variant_config_equality(self) -> None: + """Test VariantConfig equality comparison.""" + 
config1 = VariantConfig(variants={"python": ["3.9", "3.10"]}, zip_keys=[["python", "numpy"]]) + config2 = VariantConfig(variants={"python": ["3.9", "3.10"]}, zip_keys=[["python", "numpy"]]) + config3 = VariantConfig(variants={"python": ["3.9", "3.11"]}, zip_keys=[["python", "numpy"]]) + + assert config1 == config2 + assert config1 != config3 + + def test_variant_config_repr(self) -> None: + """Test VariantConfig string representation.""" + config = VariantConfig(variants={"python": ["3.9"]}, pin_run_as_build={"numpy": Pin(max_pin="x.x")}) + repr_str = repr(config) + assert "VariantConfig" in repr_str + + def test_variant_config_not_equal_to_other_types(self) -> None: + """Test that VariantConfig is not equal to other types.""" + config = VariantConfig(variants={"python": ["3.9"]}) + assert config != "config" + assert config != 42 + assert config != {"variants": {"python": ["3.9"]}} + + def test_variants_with_different_types(self) -> None: + """Test variants with different value types.""" + config = VariantConfig( + variants={"python": ["3.9", "3.10"], "cuda_enabled": [True, False], "cuda_version": [11, 12]} + ) + assert config.variants["python"] == ["3.9", "3.10"] + assert config.variants["cuda_enabled"] == [True, False] + assert config.variants["cuda_version"] == [11, 12] + + def test_complex_config(self) -> None: + """Test a complex VariantConfig with all fields.""" + config = VariantConfig( + pin_run_as_build={"python": Pin(max_pin="x.x"), "numpy": Pin(max_pin="x.x", min_pin="x.x.x.x")}, + zip_keys=[["python", "numpy"]], + variants={"python": ["3.9", "3.10", "3.11"], "numpy": ["1.21", "1.22", "1.23"], "cuda": ["11.8", "12.0"]}, + ) + + # Verify all fields are set correctly + assert config.pin_run_as_build is not None + assert len(config.pin_run_as_build) == 2 + assert config.zip_keys == [["python", "numpy"]] + assert len(config.variants) == 3 + assert config.variants["python"] == ["3.9", "3.10", "3.11"] + + def test_clear_pin_run_as_build(self) -> None: + """Test setting pin_run_as_build to None.""" + config = VariantConfig(pin_run_as_build={"python": Pin(max_pin="x.x")}) + assert config.pin_run_as_build is not None + + config.pin_run_as_build = None + assert config.pin_run_as_build is None + + def test_clear_zip_keys(self) -> None: + """Test setting zip_keys to None.""" + config = VariantConfig(zip_keys=[["python", "numpy"]]) + assert config.zip_keys is not None + + config.zip_keys = None + assert config.zip_keys is None + + def test_replace_variants(self) -> None: + """Test completely replacing variants.""" + config = VariantConfig(variants={"python": ["3.9", "3.10"]}) + assert "python" in config.variants + + config.variants = {"rust": ["1.70", "1.71"]} + assert "rust" in config.variants + assert "python" not in config.variants + + def test_empty_variants_dict(self) -> None: + """Test setting variants to an empty dict.""" + config = VariantConfig(variants={"python": ["3.9", "3.10"]}) + config.variants = {} + assert config.variants == {} + + def test_multiple_zip_key_groups(self) -> None: + """Test VariantConfig with multiple zip_key groups.""" + config = VariantConfig(zip_keys=[["python", "numpy"], ["cuda", "cudnn"], ["gcc", "gxx"]]) + assert config.zip_keys is not None + assert len(config.zip_keys) == 3 + assert config.zip_keys[0] == ["python", "numpy"] + assert config.zip_keys[1] == ["cuda", "cudnn"] + assert config.zip_keys[2] == ["gcc", "gxx"] + + +class TestIntegration: + """Integration tests for Pin and VariantConfig.""" + + def test_pin_in_variant_config_round_trip(self) -> 
None:
+        """Test that Pin objects survive round-trip through VariantConfig."""
+        original_pin = Pin(max_pin="x.x", min_pin="x.x.x")
+        config = VariantConfig(pin_run_as_build={"python": original_pin})
+
+        assert config.pin_run_as_build is not None
+        retrieved_pin = config.pin_run_as_build["python"]
+        assert retrieved_pin.max_pin == original_pin.max_pin
+        assert retrieved_pin.min_pin == original_pin.min_pin
+
+    def test_modify_pin_after_adding_to_config(self) -> None:
+        """Test that modifying the original Pin doesn't affect the config."""
+        pin = Pin(max_pin="x.x")
+        config = VariantConfig(pin_run_as_build={"python": pin})
+
+        # Modify the original pin
+        pin.max_pin = "x.x.x"
+
+        # The config keeps the original value: the binding stores a copy of
+        # the Pin, so mutating the original does not leak into the config.
+        assert config.pin_run_as_build is not None
+        assert config.pin_run_as_build["python"].max_pin == "x.x"
+
+    def test_realistic_python_variant_config(self) -> None:
+        """Test a realistic Python package variant configuration."""
+        config = VariantConfig(
+            pin_run_as_build={"python": Pin(max_pin="x.x"), "numpy": Pin(max_pin="x.x")},
+            zip_keys=[["python", "numpy"]],
+            variants={"python": ["3.9", "3.10", "3.11", "3.12"], "numpy": ["1.21", "1.22", "1.23", "1.24"]},
+        )
+
+        assert len(config.variants["python"]) == 4
+        assert len(config.variants["numpy"]) == 4
+        assert config.zip_keys == [["python", "numpy"]]
+
+    def test_realistic_cuda_variant_config(self) -> None:
+        """Test a realistic CUDA variant configuration."""
+        config = VariantConfig(
+            zip_keys=[["cuda_compiler_version", "cudnn"]],
+            variants={
+                "cuda_compiler_version": ["11.8", "12.0"],
+                "cudnn": ["8.6", "8.8"],
+                "python": ["3.9", "3.10", "3.11"],
+            },
+        )
+
+        # 2 CUDA versions * 3 Python versions = 6 total variants
+        # (with cudnn zipped to cuda_compiler_version)
+        assert len(config.variants["cuda_compiler_version"]) == 2
+        assert len(config.variants["cudnn"]) == 2
+        assert len(config.variants["python"]) == 3
+
+
+class TestMerge:
+    """Test suite for VariantConfig.merge() method."""
+
+    def test_merge_variants(self) -> None:
+        """Test merging variants from two configs."""
+        config1 = VariantConfig(variants={"python": ["3.9"], "numpy": ["1.21"]})
+        config2 = VariantConfig(variants={"cuda": ["11.8"]})
+
+        config1.merge(config2)
+
+        assert "python" in config1.variants
+        assert "numpy" in config1.variants
+        assert "cuda" in config1.variants
+        assert config1.variants["cuda"] == ["11.8"]
+
+    def test_merge_replaces_existing_keys(self) -> None:
+        """Test that merge replaces existing variant keys."""
+        config1 = VariantConfig(variants={"python": ["3.9"]})
+        config2 = VariantConfig(variants={"python": ["3.10", "3.11"]})
+
+        config1.merge(config2)
+
+        assert config1.variants["python"] == ["3.10", "3.11"]
+
+    def test_merge_pin_run_as_build(self) -> None:
+        """Test merging pin_run_as_build."""
+        config1 = VariantConfig(pin_run_as_build={"python": Pin(max_pin="x.x")})
+        config2 = VariantConfig(pin_run_as_build={"numpy": Pin(max_pin="x.x")})
+
+        config1.merge(config2)
+
+        assert config1.pin_run_as_build is not None
+        assert "python" in config1.pin_run_as_build
+        assert "numpy" in config1.pin_run_as_build
+
+    def test_merge_replaces_zip_keys(self) -> None:
+        """Test that merge replaces (not merges) zip_keys."""
+        config1 = VariantConfig(zip_keys=[["python", "numpy"]])
+        config2 = VariantConfig(zip_keys=[["cuda", "cudnn"]])
+
+        config1.merge(config2)
+
+        assert config1.zip_keys == [["cuda", "cudnn"]]
+
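+    # A minimal sketch of layering configs with merge(); for overlapping
+    # variant keys the later config wins, mirroring how from_files() folds
+    # files together (inferred from the tests in this class):
+    #
+    #     base = VariantConfig(variants={"python": ["3.9", "3.10"]})
+    #     override = VariantConfig(variants={"python": ["3.11"]})
+    #     base.merge(override)
+    #     assert base.variants["python"] == ["3.11"]
+
+    def test_merge_with_none_zip_keys(self) -> None:
+        """Test merging when one 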
config has no zip_keys.""" + config1 = VariantConfig(zip_keys=[["python", "numpy"]]) + config2 = VariantConfig(variants={"cuda": ["11.8"]}) + + config1.merge(config2) + + # According to Rust implementation, zip_keys are replaced even if None + # This matches the documented behavior: "zip_keys are replaced (not merged)" + assert config1.zip_keys is None + + def test_merge_modifies_in_place(self) -> None: + """Test that merge modifies the config in-place.""" + config1 = VariantConfig(variants={"python": ["3.9"]}) + config2 = VariantConfig(variants={"numpy": ["1.21"]}) + + original_id = id(config1) + config1.merge(config2) + + assert id(config1) == original_id + + +class TestFileLoading: + """Test suite for loading variant configs from files.""" + + @pytest.fixture + def variant_configs_dir(self) -> Path: + """Get the path to test variant config files.""" + return Path(__file__).parent.parent / "data" / "variant_configs" + + def test_load_simple_variants(self, variant_configs_dir: Path) -> None: + """Test loading a simple variants file.""" + config = VariantConfig.from_files([variant_configs_dir / "simple_variants.yaml"]) + + assert "python" in config.variants + assert "numpy" in config.variants + assert config.variants["python"] == ["3.9", "3.10", "3.11"] + assert config.variants["numpy"] == ["1.21", "1.22", "1.23"] + + def test_load_with_zip_keys(self, variant_configs_dir: Path) -> None: + """Test loading a config file with zip_keys.""" + config = VariantConfig.from_files([variant_configs_dir / "with_zip_keys.yaml"]) + + assert "python" in config.variants + assert "numpy" in config.variants + assert config.zip_keys == [["python", "numpy"]] + + def test_load_conda_build_config(self, variant_configs_dir: Path) -> None: + """Test loading a conda_build_config.yaml file.""" + config = VariantConfig.from_files([variant_configs_dir / "conda_build_config.yaml"]) + + assert "python" in config.variants + assert "cuda_compiler_version" in config.variants + assert config.variants["python"] == ["3.9", "3.10"] + assert config.variants["cuda_compiler_version"] == ["11.8", "12.0"] + + # Check pin_run_as_build + assert config.pin_run_as_build is not None + assert "python" in config.pin_run_as_build + assert config.pin_run_as_build["python"].max_pin == "x.x" + assert "numpy" in config.pin_run_as_build + assert config.pin_run_as_build["numpy"].max_pin == "x.x" + + def test_load_multiple_files_merge(self, variant_configs_dir: Path) -> None: + """Test loading and merging multiple variant config files.""" + config = VariantConfig.from_files( + [variant_configs_dir / "simple_variants.yaml", variant_configs_dir / "override_variants.yaml"] + ) + + # Python should be overridden by second file + assert config.variants["python"] == ["3.12"] + # Numpy should still be from first file + assert config.variants["numpy"] == ["1.21", "1.22", "1.23"] + # Rust should be from second file + assert config.variants["rust"] == ["1.70", "1.71"] + + def test_load_with_selector_config(self, variant_configs_dir: Path) -> None: + """Test loading with a specific SelectorConfig.""" + selector_config = SelectorConfig(target_platform="linux-64") + config = VariantConfig.from_files( + [variant_configs_dir / "simple_variants.yaml"], selector_config=selector_config + ) + + assert "python" in config.variants + + def test_load_with_string_paths(self, variant_configs_dir: Path) -> None: + """Test loading with string paths instead of Path objects.""" + config = VariantConfig.from_files([str(variant_configs_dir / "simple_variants.yaml")]) + + 
assert "python" in config.variants + assert config.variants["python"] == ["3.9", "3.10", "3.11"] + + def test_load_nonexistent_file(self, variant_configs_dir: Path) -> None: + """Test that loading a nonexistent file raises an error.""" + with pytest.raises(Exception): # RattlerBuildError + VariantConfig.from_files([variant_configs_dir / "nonexistent.yaml"]) + + def test_load_empty_list(self) -> None: + """Test loading with an empty file list.""" + config = VariantConfig.from_files([]) + # from_files with empty list returns a minimal config + assert config.variants.keys() == {"target_platform", "build_platform"} + assert config.zip_keys is None + assert config.pin_run_as_build is None + + def test_from_file_single(self, variant_configs_dir: Path) -> None: + """Test loading a single file with from_file method.""" + config = VariantConfig.from_file(variant_configs_dir / "simple_variants.yaml") + + assert "python" in config.variants + assert "numpy" in config.variants + # Note: from_file also adds target_platform and build_platform + assert "target_platform" in config.variants + assert "build_platform" in config.variants + + def test_from_file_with_string_path(self, variant_configs_dir: Path) -> None: + """Test from_file with string path instead of Path object.""" + config = VariantConfig.from_file(str(variant_configs_dir / "simple_variants.yaml")) + + assert "python" in config.variants + assert config.variants["python"] == ["3.9", "3.10", "3.11"] + + def test_from_file_conda_build_config(self, variant_configs_dir: Path) -> None: + """Test from_file with conda_build_config.yaml.""" + config = VariantConfig.from_file(variant_configs_dir / "conda_build_config.yaml") + + assert "python" in config.variants + assert config.pin_run_as_build is not None + assert "python" in config.pin_run_as_build + + def test_from_file_with_selector(self, variant_configs_dir: Path) -> None: + """Test from_file with a specific SelectorConfig.""" + selector = SelectorConfig(target_platform="linux-64") + config = VariantConfig.from_file(variant_configs_dir / "simple_variants.yaml", selector_config=selector) + + assert "python" in config.variants + assert config.variants["target_platform"] == ["linux-64"] + + def test_manual_merge_workflow(self, variant_configs_dir: Path) -> None: + """Test manually loading and merging configs.""" + config1 = VariantConfig.from_file(variant_configs_dir / "simple_variants.yaml") + config2 = VariantConfig.from_file(variant_configs_dir / "override_variants.yaml") + + # Merge config2 into config1 + config1.merge(config2) + + # Python should be overridden + assert config1.variants["python"] == ["3.12"] + # Rust should be added + assert config1.variants["rust"] == ["1.70", "1.71"] + # Numpy should remain from config1 + assert config1.variants["numpy"] == ["1.21", "1.22", "1.23"] diff --git a/src/variant_config.rs b/src/variant_config.rs index 8328854b7..ab2e4a837 100644 --- a/src/variant_config.rs +++ b/src/variant_config.rs @@ -241,17 +241,45 @@ impl From for VariantConfigError } impl VariantConfig { + /// Merge another variant configuration into this one. 
+    /// - Variants are extended (keys from `other` replace keys in `self`)
+    /// - pin_run_as_build entries are extended
+    /// - zip_keys are replaced (not merged)
+    pub fn merge(&mut self, other: VariantConfig) {
+        self.variants.extend(other.variants);
+        if let Some(other_pin_run_as_build) = other.pin_run_as_build {
+            if let Some(self_pin_run_as_build) = &mut self.pin_run_as_build {
+                self_pin_run_as_build.extend(other_pin_run_as_build);
+            } else {
+                self.pin_run_as_build = Some(other_pin_run_as_build);
+            }
+        }
+        self.zip_keys = other.zip_keys;
+    }
+
     /// This function loads a single variant configuration file and returns the
-    /// configuration.
-    fn load_file(
+    /// configuration with target_platform and build_platform already inserted.
+    pub fn from_file(
         path: &Path,
         selector_config: &SelectorConfig,
     ) -> Result<VariantConfig, VariantConfigError> {
-        if path.file_name() == Some(CONDA_BUILD_CONFIG_FILE.as_ref()) {
-            Ok(load_conda_build_config(path, selector_config)?)
+        let mut config = if path.file_name() == Some(CONDA_BUILD_CONFIG_FILE.as_ref()) {
+            load_conda_build_config(path, selector_config)?
         } else {
-            Self::load_variant_config(path, selector_config)
-        }
+            Self::load_variant_config(path, selector_config)?
+        };
+
+        // always insert target_platform and build_platform
+        config.variants.insert(
+            "target_platform".into(),
+            vec![selector_config.target_platform.to_string().into()],
+        );
+        config.variants.insert(
+            "build_platform".into(),
+            vec![selector_config.build_platform.to_string().into()],
+        );
+
+        Ok(config)
     }

     fn load_variant_config(
@@ -347,28 +375,15 @@ impl VariantConfig {
         files: &[PathBuf],
         selector_config: &SelectorConfig,
     ) -> Result<VariantConfig, VariantConfigError> {
-        let mut variant_configs = Vec::new();
+        let mut final_config = VariantConfig::default();

         for filename in files {
             tracing::info!("Loading variant config file: {:?}", filename);
-            let config = Self::load_file(filename, selector_config)?;
-            variant_configs.push(config);
-        }
-
-        let mut final_config = VariantConfig::default();
-        for config in variant_configs {
-            final_config.variants.extend(config.variants);
-            if let Some(pin_run_as_build) = config.pin_run_as_build {
-                if let Some(final_pin_run_as_build) = &mut final_config.pin_run_as_build {
-                    final_pin_run_as_build.extend(pin_run_as_build);
-                } else {
-                    final_config.pin_run_as_build = Some(pin_run_as_build);
-                }
-            }
-            final_config.zip_keys = config.zip_keys;
+            let config = Self::from_file(filename, selector_config)?;
+            final_config.merge(config);
         }

-        // always insert target_platform and build_platform
+        // always insert target_platform and build_platform, even if we didn't load any files
         final_config.variants.insert(
             "target_platform".into(),
             vec![selector_config.target_platform.to_string().into()],