17 changes: 17 additions & 0 deletions crates/pixi_build_discovery/src/discovery.rs
@@ -67,6 +67,23 @@ pub struct BackendInitializationParams {
pub target_configuration: Option<OrderMap<TargetSelectorV1, serde_json::Value>>,
}

impl BackendInitializationParams {
/// Returns the directory that should be used as the root for resolving glob
/// patterns. Falls back to the provided path if the backend does not
/// specify an alternative source location.
pub fn glob_root_with_fallback(&self, fallback: &Path) -> PathBuf {
match &self.source {
Some(SourceLocationSpec::Path(path)) => {
let resolved = path
.resolve(&self.source_anchor)
.unwrap_or_else(|_| fallback.to_path_buf());
dunce::canonicalize(&resolved).unwrap_or(resolved)
}
_ => fallback.to_path_buf(),
}
}
}

/// Configuration to enable or disable certain protocols discovery.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
#[cfg_attr(feature = "serde", derive(serde::Serialize))]
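For context, here is a minimal standalone sketch of what the new helper does. The function below is a simplified re-implementation, not pixi's code: it skips the `SourceLocationSpec` matching, the `source_anchor` resolution, and the `dunce` canonicalization shown in the diff, and the paths in `main` are made up.

```rust
use std::path::{Path, PathBuf};

/// Simplified stand-in for `glob_root_with_fallback`: prefer an explicitly
/// configured out-of-tree source directory, otherwise fall back to the
/// checked-out source path.
fn glob_root_with_fallback(configured_source_dir: Option<&Path>, fallback: &Path) -> PathBuf {
    match configured_source_dir {
        // Canonicalize if possible, but keep the configured path on failure.
        Some(dir) => std::fs::canonicalize(dir).unwrap_or_else(|_| dir.to_path_buf()),
        None => fallback.to_path_buf(),
    }
}

fn main() {
    let checkout = Path::new("/work/checkout/pkg");
    // In-tree build: no explicit source path, so globs resolve from the checkout.
    assert_eq!(glob_root_with_fallback(None, checkout), checkout.to_path_buf());
    // Out-of-tree build (e.g. `package.build.source.path = "../"`): globs
    // resolve from the configured directory instead.
    let root = glob_root_with_fallback(Some(Path::new("/work/checkout")), checkout);
    println!("glob root: {}", root.display());
}
```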
9 changes: 9 additions & 0 deletions crates/pixi_command_dispatcher/src/build/build_cache.rs
@@ -232,6 +232,15 @@ pub struct CachedBuildSourceInfo {
/// considered stale.
#[serde(default)]
pub package_build_input_hash: Option<PackageBuildInputHash>,

/// The root directory that is used when resolving the glob patterns.
/// This is especially useful for out-of-tree path builds that move the
/// root to a different location.
///
/// E.g. when building from `package.build.source.path = "../"`, we want
/// to resolve globs from that location.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub glob_root: Option<PathBuf>,
Contributor Author: This is the new field.

}

#[serde_as]
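A quick sketch of what the serde attributes on the new field buy us (illustrative `CacheEntry` type, not pixi's actual cache schema): cache entries written before this change have no `glob_root` key, still deserialize thanks to `default`, and the resulting `None` is what later drives the regenerate/mark-stale paths in this PR.

```rust
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

#[derive(Serialize, Deserialize, Debug)]
struct CacheEntry {
    globs: Vec<String>,
    // Missing in old cache files: `default` makes deserialization succeed
    // with `None`, and `skip_serializing_if` keeps new files tidy.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    glob_root: Option<PathBuf>,
}

fn main() {
    // An entry written before the field existed.
    let old = r#"{ "globs": ["src/**"] }"#;
    let entry: CacheEntry = serde_json::from_str(old).unwrap();
    assert!(entry.glob_root.is_none()); // caller treats this as "regenerate"
}
```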
@@ -218,6 +218,9 @@ pub struct CachedCondaMetadata {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub input_hash: Option<InputHash>,

#[serde(default, skip_serializing_if = "Option::is_none")]
pub glob_root: Option<PathBuf>,
Contributor Author: This as well.

#[serde(flatten)]
pub metadata: MetadataKind,
}
45 changes: 29 additions & 16 deletions crates/pixi_command_dispatcher/src/build_backend_metadata/mod.rs
@@ -1,7 +1,7 @@
use std::{
collections::{BTreeMap, BTreeSet, HashSet},
hash::{Hash, Hasher},
path::PathBuf,
path::{Path, PathBuf},
sync::Mutex,
};

@@ -126,6 +126,10 @@ impl BuildBackendMetadataSpec {
&self.variants,
);

let glob_root = discovered_backend
.init_params
.glob_root_with_fallback(&source_checkout.path);

// Check if we should skip the metadata cache for this backend
let skip_cache = Self::should_skip_metadata_cache(
&discovered_backend.backend_spec,
@@ -143,13 +147,9 @@
.map_err(CommandDispatcherError::Failed)?;

if !skip_cache {
if let Some(metadata) = Self::verify_cache_freshness(
&source_checkout,
&command_dispatcher,
metadata,
&additional_glob_hash,
)
.await?
if let Some(metadata) =
Self::verify_cache_freshness(&command_dispatcher, metadata, &additional_glob_hash)
.await?
{
return Ok(BuildBackendMetadata {
metadata,
@@ -198,6 +198,7 @@ impl BuildBackendMetadataSpec {
source_checkout,
backend,
additional_glob_hash,
glob_root,
log_sink,
)
.await?;
@@ -270,7 +271,6 @@ impl BuildBackendMetadataSpec {
}

async fn verify_cache_freshness(
source_checkout: &SourceCheckout,
command_dispatcher: &CommandDispatcher,
metadata: Option<CachedCondaMetadata>,
additional_glob_hash: &[u8],
@@ -293,11 +293,19 @@ impl BuildBackendMetadataSpec {
return Ok(Some(metadata));
};

let Some(cached_root) = metadata.glob_root.as_ref() else {
Contributor Author: Here we regenerate if it's not found.

tracing::debug!(
"cached `{metadata_kind}` response missing glob root; regenerating metadata"
);
return Ok(None);
};
let effective_root = cached_root.as_path();

// Check if the input hash is still valid.
let new_hash = command_dispatcher
.glob_hash_cache()
.compute_hash(GlobHashKey::new(
source_checkout.path.clone(),
effective_root.to_path_buf(),
input_globs.globs.clone(),
additional_glob_hash.to_vec(),
))
@@ -322,6 +330,7 @@ impl BuildBackendMetadataSpec {
source_checkout: SourceCheckout,
backend: Backend,
additional_glob_hash: Vec<u8>,
glob_root: PathBuf,
mut log_sink: UnboundedSender<String>,
) -> Result<CachedCondaMetadata, CommandDispatcherError<BuildBackendMetadataError>> {
let backend_identifier = backend.identifier().to_string();
@@ -365,7 +374,7 @@
let input_globs = extend_input_globs_with_variant_files(
outputs.input_globs.clone(),
&self.variant_files,
&source_checkout,
&glob_root,
);
tracing::debug!(
backend = %backend_identifier,
@@ -376,6 +385,7 @@
let input_hash = Self::compute_input_hash(
command_dispatcher,
&source_checkout,
&glob_root,
input_globs,
additional_glob_hash,
)
@@ -384,6 +394,7 @@
Ok(CachedCondaMetadata {
id: random(),
input_hash: input_hash.clone(),
glob_root: Some(glob_root),
metadata: MetadataKind::Outputs {
outputs: outputs.outputs,
},
@@ -394,6 +405,7 @@
async fn compute_input_hash(
command_queue: CommandDispatcher,
source: &SourceCheckout,
glob_root: &Path,
input_globs: BTreeSet<String>,
additional_glob_hash: Vec<u8>,
) -> Result<Option<InputHash>, CommandDispatcherError<BuildBackendMetadataError>> {
@@ -407,7 +419,7 @@
let input_hash = command_queue
.glob_hash_cache()
.compute_hash(GlobHashKey::new(
&source.path,
glob_root.to_path_buf(),
input_globs.clone(),
additional_glob_hash,
))
@@ -439,14 +451,15 @@ impl BuildBackendMetadataSpec {
fn extend_input_globs_with_variant_files(
mut input_globs: BTreeSet<String>,
variant_files: &Option<Vec<PathBuf>>,
source_checkout: &SourceCheckout,
glob_root: &Path,
) -> BTreeSet<String> {
if let Some(variant_files) = variant_files {
for variant_file in variant_files {
let relative = match variant_file.strip_prefix(&source_checkout.path) {
let relative = match variant_file.strip_prefix(glob_root) {
Ok(stripped) => stripped.to_path_buf(),
Err(_) => diff_paths(variant_file, &source_checkout.path)
.unwrap_or_else(|| variant_file.clone()),
Err(_) => {
diff_paths(variant_file, glob_root).unwrap_or_else(|| variant_file.clone())
}
};
let glob = relative.to_string_lossy().replace("\\", "/");
input_globs.insert(glob);
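The relativization that `extend_input_globs_with_variant_files` now performs against the glob root (rather than the checkout path) can be sketched in isolation like this; the `pathdiff` fallback mirrors the diff, everything else is simplified and the paths in `main` are made up.

```rust
use std::path::{Path, PathBuf};

/// Turn an absolute variant-file path into a glob pattern relative to the
/// glob root, falling back to a relative-path computation (and finally the
/// original path) when the file does not live under the root.
fn variant_file_glob(variant_file: &Path, glob_root: &Path) -> String {
    let relative: PathBuf = match variant_file.strip_prefix(glob_root) {
        Ok(stripped) => stripped.to_path_buf(),
        // `pathdiff::diff_paths` builds a `../`-style relative path for
        // out-of-tree files, e.g. a variant file next to the workspace.
        Err(_) => pathdiff::diff_paths(variant_file, glob_root)
            .unwrap_or_else(|| variant_file.to_path_buf()),
    };
    // Globs use forward slashes on all platforms.
    relative.to_string_lossy().replace("\\", "/")
}

fn main() {
    let root = Path::new("/work/pkg");
    assert_eq!(
        variant_file_glob(Path::new("/work/pkg/variants.yaml"), root),
        "variants.yaml"
    );
    assert_eq!(
        variant_file_glob(Path::new("/work/variants.yaml"), root),
        "../variants.yaml"
    );
}
```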
8 changes: 8 additions & 0 deletions crates/pixi_command_dispatcher/src/source_build/mod.rs
@@ -231,6 +231,10 @@ impl SourceBuildSpec {
.await
.map_err_with(SourceBuildError::Discovery)?;

let glob_root = discovered_backend
.init_params
.glob_root_with_fallback(&source_checkout.path);

// Compute the package input hash for caching purposes.
let package_build_input_hash = PackageBuildInputHash::from(discovered_backend.as_ref());

@@ -287,6 +291,7 @@
backend,
work_directory,
package_build_input_hash,
glob_root.clone(),
reporter,
log_sink,
)
@@ -401,12 +406,14 @@ impl SourceBuildSpec {
self.build_profile == BuildProfile::Development && self.source.is_mutable()
}

#[allow(clippy::too_many_arguments)]
async fn build_v1(
self,
command_dispatcher: CommandDispatcher,
backend: Backend,
work_directory: PathBuf,
package_build_input_hash: PackageBuildInputHash,
glob_root: PathBuf,
reporter: Option<Arc<dyn RunExportsReporter>>,
mut log_sink: UnboundedSender<String>,
) -> Result<BuiltPackage, CommandDispatcherError<SourceBuildError>> {
@@ -652,6 +659,7 @@ impl SourceBuildSpec {
packages: host_records,
},
package_build_input_hash: Some(package_build_input_hash),
glob_root: Some(glob_root),
},
})
}
@@ -365,14 +365,22 @@ impl SourceBuildCacheStatusSpec {
};

// Checkout the source for the package.
let source_checkout = command_dispatcher
let _source_checkout = command_dispatcher
.checkout_pinned_source(source.clone())
.await
.map_err_with(SourceBuildCacheStatusError::SourceCheckout)?;

let Some(glob_root) = source_info.glob_root.as_ref().cloned() else {
Contributor Author: Same here.

tracing::debug!(
"cached build missing glob root information; marking '{}' as stale",
self.package
);
return Ok(CachedBuildStatus::Stale(cached_build));
};

// Compute the modification time of the files that match the source input globs.
let glob_time = match GlobModificationTime::from_patterns(
&source_checkout.path,
&glob_root,
source_info.globs.iter().map(String::as_str),
) {
Ok(glob_time) => glob_time,
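Conceptually, the staleness check needs "the newest modification time of anything matching the cached input globs, evaluated from the cached glob root". A hedged sketch of that idea using the `glob` crate follows; pixi's `GlobModificationTime::from_patterns` is more involved, and negative patterns, symlinks, and error reporting are ignored here for brevity.

```rust
use std::{path::Path, time::SystemTime};

/// Minimal sketch (not pixi's `GlobModificationTime`): newest modification
/// time of files under `root` matching any of `patterns`.
fn newest_match_mtime(root: &Path, patterns: &[&str]) -> Option<SystemTime> {
    let mut newest: Option<SystemTime> = None;
    for pattern in patterns {
        // Anchor every pattern at the (possibly out-of-tree) glob root.
        let anchored = root.join(pattern);
        let Ok(paths) = glob::glob(&anchored.to_string_lossy()) else {
            continue; // invalid pattern, skipped in this sketch
        };
        for path in paths.flatten() {
            if let Ok(mtime) = path.metadata().and_then(|m| m.modified()) {
                newest = Some(newest.map_or(mtime, |cur| cur.max(mtime)));
            }
        }
    }
    newest
}

fn main() {
    match newest_match_mtime(Path::new("."), &["src/**/*.rs"]) {
        Some(t) => println!("newest match: {t:?}"),
        None => println!("no matching files"),
    }
}
```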
6 changes: 5 additions & 1 deletion crates/pixi_core/src/lock_file/satisfiability/mod.rs
@@ -1516,6 +1516,10 @@ pub(crate) async fn verify_package_platform_satisfiability(
.map_err(PlatformUnsat::BackendDiscovery)
.map_err(Box::new)?;

let glob_root = discovered_backend
.init_params
.glob_root_with_fallback(&source_dir);

let VariantConfig { variants, .. } = environment
.workspace()
.variants(platform)
@@ -1528,7 +1532,7 @@

let input_hash = input_hash_cache
.compute_hash(GlobHashKey::new(
source_dir,
glob_root,
locked_input_hash.globs.clone(),
additional_glob_hash,
))
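Why the root matters for satisfiability: the locked input hash is computed over files resolved from the glob root, so verification has to feed the same root into `GlobHashKey`, otherwise identical inputs can hash differently. A toy illustration of that sensitivity (not pixi's hashing scheme) using `sha2`, with made-up file names:

```rust
use sha2::{Digest, Sha256};
use std::path::Path;

/// Toy content hash: covers root-relative paths and file contents, so hashing
/// the same files from a different root yields a different digest.
fn hash_files(root: &Path, relative_files: &[&str]) -> String {
    let mut hasher = Sha256::new();
    for rel in relative_files {
        hasher.update(rel.as_bytes()); // the relative path is part of the hash
        if let Ok(contents) = std::fs::read(root.join(rel)) {
            hasher.update(&contents);
        }
    }
    hasher
        .finalize()
        .iter()
        .map(|b| format!("{b:02x}"))
        .collect()
}

fn main() {
    println!("{}", hash_files(Path::new("."), &["Cargo.toml"]));
}
```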