From 32153329d98f20fa3e7798a631ab18a3975e768e Mon Sep 17 00:00:00 2001
From: Bas Zalmstra
Date: Thu, 15 Aug 2024 15:23:45 +0200
Subject: [PATCH] refactor: make build string in recipe actually optional

---
 src/build.rs              | 41 +++++++++------------
 src/lib.rs                |  4 +-
 src/metadata.rs           | 77 +++++++++++++++++++++++---------------
 src/packaging.rs          | 44 +++++++++++-----------
 src/packaging/metadata.rs |  5 +--
 src/recipe/parser.rs      | 24 ------------
 6 files changed, 90 insertions(+), 105 deletions(-)

diff --git a/src/build.rs b/src/build.rs
index f085ee0b..61d63515 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -1,19 +1,19 @@
-//! The build module contains the code for running the build process for a given [`Output`]
-use rattler_conda_types::{Channel, MatchSpec, ParseStrictness};
-use std::path::PathBuf;
-use std::vec;
+//! The build module contains the code for running the build process for a given
+//! [`Output`]
+use std::{path::PathBuf, vec};
 
 use miette::IntoDiagnostic;
+use rattler_conda_types::{Channel, MatchSpec, ParseStrictness};
 use rattler_index::index;
 use rattler_solve::{ChannelPriority, SolveStrategy};
 
-use crate::metadata::Output;
-use crate::package_test::TestConfiguration;
-use crate::recipe::parser::TestType;
-use crate::render::solver::load_repodatas;
-use crate::{package_test, tool_configuration};
+use crate::{
+    metadata::Output, package_test, package_test::TestConfiguration, recipe::parser::TestType,
+    render::solver::load_repodatas, tool_configuration,
+};
 
-/// Check if the build should be skipped because it already exists in any of the channels
+/// Check if the build should be skipped because it already exists in any of the
+/// channels
 pub async fn skip_existing(
     mut outputs: Vec<Output>,
     tool_configuration: &tool_configuration::Configuration,
@@ -78,14 +78,11 @@ pub async fn skip_existing(
             "{}-{}-{}",
             output.name().as_normalized(),
             output.version(),
-            output.build_string().unwrap_or_default()
+            &output.build_string()
        ));
        if exists {
            // The identifier should always be set at this point
-            tracing::info!(
-                "Skipping build for {}",
-                output.identifier().as_deref().unwrap_or("unknown")
-            );
+            tracing::info!("Skipping build for {}", output.identifier());
        }
        !exists
    });
@@ -93,23 +90,20 @@ pub async fn skip_existing(
     Ok(outputs)
 }
 
-/// Run the build for the given output. This will fetch the sources, resolve the dependencies,
-/// and execute the build script. Returns the path to the resulting package.
+/// Run the build for the given output. This will fetch the sources, resolve the
+/// dependencies, and execute the build script. Returns the path to the
+/// resulting package.
 pub async fn run_build(
     output: Output,
     tool_configuration: &tool_configuration::Configuration,
 ) -> miette::Result<(Output, PathBuf)> {
-    if output.build_string().is_none() {
-        miette::bail!("Build string is not set for {:?}", output.name());
-    }
-
     output
         .build_configuration
         .directories
         .create_build_dir()
         .into_diagnostic()?;
 
-    let span = tracing::info_span!("Running build for", recipe = output.identifier().unwrap());
+    let span = tracing::info_span!("Running build for", recipe = output.identifier());
     let _enter = span.enter();
 
     output.record_build_start();
@@ -148,7 +142,8 @@ pub async fn run_build(
 
     // We run all the package content tests
     for test in output.recipe.tests() {
-        // TODO we could also run each of the (potentially multiple) test scripts and collect the errors
+        // TODO we could also run each of the (potentially multiple) test scripts and
+        // collect the errors
         if let TestType::PackageContents { package_contents } = test {
             package_contents
                 .run_test(&paths_json, &output.build_configuration.target_platform)
diff --git a/src/lib.rs b/src/lib.rs
index 5fb7356c..097d646e 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -271,8 +271,8 @@ pub async fn get_build_output(
                 build_string: recipe
                     .build()
                     .string()
-                    .expect("Shouldn't be unset, needs major refactoring, for handling this better")
-                    .to_owned(),
+                    .map(ToString::to_string)
+                    .unwrap_or_else(|| format!("{}_{}", hash, recipe.build().number())),
             },
         );
 
diff --git a/src/metadata.rs b/src/metadata.rs
index c5211aa6..96d4f5ca 100644
--- a/src/metadata.rs
+++ b/src/metadata.rs
@@ -1,5 +1,6 @@
 //! All the metadata that makes up a recipe file
 use std::{
+    borrow::Cow,
     collections::BTreeMap,
     fmt::{self, Display, Formatter},
     io::Write,
@@ -72,7 +73,8 @@ pub struct Directories {
     /// Exposed as `$PREFIX` (or `%PREFIX%` on Windows) in the build script
     pub host_prefix: PathBuf,
     /// The build prefix is the directory where build dependencies are installed
-    /// Exposed as `$BUILD_PREFIX` (or `%BUILD_PREFIX%` on Windows) in the build script
+    /// Exposed as `$BUILD_PREFIX` (or `%BUILD_PREFIX%` on Windows) in the build
+    /// script
     pub build_prefix: PathBuf,
     /// The work directory is the directory where the source code is copied to
     pub work_dir: PathBuf,
@@ -210,7 +212,8 @@ fn default_true() -> bool {
 pub struct PackagingSettings {
     /// The archive type, currently supported are `tar.bz2` and `conda`
     pub archive_type: ArchiveType,
-    /// The compression level from 1-9 or -7-22 for `tar.bz2` and `conda` archives
+    /// The compression level from 1-9 or -7-22 for `tar.bz2` and `conda`
+    /// archives
     pub compression_level: i32,
 }
 
@@ -235,7 +238,8 @@ impl PackagingSettings {
 pub struct BuildConfiguration {
     /// The target platform for the build
     pub target_platform: Platform,
-    /// The host platform (usually target platform, but for `noarch` it's the build platform)
+    /// The host platform (usually target platform, but for `noarch` it's the
+    /// build platform)
     pub host_platform: Platform,
     /// The build platform (the platform that the build is running on)
     pub build_platform: Platform,
@@ -253,14 +257,17 @@ pub struct BuildConfiguration {
     pub solve_strategy: SolveStrategy,
     /// The timestamp to use for the build
     pub timestamp: chrono::DateTime<chrono::Utc>,
-    /// All subpackages coming from this output or other outputs from the same recipe
+    /// All subpackages coming from this output or other outputs from the same
+    /// recipe
     pub subpackages: BTreeMap<PackageName, PackageIdentifier>,
     /// Package format (.tar.bz2 or .conda)
     pub packaging_settings: PackagingSettings,
-    /// Whether to store the recipe and build instructions in the final package or not
+    /// Whether to store the recipe and build instructions in the final package
+    /// or not
     #[serde(skip_serializing, default = "default_true")]
     pub store_recipe: bool,
-    /// Whether to set additional environment variables to force colors in the build script or not
+    /// Whether to set additional environment variables to force colors in the
+    /// build script or not
     #[serde(skip_serializing, default = "default_true")]
     pub force_colors: bool,
 }
@@ -301,21 +308,24 @@ pub struct BuildSummary {
     pub failed: bool,
 }
 
-/// A output. This is the central element that is passed to the `run_build` function
-/// and fully specifies all the options and settings to run the build.
+/// A output. This is the central element that is passed to the `run_build`
+/// function and fully specifies all the options and settings to run the build.
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct Output {
     /// The rendered recipe that is used to build this output
     pub recipe: Recipe,
-    /// The build configuration for this output (e.g. target_platform, channels, and other settings)
+    /// The build configuration for this output (e.g. target_platform, channels,
+    /// and other settings)
     pub build_configuration: BuildConfiguration,
-    /// The finalized dependencies for this output. If this is `None`, the dependencies have not been resolved yet.
-    /// During the `run_build` functions, the dependencies are resolved and this field is filled.
+    /// The finalized dependencies for this output. If this is `None`, the
+    /// dependencies have not been resolved yet. During the `run_build`
+    /// functions, the dependencies are resolved and this field is filled.
     pub finalized_dependencies: Option<FinalizedDependencies>,
-    /// The finalized dependencies from the cache (if there is a cache instruction)
+    /// The finalized dependencies from the cache (if there is a cache
+    /// instruction)
     pub finalized_cache_dependencies: Option<FinalizedDependencies>,
-    /// The finalized sources for this output. Contain the exact git hashes for the sources that are used
-    /// to build this output.
+    /// The finalized sources for this output. Contain the exact git hashes for
+    /// the sources that are used to build this output.
     pub finalized_sources: Option<Vec<Source>>,
 
     /// Summary of the build
@@ -341,9 +351,18 @@ impl Output {
         self.recipe.package().version()
     }
 
-    /// The build string is usually set automatically as the hash of the variant configuration.
-    pub fn build_string(&self) -> Option<&str> {
-        self.recipe.build().string()
+    /// The build string is either the build string from the recipe or computed
+    /// from the hash and build number.
+    pub fn build_string(&self) -> Cow<'_, str> {
+        if let Some(build) = self.recipe.build().string() {
+            build.into()
+        } else {
+            format!(
+                "{}_{}",
+                &self.build_configuration.hash, self.recipe.build.number
+            )
+            .into()
+        }
     }
 
     /// The channels to use when resolving dependencies
@@ -358,13 +377,13 @@ impl Output {
     }
 
     /// retrieve an identifier for this output ({name}-{version}-{build_string})
-    pub fn identifier(&self) -> Option<String> {
-        Some(format!(
+    pub fn identifier(&self) -> String {
+        format!(
             "{}-{}-{}",
             self.name().as_normalized(),
             self.version(),
-            self.build_string()?
-        ))
+            &self.build_string()
+        )
     }
 
     /// Record a warning during the build
@@ -420,7 +439,8 @@ impl Output {
     }
 
     /// Search for the resolved package with the given name in the host prefix
-    /// Returns a tuple of the package and a boolean indicating whether the package is directly requested
+    /// Returns a tuple of the package and a boolean indicating whether the
+    /// package is directly requested
     pub fn find_resolved_package(&self, name: &str) -> Option<(&RepoDataRecord, bool)> {
         let host = self.finalized_dependencies.as_ref()?.host.as_ref()?;
         let record = host
@@ -442,7 +462,7 @@ impl Output {
     /// Print a nice summary of the build
     pub fn log_build_summary(&self) -> Result<(), std::io::Error> {
         let summary = self.build_summary.lock().unwrap();
-        let identifier = self.identifier().unwrap_or_default();
+        let identifier = self.identifier();
         let span = tracing::info_span!("Build summary for", recipe = identifier);
         let _enter = span.enter();
 
@@ -574,11 +594,7 @@ impl Output {
             table
         };
 
-        writeln!(
-            f,
-            "Variant configuration (hash: {}):",
-            self.build_string().unwrap_or_default()
-        )?;
+        writeln!(f, "Variant configuration (hash: {}):", self.build_string())?;
         let mut table = template();
         if table_format != comfy_table::presets::UTF8_FULL {
             table.set_header(vec!["Key", "Value"]);
@@ -642,7 +658,6 @@ mod tests {
 
 #[cfg(test)]
 mod test {
-    use rstest::*;
     use std::str::FromStr;
 
     use chrono::TimeZone;
@@ -652,11 +667,11 @@ mod test {
         VersionWithSource,
     };
     use rattler_digest::{parse_digest_from_hex, Md5, Sha256};
+    use rstest::*;
     use url::Url;
 
-    use crate::render::resolved_dependencies::{self, SourceDependency};
-
     use super::{Directories, Output};
+    use crate::render::resolved_dependencies::{self, SourceDependency};
 
     #[test]
     fn test_directories_yaml_rendering() {
diff --git a/src/packaging.rs b/src/packaging.rs
index 784335ea..15ef95e5 100644
--- a/src/packaging.rs
+++ b/src/packaging.rs
@@ -1,13 +1,17 @@
-//! This module contains the functions to package a conda package from a given output.
+//! This module contains the functions to package a conda package from a given
+//! output.
+use std::{
+    collections::HashSet,
+    io::Write,
+    path::{Component, Path, PathBuf},
+};
+
 use fs_err as fs;
 use fs_err::File;
-use rattler_conda_types::Platform;
-use std::collections::HashSet;
-use std::io::Write;
-use std::path::{Component, Path, PathBuf};
-
-use rattler_conda_types::package::PathsJson;
-use rattler_conda_types::package::{ArchiveType, PackageFile};
+use rattler_conda_types::{
+    package::{ArchiveType, PackageFile, PathsJson},
+    Platform,
+};
 use rattler_package_streaming::write::{
     write_conda_package, write_tar_bz2_package, CompressionLevel,
 };
@@ -18,9 +22,7 @@ mod metadata;
 pub use file_finder::{content_type, Files, TempFiles};
 pub use metadata::{contains_prefix_binary, contains_prefix_text, create_prefix_placeholder};
 
-use crate::metadata::Output;
-use crate::package_test::write_test_files;
-use crate::{post_process, tool_configuration};
+use crate::{metadata::Output, package_test::write_test_files, post_process, tool_configuration};
 
 #[allow(missing_docs)]
 #[derive(Debug, thiserror::Error)]
@@ -157,7 +159,8 @@ fn write_recipe_folder(
         .write_all(serde_yaml::to_string(&output.build_configuration.variant)?.as_bytes())?;
     files.push(variant_config_file);
 
-    // Write out the "rendered" recipe as well (the recipe with all the variables replaced with their values)
+    // Write out the "rendered" recipe as well (the recipe with all the variables
+    // replaced with their values)
     let rendered_recipe_file = recipe_folder.join("rendered_recipe.yaml");
     let mut rendered_recipe = File::create(&rendered_recipe_file)?;
     rendered_recipe.write_all(serde_yaml::to_string(&output)?.as_bytes())?;
@@ -277,9 +280,7 @@ pub fn package_conda(
         )?;
     }
 
-    let identifier = output
-        .identifier()
-        .ok_or(PackagingError::BuildStringNotSet)?;
+    let identifier = output.identifier();
     let out_path = output_folder.join(format!(
         "{}{}",
         identifier,
@@ -326,10 +327,10 @@ pub fn package_conda(
     Ok((out_path, paths_json))
 }
 
-/// When building package for noarch, we don't create another build-platform folder
-/// together with noarch but conda-build does
-/// because of this we have a failure in conda-smithy CI so we also *mimic* this behaviour
-/// until this behaviour is changed
+/// When building package for noarch, we don't create another build-platform
+/// folder together with noarch but conda-build does
+/// because of this we have a failure in conda-smithy CI so we also *mimic* this
+/// behaviour until this behaviour is changed
 /// https://github.com/conda-forge/conda-forge-ci-setup-feedstock/blob/main/recipe/conda_forge_ci_setup/feedstock_outputs.py#L164
 fn create_empty_build_folder(
     local_channel_dir: &Path,
@@ -345,8 +346,9 @@ fn create_empty_build_folder(
 }
 
 impl Output {
-    /// Create a conda package from any new files in the host prefix. Note: the previous stages should have been
-    /// completed before calling this function.
+    /// Create a conda package from any new files in the host prefix. Note: the
+    /// previous stages should have been completed before calling this
+    /// function.
     pub async fn create_package(
         &self,
         tool_configuration: &tool_configuration::Configuration,
diff --git a/src/packaging/metadata.rs b/src/packaging/metadata.rs
index 595a5f0b..54176dc9 100644
--- a/src/packaging/metadata.rs
+++ b/src/packaging/metadata.rs
@@ -276,10 +276,7 @@ impl Output {
         Ok(IndexJson {
             name: self.name().clone(),
             version: self.version().clone().into(),
-            build: self
-                .build_string()
-                .ok_or(PackagingError::BuildStringNotSet)?
-                .to_string(),
+            build: self.build_string().into_owned(),
             build_number: recipe.build().number(),
             arch,
             platform,
diff --git a/src/recipe/parser.rs b/src/recipe/parser.rs
index 01f1f8a0..e8e27f81 100644
--- a/src/recipe/parser.rs
+++ b/src/recipe/parser.rs
@@ -138,28 +138,11 @@ impl Recipe {
         })
     }
 
-    /// Build a recipe from a YAML string and use a given package hash string as default value.
-    pub fn from_yaml_with_default_hash_str(
-        yaml: &str,
-        default_pkg_hash: &str,
-        jinja_opt: SelectorConfig,
-    ) -> Result<Self, Vec<ParsingError>> {
-        let mut recipe = Self::from_yaml(yaml, jinja_opt)?;
-
-        // Set the build string to the package hash if it is not set
-        if recipe.build.string.is_none() {
-            recipe.build.string = Some(format!("{}_{}", default_pkg_hash, recipe.build.number));
-        }
-
-        Ok(recipe)
-    }
-
     /// Create recipes from a YAML [`Node`] structure.
     pub fn from_node(
         root_node: &Node,
         jinja_opt: SelectorConfig,
     ) -> Result<Self, Vec<PartialParsingError>> {
-        let hash = jinja_opt.hash.clone();
         let experimental = jinja_opt.experimental;
         let mut jinja = Jinja::new(jinja_opt);
 
@@ -268,13 +251,6 @@ impl Recipe {
             })
             .flatten_errors()?;
 
-        // Add hash to build.string if it is not set
-        if build.string.is_none() {
-            if let Some(hash) = hash {
-                build.string = Some(format!("{}_{}", hash, build.number));
-            }
-        }
-
        // evaluate the skip conditions
        build.skip = build.skip.with_eval(&jinja)?;
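
// Not part of the patch above: a minimal, self-contained sketch of the
// Cow-based fallback that the diff adopts. `MiniBuild`/`MiniOutput` are
// hypothetical stand-ins for illustration only, not the crate's real
// `Recipe`/`Output` types. The idea: borrow the recipe's explicit build
// string when it is set, otherwise compute "{hash}_{number}" on demand,
// which is why `identifier()` no longer needs to return an `Option`.
use std::borrow::Cow;

struct MiniBuild {
    string: Option<String>, // explicit build string from the recipe, if any
    number: u64,            // build number
}

struct MiniOutput {
    build: MiniBuild,
    hash: String, // variant hash from the build configuration
}

impl MiniOutput {
    // Either borrows the recipe's build string or allocates the fallback.
    fn build_string(&self) -> Cow<'_, str> {
        match &self.build.string {
            Some(s) => Cow::Borrowed(s.as_str()),
            None => Cow::Owned(format!("{}_{}", self.hash, self.build.number)),
        }
    }

    // Always succeeds now, since a build string can always be produced.
    fn identifier(&self) -> String {
        format!("example-pkg-1.0.0-{}", self.build_string())
    }
}

fn main() {
    let explicit = MiniOutput {
        build: MiniBuild { string: Some("py311_0".into()), number: 0 },
        hash: "h60d57d3".into(),
    };
    let fallback = MiniOutput {
        build: MiniBuild { string: None, number: 3 },
        hash: "h60d57d3".into(),
    };
    // Prints "example-pkg-1.0.0-py311_0" and "example-pkg-1.0.0-h60d57d3_3".
    println!("{}", explicit.identifier());
    println!("{}", fallback.identifier());
}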