diff --git a/crates/pixi_build_frontend/src/build_frontend.rs b/crates/pixi_build_frontend/src/build_frontend.rs
index 6cda5aaf6..47eae1e53 100644
--- a/crates/pixi_build_frontend/src/build_frontend.rs
+++ b/crates/pixi_build_frontend/src/build_frontend.rs
@@ -89,7 +89,7 @@ impl BuildFrontend {
 
         protocol
             .with_backend_override(request.build_tool_override)
-            .finish(&self.tool_cache)
+            .finish(&self.tool_cache, request.build_id)
            .await
    }
 }
diff --git a/crates/pixi_build_frontend/src/lib.rs b/crates/pixi_build_frontend/src/lib.rs
index 7ecc388d9..ff86b3526 100644
--- a/crates/pixi_build_frontend/src/lib.rs
+++ b/crates/pixi_build_frontend/src/lib.rs
@@ -66,6 +66,10 @@ pub struct SetupRequest {
 
     /// Overrides for the build tool.
     pub build_tool_override: Option,
+
+    /// Identifier for the rest of the requests.
+    /// This is used to identify the requests that belong to the same build.
+    pub build_id: usize,
 }
 
 #[derive(Debug)]
diff --git a/crates/pixi_build_frontend/src/protocol.rs b/crates/pixi_build_frontend/src/protocol.rs
index 9dfca0af8..55223f201 100644
--- a/crates/pixi_build_frontend/src/protocol.rs
+++ b/crates/pixi_build_frontend/src/protocol.rs
@@ -1,10 +1,10 @@
-use miette::Diagnostic;
+use std::{path::PathBuf, sync::Arc};
+
+use miette::{Diagnostic, IntoDiagnostic};
 use pixi_build_types::procedures::{
     conda_build::{CondaBuildParams, CondaBuildResult},
     conda_metadata::{CondaMetadataParams, CondaMetadataResult},
 };
-use std::path::PathBuf;
-use std::sync::Arc;
 
 use crate::{conda_build_protocol, pixi_protocol, CondaBuildReporter, CondaMetadataReporter};
 
@@ -100,11 +100,10 @@ impl Protocol {
         reporter: Arc<dyn CondaMetadataReporter>,
     ) -> miette::Result<CondaMetadataResult> {
         match self {
-            Self::Pixi(protocol) => {
-                protocol
-                    .get_conda_metadata(request, reporter.as_ref())
-                    .await
-            }
+            Self::Pixi(protocol) => protocol
+                .get_conda_metadata(request, reporter.as_ref())
+                .await
+                .into_diagnostic(),
             Self::CondaBuild(protocol) => protocol.get_conda_metadata(request),
         }
     }
@@ -115,7 +114,10 @@ impl Protocol {
         reporter: Arc<dyn CondaBuildReporter>,
     ) -> miette::Result<CondaBuildResult> {
         match self {
-            Self::Pixi(protocol) => protocol.conda_build(request, reporter.as_ref()).await,
+            Self::Pixi(protocol) => protocol
+                .conda_build(request, reporter.as_ref())
+                .await
+                .into_diagnostic(),
             Self::CondaBuild(_) => unreachable!(),
         }
     }
diff --git a/crates/pixi_build_frontend/src/protocol_builder.rs b/crates/pixi_build_frontend/src/protocol_builder.rs
index 0764ba1eb..d79056058 100644
--- a/crates/pixi_build_frontend/src/protocol_builder.rs
+++ b/crates/pixi_build_frontend/src/protocol_builder.rs
@@ -92,11 +92,15 @@ impl ProtocolBuilder {
     }
 
     /// Finish the construction of the protocol and return the protocol object
-    pub async fn finish(self, tool_cache: &ToolCache) -> Result<Protocol, FinishError> {
+    pub async fn finish(
+        self,
+        tool_cache: &ToolCache,
+        build_id: usize,
+    ) -> Result<Protocol, FinishError> {
         match self {
             Self::Pixi(protocol) => Ok(Protocol::Pixi(
                 protocol
-                    .finish(tool_cache)
+                    .finish(tool_cache, build_id)
                     .await
                     .map_err(FinishError::Pixi)?,
             )),
diff --git a/crates/pixi_build_frontend/src/protocols/pixi/mod.rs b/crates/pixi_build_frontend/src/protocols/pixi/mod.rs
index a07388191..a5918491b 100644
--- a/crates/pixi_build_frontend/src/protocols/pixi/mod.rs
+++ b/crates/pixi_build_frontend/src/protocols/pixi/mod.rs
@@ -1,4 +1,5 @@
 mod protocol;
+mod stderr;
 
 use std::{
     fmt,
@@ -11,6 +12,7 @@ use pixi_consts::consts;
 use pixi_manifest::Manifest;
 pub use protocol::{InitializeError, Protocol};
 use rattler_conda_types::ChannelConfig;
+pub(crate) use stderr::{stderr_null, stderr_stream};
 use thiserror::Error;
 use which::Error;
@@ -116,7 +118,7 @@ impl ProtocolBuilder {
         Ok(None)
     }
 
-    pub async fn finish(self, tool: &ToolCache) -> Result<Protocol, FinishError> {
+    pub async fn finish(self, tool: &ToolCache, build_id: usize) -> Result<Protocol, FinishError> {
         let tool_spec = self
             .backend_spec
             .ok_or(FinishError::NoBuildSection(self.manifest.path.clone()))?;
@@ -124,7 +126,7 @@ impl ProtocolBuilder {
         Ok(Protocol::setup(
             self.source_dir,
             self.manifest.path,
-            self.manifest.parsed.project.name,
+            build_id,
             self.cache_dir,
             self.channel_config,
             tool,
diff --git a/crates/pixi_build_frontend/src/protocols/pixi/protocol.rs b/crates/pixi_build_frontend/src/protocols/pixi/protocol.rs
index ca64846cd..c7e4b8431 100644
--- a/crates/pixi_build_frontend/src/protocols/pixi/protocol.rs
+++ b/crates/pixi_build_frontend/src/protocols/pixi/protocol.rs
@@ -1,11 +1,12 @@
-use std::{ffi::OsStr, path::PathBuf};
+use std::{ffi::OsStr, path::PathBuf, sync::Arc};
 
+use futures::TryFutureExt;
 use jsonrpsee::{
     async_client::{Client, ClientBuilder},
     core::client::{ClientT, Error, Error as ClientError, TransportReceiverT, TransportSenderT},
     types::ErrorCode,
 };
-use miette::{diagnostic, Diagnostic, IntoDiagnostic};
+use miette::{diagnostic, Diagnostic};
 use pixi_build_types::{
     procedures,
     procedures::{
@@ -17,7 +18,13 @@ use pixi_build_types::{
 };
 use rattler_conda_types::ChannelConfig;
 use thiserror::Error;
+use tokio::{
+    io::{AsyncBufReadExt, BufReader, Lines},
+    process::ChildStderr,
+    sync::{oneshot, Mutex},
+};
 
+use super::{stderr_null, stderr_stream};
 use crate::{
     jsonrpc::{stdio_transport, Receiver, RpcParams, Sender},
     protocols::error::BackendError,
@@ -72,6 +79,9 @@ pub enum ProtocolError {
         "This is often caused by the build backend incorrectly reporting certain capabilities. Consider contacting the build backend maintainers for a fix."
     ))]
     MethodNotImplemented(String, String),
+
+    #[error("pipe of stderr stopped earlier than expected")]
+    StdErrPipeStopped,
 }
 
 /// A protocol that uses a pixi manifest to invoke a build backend.
@@ -87,17 +97,20 @@ pub struct Protocol {
     /// The path to the manifest relative to the source directory.
     relative_manifest_path: PathBuf,
 
-    /// Name of the project taken from the manifest
-    project_name: Option<String>,
-
     _backend_capabilities: BackendCapabilities,
+
+    /// The build identifier
+    build_id: usize,
+
+    /// The handle to the stderr of the backend process.
+    stderr: Option<Arc<Mutex<Lines<BufReader<ChildStderr>>>>>,
 }
 
 impl Protocol {
     pub(crate) async fn setup(
         source_dir: PathBuf,
         manifest_path: PathBuf,
-        project_name: Option<String>,
+        build_id: usize,
         cache_dir: Option<PathBuf>,
         channel_config: ChannelConfig,
         tool: Tool,
@@ -108,7 +121,7 @@ impl Protocol {
                 let mut process = tokio::process::Command::from(tool.command())
                     .stdout(std::process::Stdio::piped())
                     .stdin(std::process::Stdio::piped())
-                    .stderr(std::process::Stdio::inherit()) // TODO: Capture this?
+                    .stderr(std::process::Stdio::piped()) // TODO: Capture this?
                     .spawn()?;
 
                 let backend_identifier = tool
@@ -125,6 +138,10 @@ impl Protocol {
                 let stdout = process
                     .stdout
                     .expect("since we piped stdout we expect a valid value here");
+                let stderr = process
+                    .stderr
+                    .map(|stderr| BufReader::new(stderr).lines())
+                    .expect("since we piped stderr we expect a valid value here");
 
                 // Construct a JSON-RPC client to communicate with the backend process.
                 let (tx, rx) = stdio_transport(stdin, stdout);
@@ -132,11 +149,12 @@
                     backend_identifier,
                     source_dir,
                     manifest_path,
-                    project_name,
+                    build_id,
                     cache_dir,
                     channel_config,
                     tx,
                     rx,
+                    Some(stderr),
                 )
                 .await
             }
@@ -145,11 +163,12 @@
                 "".to_string(),
                 source_dir,
                 manifest_path,
-                project_name,
+                build_id,
                 cache_dir,
                 channel_config,
                 Sender::from(ipc.rpc_out),
                 Receiver::from(ipc.rpc_in),
+                None,
             )
             .await
         }
@@ -161,11 +180,12 @@
         backend_identifier: String,
         source_dir: PathBuf,
         manifest_path: PathBuf,
-        project_name: Option<String>,
+        build_id: usize,
         cache_dir: Option<PathBuf>,
         channel_config: ChannelConfig,
         sender: impl TransportSenderT + Send,
         receiver: impl TransportReceiverT + Send,
+        stderr: Option<Lines<BufReader<ChildStderr>>>,
     ) -> Result<Self, InitializeError> {
         let relative_manifest_path = manifest_path
             .strip_prefix(source_dir)
@@ -199,10 +219,11 @@
         Ok(Self {
             backend_identifier,
             _channel_config: channel_config,
-            project_name,
             client,
             _backend_capabilities: result.capabilities,
             relative_manifest_path,
+            build_id,
+            stderr: stderr.map(Mutex::new).map(Arc::new),
         })
     }
 
@@ -220,8 +241,19 @@
         &self,
         request: &CondaMetadataParams,
         reporter: &dyn CondaMetadataReporter,
-    ) -> miette::Result<CondaMetadataResult> {
-        let operation = reporter.on_metadata_start(self.project_name.as_deref().unwrap_or(""));
+    ) -> Result<CondaMetadataResult, ProtocolError> {
+        // Capture all of stderr and discard it
+        let stderr = self.stderr.as_ref().map(|stderr| {
+            // Cancellation signal
+            let (cancel_tx, cancel_rx) = oneshot::channel();
+            // Spawn the stderr forwarding task
+            let handle = tokio::spawn(stderr_null(stderr.clone(), cancel_rx));
+            (cancel_tx, handle)
+        });
+
+        // Start the metadata operation
+        let operation = reporter.on_metadata_start(self.build_id);
+
         let result = self
             .client
             .request(
@@ -235,8 +267,23 @@
                     err,
                     procedures::conda_metadata::METHOD_NAME,
                 )
-            })
-            .into_diagnostic();
+            });
+
+        // Wait for the stderr sink to finish, by signaling it to stop
+        if let Some((cancel_tx, handle)) = stderr {
+            // Cancel the stderr forwarding
+            if cancel_tx.send(()).is_err() {
+                return Err(ProtocolError::StdErrPipeStopped);
+            }
+            handle.await.map_or_else(
+                |e| match e.try_into_panic() {
+                    Ok(panic) => std::panic::resume_unwind(panic),
+                    Err(_) => Err(ProtocolError::StdErrPipeStopped),
+                },
+                |e| e.map_err(|_| ProtocolError::StdErrPipeStopped),
+            )?;
+        }
+
         reporter.on_metadata_end(operation);
         result
     }
@@ -246,23 +293,76 @@
         &self,
         request: &CondaBuildParams,
         reporter: &dyn CondaBuildReporter,
-    ) -> miette::Result<CondaBuildResult> {
-        let operation = reporter.on_build_start(self.project_name.as_deref().unwrap_or(""));
-        let result = self
+    ) -> Result<CondaBuildResult, ProtocolError> {
+        // Captures stderr output
+        let stderr = self.stderr.as_ref().map(|stderr| {
+            let (sender, receiver) = tokio::sync::mpsc::channel(100);
+            let (cancel_tx, cancel_rx) = oneshot::channel();
+            let handle = tokio::spawn(stderr_stream(stderr.clone(), sender, cancel_rx));
+            (cancel_tx, receiver, handle)
+        });
+
+        let operation = reporter.on_build_start(self.build_id);
+        let request = self
             .client
             .request(
                 procedures::conda_build::METHOD_NAME,
                 RpcParams::from(request),
             )
-            .await
             .map_err(|err| {
                 ProtocolError::from_client_error(
                     self.backend_identifier.clone(),
                     err,
                     procedures::conda_build::METHOD_NAME,
                 )
-            })
-            .into_diagnostic();
+            });
+
+        // There can be two cases, the stderr is captured or is not captured
+        // In the case of capturing we need to select between the request and the stderr
+        // forwarding to drive these two futures concurrently
+        //
+        // In the other case we can just wait for the request to finish
+        let result = if let Some((cancel_tx, receiver, handle)) = stderr {
+            // This is the case where we capture stderr
+
+            // Create a future that will forward stderr to the reporter
+            let send_stderr = async {
+                let mut receiver = receiver;
+                while let Some(line) = receiver.recv().await {
+                    reporter.on_build_output(operation, line);
+                }
+            };
+
+            // Select between the request and the stderr forwarding
+            let result = tokio::select! {
+                result = request => result,
+                _ = send_stderr => {
+                    Err(ProtocolError::StdErrPipeStopped)
+                }
+            };
+
+            // Cancel the stderr forwarding
+            if cancel_tx.send(()).is_err() {
+                return Err(ProtocolError::StdErrPipeStopped);
+            }
+
+            // Wait for the stderr forwarding to finish, it should because we cancelled
+            handle.await.map_or_else(
+                |e| match e.try_into_panic() {
+                    Ok(panic) => std::panic::resume_unwind(panic),
+                    Err(_) => Err(ProtocolError::StdErrPipeStopped),
+                },
+                |e| e.map_err(|_| ProtocolError::StdErrPipeStopped),
+            )?;
+
+            // Return the result
+            result
+        } else {
+            // This is the case where we don't capture stderr
+            request.await
+        };
+
+        // Build has completed
         reporter.on_build_end(operation);
         result
     }
diff --git a/crates/pixi_build_frontend/src/protocols/pixi/stderr.rs b/crates/pixi_build_frontend/src/protocols/pixi/stderr.rs
new file mode 100644
index 000000000..a49fbc244
--- /dev/null
+++ b/crates/pixi_build_frontend/src/protocols/pixi/stderr.rs
@@ -0,0 +1,54 @@
+use std::sync::Arc;
+
+use tokio::{
+    io::{BufReader, Lines},
+    process::ChildStderr,
+    sync::{mpsc, oneshot, Mutex},
+};
+
+/// Stderr sink that captures the stderr output of the backend
+/// but does not do anything with it.
+pub(crate) async fn stderr_null(
+    buffer: Arc<Mutex<Lines<BufReader<ChildStderr>>>>,
+    cancel: oneshot::Receiver<()>,
+) -> Result<(), std::io::Error> {
+    tokio::select! {
+        // Please stop
+        _ = cancel => {
+            Ok(())
+        }
+        // Please keep reading
+        result = async {
+            let mut lines = buffer.lock().await;
+            while let Some(_line) = lines.next_line().await? {}
+            Ok(())
+        } => {
+            result
+        }
+    }
+}
+
+/// Stderr stream that captures the stderr output of the backend
+/// and sends it over the stream.
+pub(crate) async fn stderr_stream(
+    buffer: Arc<Mutex<Lines<BufReader<ChildStderr>>>>,
+    sender: mpsc::Sender<String>,
+    cancel: oneshot::Receiver<()>,
+) -> Result<(), std::io::Error> {
+    tokio::select! {
+        _ = cancel => {
+            Ok(())
+        }
+        result = async {
+            let mut lines = buffer.lock().await;
+            while let Some(line) = lines.next_line().await? {
+                if let Err(err) = sender.send(line).await {
+                    return Err(std::io::Error::new(std::io::ErrorKind::Other, err));
+                }
+            }
+            Ok(())
+        } => {
+            result
+        }
+    }
+}
diff --git a/crates/pixi_build_frontend/src/reporters.rs b/crates/pixi_build_frontend/src/reporters.rs
index 01a2beee1..ec96a15de 100644
--- a/crates/pixi_build_frontend/src/reporters.rs
+++ b/crates/pixi_build_frontend/src/reporters.rs
@@ -3,7 +3,9 @@ use std::sync::Arc;
 /// Reporter trait for reporting the progress of metadata operations.
 pub trait CondaMetadataReporter: Send + Sync {
     /// Reports the start of the get_conda_metadata operation.
-    fn on_metadata_start(&self, identifier: &str) -> usize;
+    /// Returns a unique identifier for the operation.
+    fn on_metadata_start(&self, build_id: usize) -> usize;
+
     /// Reports the end of the get_conda_metadata operation.
     fn on_metadata_end(&self, operation: usize);
 }
 
@@ -12,7 +14,7 @@ pub trait CondaMetadataReporter: Send + Sync {
 #[derive(Clone)]
 pub struct NoopCondaMetadataReporter;
 impl CondaMetadataReporter for NoopCondaMetadataReporter {
-    fn on_metadata_start(&self, _identifier: &str) -> usize {
+    fn on_metadata_start(&self, _build_id: usize) -> usize {
         0
     }
     fn on_metadata_end(&self, _operation: usize) {}
@@ -27,19 +29,28 @@ impl NoopCondaMetadataReporter {
 /// Reporter trait for reporting the progress of build operations.
 pub trait CondaBuildReporter: Send + Sync {
     /// Reports the start of the build_conda operation.
-    fn on_build_start(&self, identifier: &str) -> usize;
+    /// Returns a unique identifier for the operation.
+    fn on_build_start(&self, build_id: usize) -> usize;
+
     /// Reports the end of the build_conda operation.
     fn on_build_end(&self, operation: usize);
+
+    /// Reports output from the build process.
+    fn on_build_output(&self, operation: usize, line: String);
 }
 
 /// A no-op implementation of the CondaBuildReporter trait.
 #[derive(Clone)]
 pub struct NoopCondaBuildReporter;
 impl CondaBuildReporter for NoopCondaBuildReporter {
-    fn on_build_start(&self, _identifier: &str) -> usize {
+    fn on_build_start(&self, _build_id: usize) -> usize {
         0
     }
     fn on_build_end(&self, _operation: usize) {}
+
+    fn on_build_output(&self, _operation: usize, _line: String) {
+        todo!()
+    }
 }
 
 impl NoopCondaBuildReporter {
diff --git a/crates/pixi_build_frontend/tests/diagnostics.rs b/crates/pixi_build_frontend/tests/diagnostics.rs
index 837a29152..47cd1be8b 100644
--- a/crates/pixi_build_frontend/tests/diagnostics.rs
+++ b/crates/pixi_build_frontend/tests/diagnostics.rs
@@ -1,3 +1,5 @@
+use std::path::Path;
+
 use miette::{Diagnostic, GraphicalReportHandler, GraphicalTheme};
 use pixi_build_frontend::{BuildFrontend, InProcessBackend, SetupRequest};
 
@@ -17,6 +19,7 @@ async fn test_non_existing_discovery() {
         .setup_protocol(SetupRequest {
             source_dir: "non/existing/path".into(),
             build_tool_override: Default::default(),
+            build_id: 0,
         })
         .await
         .unwrap_err();
@@ -31,6 +34,7 @@ async fn test_source_dir_is_file() {
         .setup_protocol(SetupRequest {
             source_dir: source_file.path().to_path_buf(),
             build_tool_override: Default::default(),
+            build_id: 0,
         })
         .await
         .unwrap_err();
@@ -47,12 +51,13 @@ async fn test_source_dir_is_empty() {
         .setup_protocol(SetupRequest {
             source_dir: source_dir.path().to_path_buf(),
             build_tool_override: Default::default(),
+            build_id: 0,
         })
         .await
         .unwrap_err();
 
     let snapshot = error_to_snapshot(&err);
-    let snapshot = snapshot.replace(&source_dir.path().display().to_string(), "[SOURCE_DIR]");
+    let snapshot = replace_source_dir(&snapshot, source_dir.path());
 
     insta::assert_snapshot!(snapshot);
 }
@@ -67,18 +72,24 @@ async fn test_invalid_manifest() {
         .setup_protocol(SetupRequest {
             source_dir: source_dir.path().to_path_buf(),
             build_tool_override: Default::default(),
+            build_id: 0,
         })
         .await
         .unwrap_err();
 
     let snapshot = error_to_snapshot(&err);
-    let snapshot = snapshot
-        .replace(&source_dir.path().display().to_string(), "[SOURCE_DIR]")
-        .replace('\\', "/");
 
     insta::assert_snapshot!(snapshot);
 }
 
+fn replace_source_dir(snapshot: &str, source_dir: &Path) -> String {
+    snapshot.replace(
+        &(source_dir.display().to_string() + std::path::MAIN_SEPARATOR_STR),
+        "[SOURCE_DIR]/",
+    )
+}
+
 #[tokio::test]
 async fn test_missing_backend() {
     // Setup a temporary project
@@ -106,14 +117,13 @@ async fn test_missing_backend() {
         .setup_protocol(SetupRequest {
             source_dir: source_dir.path().to_path_buf(),
             build_tool_override: Default::default(),
+            build_id: 0,
         })
         .await
         .unwrap_err();
 
     let snapshot = error_to_snapshot(&err);
-    let snapshot = snapshot
-        .replace(&source_dir.path().display().to_string(), "[SOURCE_DIR]")
-        .replace('\\', "/");
+    let snapshot = replace_source_dir(&snapshot, source_dir.path());
 
     insta::assert_snapshot!(snapshot);
 }
@@ -151,13 +161,12 @@ async fn test_invalid_backend() {
         .setup_protocol(SetupRequest {
             source_dir: source_dir.path().to_path_buf(),
             build_tool_override: ipc.into(),
+            build_id: 0,
         })
         .await
         .unwrap_err();
 
     let snapshot = error_to_snapshot(&err);
-    let snapshot = snapshot
-        .replace(&source_dir.path().display().to_string(), "[SOURCE_DIR]")
-        .replace('\\', "/");
+    let snapshot = replace_source_dir(&snapshot, source_dir.path());
 
     insta::assert_snapshot!(snapshot);
 }
diff --git a/src/build/mod.rs b/src/build/mod.rs
index 79cd97635..89d063c4a 100644
--- a/src/build/mod.rs
+++ b/src/build/mod.rs
@@ -1,4 +1,5 @@
 mod cache;
+mod reporters;
 
 use std::{
     ffi::OsStr,
@@ -13,7 +14,7 @@ use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
 use chrono::Utc;
 use itertools::Itertools;
 use miette::Diagnostic;
-use pixi_build_frontend::{CondaBuildReporter, CondaMetadataReporter, SetupRequest};
+use pixi_build_frontend::SetupRequest;
 use pixi_build_types::{
     procedures::{
         conda_build::{CondaBuildParams, CondaOutputIdentifier},
@@ -40,6 +41,8 @@ use crate::build::cache::{
     SourceMetadataInput,
 };
 
+pub use reporters::{BuildMetadataReporter, BuildReporter};
+
 /// The [`BuildContext`] is used to build packages from source.
 #[derive(Clone)]
 pub struct BuildContext {
@@ -133,7 +136,8 @@ impl BuildContext {
         host_virtual_packages: Vec<GenericVirtualPackage>,
         build_platform: Platform,
         build_virtual_packages: Vec<GenericVirtualPackage>,
-        metadata_reporter: Arc<dyn CondaMetadataReporter>,
+        metadata_reporter: Arc<dyn BuildMetadataReporter>,
+        build_id: usize,
     ) -> Result {
         let source = self.fetch_source(source_spec).await?;
         let records = self
@@ -145,6 +149,7 @@ impl BuildContext {
                 build_platform,
                 build_virtual_packages,
                 metadata_reporter.clone(),
+                build_id,
             )
             .await?;
 
@@ -161,7 +166,8 @@ impl BuildContext {
         host_platform: Platform,
         host_virtual_packages: Vec<GenericVirtualPackage>,
        build_virtual_packages: Vec<GenericVirtualPackage>,
-        build_reporter: Arc<dyn CondaBuildReporter>,
+        build_reporter: Arc<dyn BuildReporter>,
+        build_id: usize,
     ) -> Result {
         let source_checkout = SourceCheckout {
             path: self.fetch_pinned_source(&source_spec.source).await?,
@@ -207,6 +213,7 @@ impl BuildContext {
                     .map(|t| t >= chrono::DateTime::<Utc>::from(modified_at))
                     .unwrap_or(false)
                 {
+                    build_reporter.on_build_cached(build_id);
                     tracing::debug!("found an up-to-date cached build.");
                     return Ok(build.record);
                 } else {
@@ -226,6 +233,7 @@ impl BuildContext {
                 }
             } else {
                 tracing::debug!("found a cached build");
+                build_reporter.on_build_cached(build_id);
                 // If there is no source info in the cache we assume it's still valid.
                 return Ok(build.record);
@@ -239,6 +247,7 @@
             .setup_protocol(SetupRequest {
                 source_dir: source_checkout.path.clone(),
                 build_tool_override: Default::default(),
+                build_id,
             })
             .await
             .map_err(BuildError::BuildFrontendSetup)?;
@@ -271,7 +280,7 @@
                         .key(),
                     ),
                 },
-                build_reporter.clone(),
+                build_reporter.as_conda_build_reporter(),
             )
             .await
             .map_err(|e| BuildError::BackendError(e.into()))?;
@@ -422,7 +431,8 @@
        host_virtual_packages: Vec<GenericVirtualPackage>,
        build_platform: Platform,
        build_virtual_packages: Vec<GenericVirtualPackage>,
-        metadata_reporter: Arc<dyn CondaMetadataReporter>,
+        metadata_reporter: Arc<dyn BuildMetadataReporter>,
+        build_id: usize,
     ) -> Result, BuildError> {
         let (cached_metadata, cache_entry) = self
             .source_metadata_cache
@@ -459,7 +469,7 @@
                 }
             } else {
                 tracing::debug!("found cached metadata.");
-
+                metadata_reporter.on_metadata_cached(build_id);
                 // No input hash so just assume it is still valid.
                 return Ok(source_metadata_to_records(
                     source,
@@ -475,6 +485,7 @@
             .setup_protocol(SetupRequest {
                 source_dir: source.path.clone(),
                 build_tool_override: Default::default(),
+                build_id,
             })
             .await
             .map_err(BuildError::BuildFrontendSetup)?;
@@ -504,7 +515,7 @@
                        .key(),
                    ),
                },
-                metadata_reporter.clone(),
+                metadata_reporter.as_conda_metadata_reporter().clone(),
             )
             .await
             .map_err(|e| BuildError::BackendError(e.into()))?;
diff --git a/src/build/reporters.rs b/src/build/reporters.rs
new file mode 100644
index 000000000..4def2bfbf
--- /dev/null
+++ b/src/build/reporters.rs
@@ -0,0 +1,55 @@
+use std::sync::Arc;
+
+use pixi_build_frontend::{CondaBuildReporter, CondaMetadataReporter};
+
+pub trait BuildMetadataReporter: CondaMetadataReporter {
+    /// Reports that the metadata has been cached.
+    fn on_metadata_cached(&self, build_id: usize);
+
+    /// Cast upwards
+    fn as_conda_metadata_reporter(self: Arc<Self>) -> Arc<dyn CondaMetadataReporter>;
+}
+
+/// Noop implementation of the BuildMetadataReporter trait.
+struct NoopBuildMetadataReporter;
+impl CondaMetadataReporter for NoopBuildMetadataReporter {
+    fn on_metadata_start(&self, _build_id: usize) -> usize {
+        0
+    }
+
+    fn on_metadata_end(&self, _operation: usize) {}
+}
+impl BuildMetadataReporter for NoopBuildMetadataReporter {
+    fn on_metadata_cached(&self, _build_id: usize) {}
+
+    fn as_conda_metadata_reporter(self: Arc<Self>) -> Arc<dyn CondaMetadataReporter> {
+        self
+    }
+}
+
+pub trait BuildReporter: CondaBuildReporter {
+    /// Reports that the build has been cached.
+    fn on_build_cached(&self, build_id: usize);
+
+    /// Cast upwards
+    fn as_conda_build_reporter(self: Arc<Self>) -> Arc<dyn CondaBuildReporter>;
+}
+
+/// Noop implementation of the BuildReporter trait.
+struct NoopBuildReporter;
+impl CondaBuildReporter for NoopBuildReporter {
+    fn on_build_start(&self, _build_id: usize) -> usize {
+        0
+    }
+
+    fn on_build_end(&self, _operation: usize) {}
+
+    fn on_build_output(&self, _operation: usize, _line: String) {}
+}
+impl BuildReporter for NoopBuildReporter {
+    fn on_build_cached(&self, _build_id: usize) {}
+
+    fn as_conda_build_reporter(self: Arc<Self>) -> Arc<dyn CondaBuildReporter> {
+        self
+    }
+}
diff --git a/src/cli/build.rs b/src/cli/build.rs
index 4dc3accb1..4bb03957a 100644
--- a/src/cli/build.rs
+++ b/src/cli/build.rs
@@ -1,8 +1,9 @@
-use std::path::PathBuf;
+use std::{path::PathBuf, sync::Arc, time::Duration};
 
 use clap::Parser;
+use indicatif::ProgressBar;
 use miette::{Context, IntoDiagnostic};
-use pixi_build_frontend::{NoopCondaBuildReporter, SetupRequest};
+use pixi_build_frontend::{CondaBuildReporter, SetupRequest};
 use pixi_build_types::{
     procedures::conda_build::CondaBuildParams, ChannelConfiguration, PlatformAndVirtualPackages,
 };
@@ -34,6 +35,46 @@ pub struct Args {
     pub output_dir: PathBuf,
 }
 
+struct ProgressReporter {
+    progress_bar: indicatif::ProgressBar,
+}
+
+impl ProgressReporter {
+    fn new(source: &str) -> Self {
+        let style = indicatif::ProgressStyle::default_bar()
+            .template("{spinner:.dim} {elapsed} {prefix} {wide_msg:.dim}")
+            .unwrap();
+        let pb = ProgressBar::new(0);
+        pb.set_style(style);
+        let progress = pixi_progress::global_multi_progress().add(pb);
+        progress.set_prefix(format!("building package: {}", source));
+        progress.enable_steady_tick(Duration::from_millis(100));
+
+        Self {
+            progress_bar: progress,
+        }
+    }
+}
+
+impl CondaBuildReporter for ProgressReporter {
+    /// Starts a progress bar that should currently be
+    /// [spinner] message
+    fn on_build_start(&self, _build_id: usize) -> usize {
+        // Create a new progress bar.
+        // Building the package
+        0
+    }
+
+    fn on_build_end(&self, _operation: usize) {
+        // Finish the progress bar.
+        self.progress_bar.finish_with_message("build completed");
+    }
+
+    fn on_build_output(&self, _operation: usize, line: String) {
+        self.progress_bar.suspend(|| eprintln!("{}", line))
+    }
+}
+
 pub async fn execute(args: Args) -> miette::Result<()> {
     let project = Project::load_or_else_discover(args.project_config.manifest_path.as_deref())?
         .with_cli_config(args.config_cli);
@@ -48,11 +89,11 @@ pub async fn execute(args: Args) -> miette::Result<()> {
         .setup_protocol(SetupRequest {
             source_dir: project.root().to_path_buf(),
             build_tool_override: Default::default(),
+            build_id: 0,
         })
         .await
         .into_diagnostic()
         .wrap_err("unable to setup the build-backend to build the project")?;
-    let conda_build_noop = NoopCondaBuildReporter::new();
 
     // Construct a temporary directory to build the package in. This path is also
     // automatically removed after the build finishes.
     let pixi_dir = &project.pixi_dir();
@@ -71,6 +112,7 @@ pub async fn execute(args: Args) -> miette::Result<()> {
         .into_diagnostic()
         .context("failed to create temporary working directory in the .pixi directory")?;
 
+    let progress = Arc::new(ProgressReporter::new(project.name()));
     // Build platform virtual packages
     let build_platform_virtual_packages: Vec<GenericVirtualPackage> = project
         .default_environment()
@@ -111,7 +153,7 @@ pub async fn execute(args: Args) -> miette::Result<()> {
                 outputs: None,
                 work_directory: work_dir.path().to_path_buf(),
             },
-            conda_build_noop.clone(),
+            progress.clone(),
         )
         .await
         .wrap_err("during the building of the project the following error occurred")?;
diff --git a/src/environment.rs b/src/environment.rs
index 0655764e8..f9f045b00 100644
--- a/src/environment.rs
+++ b/src/environment.rs
@@ -10,9 +10,11 @@ use dialoguer::theme::ColorfulTheme;
 use distribution_types::{InstalledDist, Name};
 use fancy_display::FancyDisplay;
 use futures::{stream, StreamExt, TryStreamExt};
+use indicatif::ProgressBar;
 use itertools::{Either, Itertools};
 use miette::{IntoDiagnostic, WrapErr};
-use pixi_build_frontend::NoopCondaBuildReporter;
+use parking_lot::Mutex;
+use pixi_build_frontend::CondaBuildReporter;
 use pixi_consts::consts;
 use pixi_manifest::{EnvironmentName, FeaturesExt, SystemRequirements};
 use pixi_progress::{await_in_progress, global_multi_progress};
@@ -25,11 +27,12 @@ use rattler_conda_types::{GenericVirtualPackage, Platform, PrefixRecord, RepoDat
 use rattler_lock::{PypiIndexes, PypiPackageData, PypiPackageEnvironmentData};
 use reqwest_middleware::ClientWithMiddleware;
 use serde::{Deserialize, Serialize};
+use std::time::Duration;
 use tokio::sync::Semaphore;
 use url::Url;
 
 use crate::{
-    build::BuildContext,
+    build::{BuildContext, BuildReporter},
     install_pypi,
     lock_file::{UpdateLockFileOptions, UvResolutionContext},
     prefix::Prefix,
@@ -488,6 +491,113 @@ impl PythonStatus {
     }
 }
 
+struct CondaBuildProgress {
+    main_progress: ProgressBar,
+    build_progress: Mutex<Vec<(String, ProgressBar)>>,
+}
+
+impl CondaBuildProgress {
+    fn new(num_packages: u64) -> Self {
+        // Create a new progress bar.
+        let pb = ProgressBar::hidden();
+        pb.set_length(num_packages);
+        let pb = pixi_progress::global_multi_progress().add(pb);
+        pb.set_style(pixi_progress::default_progress_style());
+        // Building the package
+        pb.set_prefix("building packages");
+        pb.enable_steady_tick(Duration::from_millis(100));
+
+        Self {
+            main_progress: pb,
+            build_progress: Mutex::new(Vec::default()),
+        }
+    }
+}
+
+impl CondaBuildProgress {
+    /// Associate a progress bar with a build identifier, and get a build id back
+    pub fn associate(&self, identifier: &str) -> usize {
+        let mut locked = self.build_progress.lock();
+        let after = if locked.is_empty() {
+            &self.main_progress
+        } else {
+            &locked.last().unwrap().1
+        };
+
+        let pb = pixi_progress::global_multi_progress().insert_after(after, ProgressBar::hidden());
+
+        locked.push((identifier.to_owned(), pb));
+        locked.len() - 1
+    }
+
+    pub fn end_progress_for(&self, build_id: usize, alternative_message: Option<String>) {
+        self.main_progress.inc(1);
+        if self.main_progress.position()
+            == self
+                .main_progress
+                .length()
+                .expect("expected length to be set for progress")
+        {
+            self.main_progress.finish_and_clear();
+            // Clear all the build progress bars
+            for (_, pb) in self.build_progress.lock().iter() {
+                pb.finish_and_clear();
+            }
+            return;
+        }
+        let locked = self.build_progress.lock();
+
+        // Finish the build progress bar
+        let (identifier, pb) = locked.get(build_id).unwrap();
+        // If there is an alternative message, use that
+        let msg = if let Some(msg) = alternative_message {
+            pb.set_style(indicatif::ProgressStyle::with_template(" {msg}").unwrap());
+            msg
+        } else {
+            // Otherwise show the default message
+            pb.set_style(
+                indicatif::ProgressStyle::with_template(" {msg} in {elapsed}").unwrap(),
+            );
+            "built".to_string()
+        };
+        pb.finish_with_message(format!("✔ {msg}: {identifier}"));
+    }
+}
+
+impl CondaBuildReporter for CondaBuildProgress {
+    fn on_build_start(&self, build_id: usize) -> usize {
+        // Actually show the progress
+        let locked = self.build_progress.lock();
+        let (identifier, pb) = locked.get(build_id).unwrap();
+        let template =
+            indicatif::ProgressStyle::with_template(" {spinner:.green} {msg} {elapsed}")
+                .unwrap();
+        pb.set_style(template);
+        pb.set_message(format!("building {identifier}"));
+        pb.enable_steady_tick(Duration::from_millis(100));
+        // We keep operation and build id the same
+        build_id
+    }
+
+    fn on_build_end(&self, operation: usize) {
+        self.end_progress_for(operation, None);
+    }
+
+    fn on_build_output(&self, _operation: usize, line: String) {
+        self.main_progress.suspend(|| eprintln!("{}", line));
+    }
+}
+
+impl BuildReporter for CondaBuildProgress {
+    fn on_build_cached(&self, build_id: usize) {
+        self.end_progress_for(build_id, Some("cached".to_string()));
+    }
+
+    fn as_conda_build_reporter(self: Arc<Self>) -> Arc<dyn CondaBuildReporter> {
+        self.clone()
+    }
+}
+
 /// Updates the environment to contain the packages from the specified lock-file
 #[allow(clippy::too_many_arguments)]
 pub async fn update_prefix_conda(
@@ -514,11 +624,20 @@ pub async fn update_prefix_conda(
             PixiRecord::Source(record) => Either::Right(record),
         });
 
+    let mut progress_reporter = None;
+    let source_records_length = source_records.len();
     // Build conda packages out of the source records
     let mut processed_source_packages = stream::iter(source_records)
         .map(Ok)
        .and_then(|record| {
-            let noop_build_reporter = NoopCondaBuildReporter::new();
+            // If we don't have a progress reporter, create one.
+            // This is done so that the progress bars are not displayed if there are no source packages.
+            let progress_reporter = progress_reporter
+                .get_or_insert_with(|| {
+                    Arc::new(CondaBuildProgress::new(source_records_length as u64))
+                })
+                .clone();
+            let build_id = progress_reporter.associate(record.package_record.name.as_source());
             let build_context = &build_context;
             let channels = &channels;
             let virtual_packages = &virtual_packages;
@@ -530,7 +649,8 @@
                     platform,
                     virtual_packages.clone(),
                     virtual_packages.clone(),
-                    noop_build_reporter.clone(),
+                    progress_reporter.clone(),
+                    build_id,
                 )
                 .await
             }
diff --git a/src/lock_file/reporter/mod.rs b/src/lock_file/reporter/mod.rs
index 608ddf42a..b8996d410 100644
--- a/src/lock_file/reporter/mod.rs
+++ b/src/lock_file/reporter/mod.rs
@@ -3,5 +3,6 @@ mod progress_bar;
 mod purl_amend;
 
 pub(crate) use gateway_reporter::GatewayProgressReporter;
+pub(crate) use progress_bar::CondaMetadataProgress;
 pub(crate) use progress_bar::SolveProgressBar;
 pub(crate) use purl_amend::PurlAmendReporter;
diff --git a/src/lock_file/reporter/progress_bar.rs b/src/lock_file/reporter/progress_bar.rs
index 5d1350cca..c0fd833d5 100644
--- a/src/lock_file/reporter/progress_bar.rs
+++ b/src/lock_file/reporter/progress_bar.rs
@@ -1,12 +1,13 @@
 use std::{borrow::Cow, fmt::Write, sync::Arc, time::Duration};
 
 use indicatif::{HumanBytes, ProgressBar, ProgressState};
+use pixi_build_frontend::CondaMetadataReporter;
 use pixi_consts::consts;
 use pypi_mapping::Reporter;
 use rattler_conda_types::Platform;
 
 use super::PurlAmendReporter;
-use crate::project::grouped_environment::GroupedEnvironmentName;
+use crate::{build::BuildMetadataReporter, project::grouped_environment::GroupedEnvironmentName};
 
 /// A helper struct that manages a progress-bar for solving an environment.
 #[derive(Clone)]
@@ -104,3 +105,70 @@ impl SolveProgressBar {
         Arc::new(PurlAmendReporter::new(self.clone()))
     }
 }
+
+/// Struct that manages the progress for getting source metadata.
+pub(crate) struct CondaMetadataProgress {
+    progress_bar: ProgressBar,
+}
+
+impl CondaMetadataProgress {
+    /// Creates a new progress bar for the metadata, and activates it
+    pub(crate) fn new(original_progress: &ProgressBar, num_packages: u64) -> Self {
+        // Create a new progress bar.
+        let pb = pixi_progress::global_multi_progress()
+            .insert_after(original_progress, ProgressBar::hidden());
+        pb.set_length(num_packages);
+        pb.set_style(pixi_progress::default_progress_style());
+        // Building the package
+        pb.set_prefix("retrieving metadata");
+        pb.enable_steady_tick(Duration::from_millis(100));
+        Self { progress_bar: pb }
+    }
+}
+
+impl CondaMetadataProgress {
+    /// Use this method to increment the progress bar
+    /// It will also check if the progress bar is finished
+    pub fn increment(&self) {
+        self.progress_bar.inc(1);
+        self.check_finish();
+    }
+
+    /// Check if the progress bar is finished
+    /// and clears it
+    fn check_finish(&self) {
+        if self.progress_bar.position()
+            == self
+                .progress_bar
+                .length()
+                .expect("expected length to be set for progress")
+        {
+            self.progress_bar.set_message("");
+            self.progress_bar.finish_and_clear();
+        }
+    }
+}
+
+impl CondaMetadataReporter for CondaMetadataProgress {
+    fn on_metadata_start(&self, _build_id: usize) -> usize {
+        // Started metadata extraction
+        self.progress_bar.set_message("extracting");
+        0
+    }
+
+    fn on_metadata_end(&self, _operation: usize) {
+        // Finished metadata extraction
+        self.increment();
+    }
+}
+
+// This is the same but for the cached variants
+impl BuildMetadataReporter for CondaMetadataProgress {
+    fn on_metadata_cached(&self, _build_id: usize) {
+        self.increment();
+    }
+
+    fn as_conda_metadata_reporter(self: Arc<Self>) -> Arc<dyn CondaMetadataReporter> {
+        self.clone()
+    }
+}
diff --git a/src/lock_file/update.rs b/src/lock_file/update.rs
index 9aa90842d..ffe339826 100644
--- a/src/lock_file/update.rs
+++ b/src/lock_file/update.rs
@@ -16,7 +16,6 @@ use indexmap::IndexSet;
 use indicatif::ProgressBar;
 use itertools::Itertools;
 use miette::{Diagnostic, IntoDiagnostic, LabeledSpan, MietteDiagnostic, Report, WrapErr};
-use pixi_build_frontend::NoopCondaMetadataReporter;
 use pixi_config::get_cache_dir;
 use pixi_consts::consts;
 use pixi_manifest::{EnvironmentName, FeaturesExt, HasEnvironmentDependencies, HasFeaturesIter};
@@ -49,7 +48,8 @@ use crate::{
         self, write_environment_file, EnvironmentFile, LockFileUsage, PerEnvironmentAndPlatform,
         PerGroup, PerGroupAndPlatform, PythonStatus,
     },
-    load_lock_file, lock_file,
+    load_lock_file,
+    lock_file::{self, reporter::CondaMetadataProgress},
     prefix::Prefix,
     project::{
         grouped_environment::{GroupedEnvironment, GroupedEnvironmentName},
@@ -1643,10 +1643,17 @@ async fn spawn_solve_conda_environment_task(
         .collect::<Result<Vec<_>, _>>()
         .into_diagnostic()?;
 
-    let noop_conda_metadata_reporter = NoopCondaMetadataReporter::new();
+    let mut metadata_progress = None;
     let mut source_match_specs = Vec::new();
     let source_futures = FuturesUnordered::new();
-    for (name, source_spec) in source_specs.iter() {
+    for (build_id, (name, source_spec)) in source_specs.iter().enumerate() {
+        // Create a metadata reporter if it doesn't exist yet.
+        let metadata_reporter = metadata_progress.get_or_insert_with(|| {
+            Arc::new(CondaMetadataProgress::new(
+                &pb.pb,
+                source_specs.len() as u64,
+            ))
+        });
         source_futures.push(
             build_context
                 .extract_source_metadata(
@@ -1656,7 +1663,8 @@ async fn spawn_solve_conda_environment_task(
                     virtual_packages.clone(),
                     platform,
                     virtual_packages.clone(),
-                    noop_conda_metadata_reporter.clone(),
+                    metadata_reporter.clone(),
+                    build_id,
                 )
                 .map_err(|e| {
                     Report::new(e).wrap_err(format!(
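For reference, a minimal sketch of how a downstream consumer might implement the new build-id-based CondaBuildReporter trait introduced in this diff. The LoggingReporter type, its field, and the main function are hypothetical illustration, not part of this change; only the trait path and method signatures come from the diff above.

use std::sync::{
    atomic::{AtomicUsize, Ordering},
    Arc,
};

use pixi_build_frontend::CondaBuildReporter;

// Hypothetical reporter that tags every forwarded backend stderr line with
// the operation it belongs to. Only the trait methods come from the diff.
struct LoggingReporter {
    next_operation: AtomicUsize,
}

impl CondaBuildReporter for LoggingReporter {
    fn on_build_start(&self, build_id: usize) -> usize {
        eprintln!("[build {build_id}] started");
        // Hand out a fresh operation id; CondaBuildProgress in the diff
        // instead reuses the build id as the operation id.
        self.next_operation.fetch_add(1, Ordering::Relaxed)
    }

    fn on_build_end(&self, operation: usize) {
        eprintln!("[operation {operation}] finished");
    }

    fn on_build_output(&self, operation: usize, line: String) {
        // Lines arrive here from the stderr_stream task that is driven
        // alongside the RPC request future in Protocol::conda_build.
        eprintln!("[operation {operation}] {line}");
    }
}

fn main() {
    // A shared reporter like this can then be handed to the frontend's
    // conda_build entry point together with the CondaBuildParams.
    let reporter: Arc<dyn CondaBuildReporter> = Arc::new(LoggingReporter {
        next_operation: AtomicUsize::new(0),
    });
    let _ = reporter;
}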