diff --git a/Cargo.lock b/Cargo.lock
index b708af82d..5ff8fbfcd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -40,6 +40,18 @@ dependencies = [
"cpufeatures",
]
+[[package]]
+name = "ahash"
+version = "0.8.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b79b82693f705137f8fb9b37871d99e4f9a7df12b917eed79c3d3954830a60b"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "version_check",
+ "zerocopy",
+]
+
[[package]]
name = "aho-corasick"
version = "1.1.2"
@@ -55,6 +67,12 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd"
+[[package]]
+name = "allocator-api2"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
+
[[package]]
name = "android-tzdata"
version = "0.1.1"
@@ -70,6 +88,19 @@ dependencies = [
"libc",
]
+[[package]]
+name = "ansi-to-tui"
+version = "4.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8438af3d7e7dccdb98eff55e5351587d9bec2294daff505fc9a061bd14d22db0"
+dependencies = [
+ "nom",
+ "ratatui",
+ "simdutf8",
+ "smallvec",
+ "thiserror",
+]
+
[[package]]
name = "anstream"
version = "0.6.12"
@@ -575,6 +606,21 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bf2a5fb3207c12b5d208ebc145f967fea5cac41a021c37417ccc31ba40f39ee"
+[[package]]
+name = "cassowary"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"
+
+[[package]]
+name = "castaway"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a17ed5635fc8536268e5d4de1e22e81ac34419e5f052d4d51f4e01dcc263fcc"
+dependencies = [
+ "rustversion",
+]
+
[[package]]
name = "cc"
version = "1.0.88"
@@ -728,6 +774,19 @@ dependencies = [
"unicode-width",
]
+[[package]]
+name = "compact_str"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f86b9c4c00838774a6d902ef931eff7470720c51d90c2e32cfe15dc304737b3f"
+dependencies = [
+ "castaway",
+ "cfg-if",
+ "itoa",
+ "ryu",
+ "static_assertions",
+]
+
[[package]]
name = "concurrent-queue"
version = "2.4.0"
@@ -838,8 +897,12 @@ checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df"
dependencies = [
"bitflags 2.4.2",
"crossterm_winapi",
+ "futures-core",
"libc",
+ "mio",
"parking_lot",
+ "signal-hook",
+ "signal-hook-mio",
"winapi",
]
@@ -1496,6 +1559,10 @@ name = "hashbrown"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
+dependencies = [
+ "ahash",
+ "allocator-api2",
+]
[[package]]
name = "heck"
@@ -1793,6 +1860,12 @@ dependencies = [
"vt100",
]
+[[package]]
+name = "indoc"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
+
[[package]]
name = "inout"
version = "0.1.3"
@@ -2013,6 +2086,15 @@ version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
+[[package]]
+name = "lru"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc"
+dependencies = [
+ "hashbrown 0.14.3",
+]
+
[[package]]
name = "lzma-sys"
version = "0.1.20"
@@ -2197,6 +2279,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09"
dependencies = [
"libc",
+ "log",
"wasi",
"windows-sys 0.48.0",
]
@@ -2531,6 +2614,12 @@ dependencies = [
"subtle",
]
+[[package]]
+name = "paste"
+version = "1.0.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c"
+
[[package]]
name = "pathdiff"
version = "0.2.1"
@@ -2895,6 +2984,26 @@ dependencies = [
"getrandom",
]
+[[package]]
+name = "ratatui"
+version = "0.26.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bcb12f8fbf6c62614b0d56eb352af54f6a22410c3b079eb53ee93c7b97dd31d8"
+dependencies = [
+ "bitflags 2.4.2",
+ "cassowary",
+ "compact_str",
+ "crossterm",
+ "indoc",
+ "itertools",
+ "lru",
+ "paste",
+ "stability",
+ "strum 0.26.1",
+ "unicode-segmentation",
+ "unicode-width",
+]
+
[[package]]
name = "rattler"
version = "0.19.0"
@@ -2945,6 +3054,7 @@ dependencies = [
name = "rattler-build"
version = "0.13.0"
dependencies = [
+ "ansi-to-tui",
"anyhow",
"async-once-cell",
"base64 0.22.0",
@@ -2956,6 +3066,7 @@ dependencies = [
"comfy-table",
"console",
"content_inspector",
+ "crossterm",
"dunce",
"flate2",
"fs-err",
@@ -2979,6 +3090,7 @@ dependencies = [
"os_pipe",
"pathdiff",
"petgraph",
+ "ratatui",
"rattler",
"rattler_conda_types",
"rattler_digest 0.19.0",
@@ -3005,6 +3117,7 @@ dependencies = [
"tempfile",
"terminal_size",
"thiserror",
+ "throbber-widgets-tui",
"tokio",
"tokio-util",
"toml",
@@ -3013,6 +3126,7 @@ dependencies = [
"tracing-indicatif",
"tracing-subscriber",
"tracing-test",
+ "tui-input",
"url",
"walkdir",
"which",
@@ -3928,6 +4042,27 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+[[package]]
+name = "signal-hook"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801"
+dependencies = [
+ "libc",
+ "signal-hook-registry",
+]
+
+[[package]]
+name = "signal-hook-mio"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af"
+dependencies = [
+ "libc",
+ "mio",
+ "signal-hook",
+]
+
[[package]]
name = "signal-hook-registry"
version = "1.4.1"
@@ -3937,6 +4072,12 @@ dependencies = [
"libc",
]
+[[package]]
+name = "simdutf8"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a"
+
[[package]]
name = "similar"
version = "2.4.0"
@@ -4036,6 +4177,16 @@ dependencies = [
"xxhash-rust",
]
+[[package]]
+name = "stability"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebd1b177894da2a2d9120208c3386066af06a488255caabc5de8ddca22dbc3ce"
+dependencies = [
+ "quote",
+ "syn 1.0.109",
+]
+
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
@@ -4287,6 +4438,16 @@ dependencies = [
"once_cell",
]
+[[package]]
+name = "throbber-widgets-tui"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a99cd25e625314594c7eef24bfad0e7948cb37f697ca54c6a97f7987c92d4a49"
+dependencies = [
+ "rand",
+ "ratatui",
+]
+
[[package]]
name = "time"
version = "0.3.34"
@@ -4591,6 +4752,16 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
+[[package]]
+name = "tui-input"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3e785f863a3af4c800a2a669d0b64c879b538738e352607e2624d03f868dc01"
+dependencies = [
+ "crossterm",
+ "unicode-width",
+]
+
[[package]]
name = "typenum"
version = "1.17.0"
@@ -4644,6 +4815,12 @@ dependencies = [
"tinyvec",
]
+[[package]]
+name = "unicode-segmentation"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
+
[[package]]
name = "unicode-width"
version = "0.1.11"
@@ -5222,6 +5399,26 @@ dependencies = [
"zvariant",
]
+[[package]]
+name = "zerocopy"
+version = "0.7.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.7.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.52",
+]
+
[[package]]
name = "zip"
version = "0.6.6"
diff --git a/Cargo.toml b/Cargo.toml
index 7adefa5e8..687028f29 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -16,6 +16,7 @@ documentation = "https://prefix-dev.github.io/rattler-build"
default = ['native-tls']
native-tls = ['reqwest/native-tls', 'rattler/native-tls', 'rattler_installs_packages/native-tls']
rustls-tls = ['reqwest/rustls-tls', 'reqwest/rustls-tls-native-roots', 'rattler/rustls-tls', 'rattler_installs_packages/rustls-tls']
+tui = ['ratatui', 'crossterm', 'ansi-to-tui', 'throbber-widgets-tui', 'tui-input']
[dependencies]
serde = { version = "1.0.197", features = ["derive"] }
@@ -109,6 +110,11 @@ rattler_installs_packages = { version = "0.8.1", default-features = false }
async-once-cell = "0.5.3"
terminal_size = "0.3.0"
memchr = "2.7.1"
+ratatui = { version = "0.26.1", optional = true }
+crossterm = { version = "0.27.0", features = ["event-stream"], optional = true }
+ansi-to-tui = { version = "4.0.1", optional = true }
+throbber-widgets-tui = { version = "0.4.1", optional = true }
+tui-input = { version = "0.8.0", optional = true }
[dev-dependencies]
insta = { version = "1.36.1", features = ["yaml"] }
diff --git a/README.md b/README.md
index db47bd20c..4a9b3e031 100644
--- a/README.md
+++ b/README.md
@@ -114,6 +114,8 @@ The `build` command takes a `--recipe recipe.yaml` as input and produces a
package as output. The `test` subcommand can be used to test existing packages
(tests are shipped with the package).
+There is also a [terminal user interface (TUI)](https://prefix-dev.github.io/rattler-build/latest/tui/) that can help with building multiple packages and easily viewing logs.
+
### The recipe format
> **Note** You can find all examples below in the [`examples`](https://github.com/prefix-dev/rattler-build/tree/main/examples)
@@ -343,4 +345,5 @@ IF %ERRORLEVEL% NEQ 0 exit 1
ninja install --verbose
```
+
diff --git a/docs/tui.md b/docs/tui.md
new file mode 100644
index 000000000..cf48c3548
--- /dev/null
+++ b/docs/tui.md
@@ -0,0 +1,25 @@
+# Terminal User Interface
+
+`rattler-build` offers a terminal user interface for building multiple packages and viewing the logs.
+
+![rattler-build-tui](https://github.com/prefix-dev/rattler-build/assets/24392180/52138fd0-3c53-4028-a2c8-3099222c368a)
+
+To launch the TUI, run the `build` command with the `--tui` flag as shown below:
+
+```shell
+$ rattler-build build -r recipe.yaml --tui
+```
+
+!!! note
+    `rattler-build-tui` is gated behind the `tui` feature flag to avoid extra dependencies. Build the project with the `--features tui` flag to enable the TUI functionality.
+
+#### Key Bindings
+
+| Key | Action |
+| -------------------------------------------------------------- | ------------------------------------------------ |
+| Enter | Build package |
+| j/k | Next/previous package |
+| up/down/left/right | Scroll logs |
+| e | Edit recipe (via `$EDITOR`) |
+| c, : | Open command prompt (available commands: `edit`) |
+| q, ctrl-c, esc                                                  | Quit                                             |
diff --git a/mkdocs.yml b/mkdocs.yml
index 73c502dbb..bfa24e626 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -113,6 +113,7 @@ nav:
- Compilers and cross compilation: compilers.md
- CLI Usage: cli_usage.md
+ - Terminal User Interface (TUI): tui.md
- Authentication & upload: authentication_and_upload.md
- Automatic recipe linting: automatic_linting.md
diff --git a/src/build.rs b/src/build.rs
index 15798e41e..c6199cfde 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -14,8 +14,13 @@ use crate::{package_test, tool_configuration};
/// and execute the build script. Returns the path to the resulting package.
pub async fn run_build(
output: Output,
- tool_configuration: tool_configuration::Configuration,
+ tool_configuration: &tool_configuration::Configuration,
) -> miette::Result<(Output, PathBuf)> {
+ output
+ .build_configuration
+ .directories
+ .create_build_dir()
+ .into_diagnostic()?;
if output.build_string().is_none() {
miette::bail!("Build string is not set for {:?}", output.name());
}
@@ -32,12 +37,12 @@ pub async fn run_build(
.into_diagnostic()?;
let output = output
- .fetch_sources(&tool_configuration)
+ .fetch_sources(tool_configuration)
.await
.into_diagnostic()?;
let output = output
- .resolve_dependencies(&tool_configuration)
+ .resolve_dependencies(tool_configuration)
.await
.into_diagnostic()?;
@@ -45,7 +50,7 @@ pub async fn run_build(
// Package all the new files
let (result, paths_json) = output
- .create_package(&tool_configuration)
+ .create_package(tool_configuration)
.await
.into_diagnostic()?;
diff --git a/src/console_utils.rs b/src/console_utils.rs
index d52dda7aa..690218403 100644
--- a/src/console_utils.rs
+++ b/src/console_utils.rs
@@ -11,6 +11,7 @@ use std::{
};
use tracing::{field, Level};
use tracing_core::{span::Id, Event, Field, Subscriber};
+use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::{
filter::{Directive, ParseError},
fmt::{
@@ -20,7 +21,6 @@ use tracing_subscriber::{
},
layer::{Context, SubscriberExt},
registry::LookupSpan,
- util::SubscriberInitExt,
EnvFilter, Layer,
};
@@ -486,6 +486,9 @@ pub fn init_logging(
log_style: &LogStyle,
verbosity: &Verbosity,
color: &Color,
+ #[cfg(feature = "tui")] tui_log_sender: Option<
+        tokio::sync::mpsc::UnboundedSender<String>,
+ >,
 ) -> Result<LoggingOutputHandler, ParseError> {
let log_handler = LoggingOutputHandler::default();
@@ -513,6 +516,20 @@ pub fn init_logging(
let registry = registry.with(GitHubActionsLayer(github_integration_enabled()));
+ #[cfg(feature = "tui")]
+ {
+ if let Some(tui_log_sender) = tui_log_sender {
+ log_handler.set_progress_bars_hidden(true);
+ let writer = crate::tui::logger::TuiOutputHandler {
+ log_sender: tui_log_sender,
+ };
+ registry
+ .with(fmt::layer().with_writer(writer).without_time())
+ .init();
+ return Ok(log_handler);
+ }
+ }
+
match log_style {
LogStyle::Fancy => {
registry.with(log_handler.clone()).init();
@@ -529,11 +546,7 @@ pub fn init_logging(
LogStyle::Json => {
log_handler.set_progress_bars_hidden(true);
registry
- .with(
- tracing_subscriber::fmt::layer()
- .json()
- .with_writer(io::stderr),
- )
+ .with(fmt::layer().json().with_writer(io::stderr))
.init();
}
}
diff --git a/src/lib.rs b/src/lib.rs
index bf5849615..bf47d9f55 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,10 +1,11 @@
#![deny(missing_docs)]
-//! The library pieces of rattler-build
+//! rattler-build library.
pub mod build;
pub mod console_utils;
pub mod metadata;
+pub mod opt;
pub mod package_test;
pub mod packaging;
pub mod recipe;
@@ -14,6 +15,8 @@ pub mod selectors;
pub mod source;
pub mod system_tools;
pub mod tool_configuration;
+#[cfg(feature = "tui")]
+pub mod tui;
pub mod used_variables;
pub mod utils;
pub mod variant_config;
@@ -23,6 +26,495 @@ pub mod hash;
mod linux;
mod macos;
mod post_process;
+pub mod rebuild;
pub mod recipe_generator;
mod unix;
+pub mod upload;
mod windows;
+
+use chrono::{DateTime, Utc};
+use dunce::canonicalize;
+use fs_err as fs;
+use miette::IntoDiagnostic;
+use rattler_conda_types::{package::ArchiveType, Platform};
+use std::{
+ collections::BTreeMap,
+ env::current_dir,
+ path::{Path, PathBuf},
+ str::FromStr,
+ sync::{Arc, Mutex},
+};
+
+use {
+ build::run_build,
+ console_utils::LoggingOutputHandler,
+ hash::HashInfo,
+ metadata::{
+ BuildConfiguration, BuildSummary, Directories, PackageIdentifier, PackagingSettings,
+ },
+ opt::*,
+ package_test::TestConfiguration,
+ recipe::{
+ parser::{find_outputs_from_src, Recipe},
+ ParsingError,
+ },
+ selectors::SelectorConfig,
+ system_tools::SystemTools,
+ variant_config::{ParseErrors, VariantConfig},
+};
+
+/// Directories that will be created for the build process.
+#[derive(Debug, Clone)]
+pub struct DirectoryInfo {
+ /// Name of the directory.
+ pub name: String,
+ /// Recipe path.
+ pub recipe_path: PathBuf,
+ /// Output directory.
+ pub output_dir: PathBuf,
+ /// No build ID flag.
+ pub no_build_id: bool,
+ /// Timestamp.
+    pub timestamp: DateTime<Utc>,
+}
+
+/// Wrapper for multiple outputs.
+#[derive(Clone, Debug)]
+pub struct BuildOutput {
+ /// Build outputs.
+    pub outputs: Vec<metadata::Output>,
+ /// Tool configuration.
+ pub tool_config: tool_configuration::Configuration,
+ /// Recipe path.
+ pub recipe_path: PathBuf,
+}
+
+/// Returns the recipe path.
+pub fn get_recipe_path(path: &Path) -> miette::Result<PathBuf> {
+ let recipe_path = canonicalize(path);
+ if let Err(e) = &recipe_path {
+ match e.kind() {
+ std::io::ErrorKind::NotFound => {
+ return Err(miette::miette!(
+ "The file {} could not be found.",
+ path.to_string_lossy()
+ ));
+ }
+ std::io::ErrorKind::PermissionDenied => {
+ return Err(miette::miette!(
+ "Permission denied when trying to access the file {}.",
+ path.to_string_lossy()
+ ));
+ }
+ _ => {
+ return Err(miette::miette!(
+ "An unknown error occurred while trying to access the file {}: {:?}",
+ path.to_string_lossy(),
+ e
+ ));
+ }
+ }
+ }
+ let mut recipe_path = recipe_path.into_diagnostic()?;
+
+ // If the recipe_path is a directory, look for "recipe.yaml" in the directory.
+ if recipe_path.is_dir() {
+ let recipe_yaml_path = recipe_path.join("recipe.yaml");
+ if recipe_yaml_path.exists() && recipe_yaml_path.is_file() {
+ recipe_path = recipe_yaml_path;
+ } else {
+ return Err(miette::miette!(
+ "'recipe.yaml' not found in the directory {}",
+ path.to_string_lossy()
+ ));
+ }
+ }
+
+ Ok(recipe_path)
+}
+
+/// Returns the output for the build.
+pub async fn get_build_output(
+ args: BuildOpts,
+ recipe_path: PathBuf,
+ fancy_log_handler: LoggingOutputHandler,
+) -> miette::Result<BuildOutput> {
+ let output_dir = args
+ .common
+ .output_dir
+ .unwrap_or(current_dir().into_diagnostic()?.join("output"));
+ if output_dir.starts_with(
+ recipe_path
+ .parent()
+ .expect("Could not get parent of recipe"),
+ ) {
+ return Err(miette::miette!(
+ "The output directory cannot be in the recipe directory.\nThe current output directory is: {}\nSelect a different output directory with the --output-dir option or set the CONDA_BLD_PATH environment variable"
+ , output_dir.to_string_lossy()));
+ }
+
+ let recipe_text = fs::read_to_string(&recipe_path).into_diagnostic()?;
+
+ let host_platform = if let Some(target_platform) = args.target_platform {
+ Platform::from_str(&target_platform).into_diagnostic()?
+ } else {
+ Platform::current()
+ };
+
+ let selector_config = SelectorConfig {
+ // We ignore noarch here
+ target_platform: host_platform,
+ hash: None,
+ build_platform: Platform::current(),
+ variant: BTreeMap::new(),
+ experimental: args.common.experimental,
+ };
+
+ let span = tracing::info_span!("Finding outputs from recipe");
+ tracing::info!("Target platform: {}", host_platform);
+ let enter = span.enter();
+ // First find all outputs from the recipe
+ let outputs = find_outputs_from_src(&recipe_text)?;
+
+ let variant_config =
+ VariantConfig::from_files(&args.variant_config, &selector_config).into_diagnostic()?;
+
+ let outputs_and_variants =
+ variant_config.find_variants(&outputs, &recipe_text, &selector_config)?;
+
+ tracing::info!("Found {} variants\n", outputs_and_variants.len());
+ for discovered_output in &outputs_and_variants {
+ tracing::info!(
+ "Build variant: {}-{}-{}",
+ discovered_output.name,
+ discovered_output.version,
+ discovered_output.build_string
+ );
+
+ let mut table = comfy_table::Table::new();
+ table
+ .load_preset(comfy_table::presets::UTF8_FULL_CONDENSED)
+ .apply_modifier(comfy_table::modifiers::UTF8_ROUND_CORNERS)
+ .set_header(vec!["Variant", "Version"]);
+ for (key, value) in discovered_output.used_vars.iter() {
+ table.add_row(vec![key, value]);
+ }
+ tracing::info!("\n{}\n", table);
+ }
+ drop(enter);
+
+ let client = tool_configuration::reqwest_client_from_auth_storage(args.common.auth_file);
+
+ let tool_config = tool_configuration::Configuration {
+ client,
+ fancy_log_handler: fancy_log_handler.clone(),
+ no_clean: args.keep_build,
+ no_test: args.no_test,
+ use_zstd: args.common.use_zstd,
+ use_bz2: args.common.use_bz2,
+ };
+
+ let mut subpackages = BTreeMap::new();
+ let mut outputs = Vec::new();
+ for discovered_output in outputs_and_variants {
+ let hash =
+ HashInfo::from_variant(&discovered_output.used_vars, &discovered_output.noarch_type);
+
+ let selector_config = SelectorConfig {
+ variant: discovered_output.used_vars.clone(),
+ hash: Some(hash.clone()),
+ target_platform: selector_config.target_platform,
+ build_platform: selector_config.build_platform,
+ experimental: args.common.experimental,
+ };
+
+ let recipe =
+ Recipe::from_node(&discovered_output.node, selector_config).map_err(|err| {
+ let errs: ParseErrors = err
+ .into_iter()
+ .map(|err| ParsingError::from_partial(&recipe_text, err))
+                .collect::<Vec<ParsingError>>()
+ .into();
+ errs
+ })?;
+
+ if recipe.build().skip() {
+ tracing::info!(
+ "Skipping build for variant: {:#?}",
+ discovered_output.used_vars
+ );
+ continue;
+ }
+
+ subpackages.insert(
+ recipe.package().name().clone(),
+ PackageIdentifier {
+ name: recipe.package().name().clone(),
+ version: recipe.package().version().to_owned(),
+ build_string: recipe
+ .build()
+ .string()
+ .expect("Shouldn't be unset, needs major refactoring, for handling this better")
+ .to_owned(),
+ },
+ );
+
+ let name = recipe.package().name().clone();
+ // Add the channels from the args and by default always conda-forge
+ let channels = args
+ .channel
+ .clone()
+ .unwrap_or_else(|| vec!["conda-forge".to_string()]);
+
+ let timestamp = chrono::Utc::now();
+
+ let output = metadata::Output {
+ recipe,
+ build_configuration: BuildConfiguration {
+ target_platform: discovered_output.target_platform,
+ host_platform,
+ build_platform: Platform::current(),
+ hash,
+ variant: discovered_output.used_vars.clone(),
+ directories: Directories::setup(
+ name.as_normalized(),
+ &recipe_path,
+ &output_dir,
+ args.no_build_id,
+ ×tamp,
+ )
+ .into_diagnostic()?,
+ channels,
+ timestamp,
+ subpackages: subpackages.clone(),
+ packaging_settings: PackagingSettings::from_args(
+ args.package_format.archive_type,
+ args.package_format.compression_level,
+ args.compression_threads,
+ ),
+ store_recipe: !args.no_include_recipe,
+ force_colors: args.color_build_log && console::colors_enabled(),
+ },
+ finalized_dependencies: None,
+ finalized_sources: None,
+ system_tools: SystemTools::new(),
+ build_summary: Arc::new(Mutex::new(BuildSummary::default())),
+ };
+
+ if args.render_only {
+ let resolved = output
+ .resolve_dependencies(&tool_config)
+ .await
+ .into_diagnostic()?;
+ println!("{}", serde_json::to_string_pretty(&resolved).unwrap());
+ continue;
+ }
+ outputs.push(output);
+ }
+
+ Ok(BuildOutput {
+ outputs,
+ tool_config,
+ recipe_path,
+ })
+}
+
+/// Runs build.
+pub async fn run_build_from_args(build_output: BuildOutput) -> miette::Result<()> {
+    let mut outputs: Vec<metadata::Output> = Vec::new();
+ for output in build_output.outputs {
+ let output = match run_build(output, &build_output.tool_config).await {
+ Ok((output, _archive)) => {
+ output.record_build_end();
+ output
+ }
+ Err(e) => {
+ tracing::error!("Error building package: {}", e);
+ return Err(e);
+ }
+ };
+ outputs.push(output);
+ }
+
+ let span = tracing::info_span!("Build summary");
+ let _enter = span.enter();
+ for output in outputs {
+ // print summaries for each output
+ let _ = output.log_build_summary().map_err(|e| {
+ tracing::error!("Error writing build summary: {}", e);
+ e
+ });
+ }
+
+ Ok(())
+}
+
+/// Runs test.
+pub async fn run_test_from_args(
+ args: TestOpts,
+ fancy_log_handler: LoggingOutputHandler,
+) -> miette::Result<()> {
+ let package_file = canonicalize(args.package_file).into_diagnostic()?;
+ let client = tool_configuration::reqwest_client_from_auth_storage(args.common.auth_file);
+
+ let tempdir = tempfile::tempdir().into_diagnostic()?;
+
+ let test_options = TestConfiguration {
+ test_prefix: tempdir.path().to_path_buf(),
+ target_platform: None,
+ keep_test_prefix: false,
+ channels: args
+ .channel
+ .unwrap_or_else(|| vec!["conda-forge".to_string()]),
+ tool_configuration: tool_configuration::Configuration {
+ client,
+ fancy_log_handler,
+ // duplicate from `keep_test_prefix`?
+ no_clean: false,
+ ..Default::default()
+ },
+ };
+
+ let package_name = package_file
+ .file_name()
+ .ok_or_else(|| miette::miette!("Could not get file name from package file"))?
+ .to_string_lossy()
+ .to_string();
+
+ let span = tracing::info_span!("Running tests for ", recipe = %package_name);
+ let _enter = span.enter();
+ package_test::run_test(&package_file, &test_options)
+ .await
+ .into_diagnostic()?;
+
+ Ok(())
+}
+
+/// Rebuild.
+pub async fn rebuild_from_args(
+ args: RebuildOpts,
+ fancy_log_handler: LoggingOutputHandler,
+) -> miette::Result<()> {
+ tracing::info!("Rebuilding {}", args.package_file.to_string_lossy());
+ // we extract the recipe folder from the package file (info/recipe/*)
+ // and then run the rendered recipe with the same arguments as the original build
+ let temp_folder = tempfile::tempdir().into_diagnostic()?;
+
+ rebuild::extract_recipe(&args.package_file, temp_folder.path()).into_diagnostic()?;
+
+ let temp_dir = temp_folder.into_path();
+
+ tracing::info!("Extracted recipe to: {:?}", temp_dir);
+
+ let rendered_recipe =
+ fs::read_to_string(temp_dir.join("rendered_recipe.yaml")).into_diagnostic()?;
+
+ let mut output: metadata::Output = serde_yaml::from_str(&rendered_recipe).into_diagnostic()?;
+
+ // set recipe dir to the temp folder
+ output.build_configuration.directories.recipe_dir = temp_dir;
+
+ // create output dir and set it in the config
+ let output_dir = args
+ .common
+ .output_dir
+ .unwrap_or(current_dir().into_diagnostic()?.join("output"));
+
+ fs::create_dir_all(&output_dir).into_diagnostic()?;
+ output.build_configuration.directories.output_dir =
+ canonicalize(output_dir).into_diagnostic()?;
+
+ let client = tool_configuration::reqwest_client_from_auth_storage(args.common.auth_file);
+
+ let tool_config = tool_configuration::Configuration {
+ client,
+ fancy_log_handler,
+ no_clean: true,
+ no_test: args.no_test,
+ use_zstd: args.common.use_zstd,
+ use_bz2: args.common.use_bz2,
+ };
+
+ output
+ .build_configuration
+ .directories
+ .recreate_directories()
+ .into_diagnostic()?;
+
+ run_build(output, &tool_config).await?;
+
+ Ok(())
+}
+
+/// Upload.
+pub async fn upload_from_args(args: UploadOpts) -> miette::Result<()> {
+ if args.package_files.is_empty() {
+ return Err(miette::miette!("No package files were provided."));
+ }
+
+ for package_file in &args.package_files {
+ if ArchiveType::try_from(package_file).is_none() {
+ return Err(miette::miette!(
+ "The file {} does not appear to be a conda package.",
+ package_file.to_string_lossy()
+ ));
+ }
+ }
+
+ let store = tool_configuration::get_auth_store(args.common.auth_file);
+
+ match args.server_type {
+ ServerType::Quetz(quetz_opts) => {
+ upload::upload_package_to_quetz(
+ &store,
+ quetz_opts.api_key,
+ &args.package_files,
+ quetz_opts.url,
+ quetz_opts.channel,
+ )
+ .await?;
+ }
+ ServerType::Artifactory(artifactory_opts) => {
+ upload::upload_package_to_artifactory(
+ &store,
+ artifactory_opts.username,
+ artifactory_opts.password,
+ &args.package_files,
+ artifactory_opts.url,
+ artifactory_opts.channel,
+ )
+ .await?;
+ }
+ ServerType::Prefix(prefix_opts) => {
+ upload::upload_package_to_prefix(
+ &store,
+ prefix_opts.api_key,
+ &args.package_files,
+ prefix_opts.url,
+ prefix_opts.channel,
+ )
+ .await?;
+ }
+ ServerType::Anaconda(anaconda_opts) => {
+ upload::upload_package_to_anaconda(
+ &store,
+ anaconda_opts.api_key,
+ &args.package_files,
+ anaconda_opts.url,
+ anaconda_opts.owner,
+ anaconda_opts.channel,
+ anaconda_opts.force,
+ )
+ .await?;
+ }
+ ServerType::CondaForge(conda_forge_opts) => {
+ upload::conda_forge::upload_packages_to_conda_forge(
+ conda_forge_opts,
+ &args.package_files,
+ )
+ .await?;
+ }
+ }
+
+ Ok(())
+}
diff --git a/src/main.rs b/src/main.rs
index fc7073a95..43d36f478 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,445 +1,34 @@
//! This is the main entry point for the `rattler-build` binary.
-use clap::{arg, crate_version, CommandFactory, Parser};
-
-use clap_verbosity_flag::{InfoLevel, Verbosity};
-use dunce::canonicalize;
-use fs_err as fs;
+use clap::{CommandFactory, Parser};
use miette::IntoDiagnostic;
-use rattler_conda_types::{package::ArchiveType, Platform};
-use rattler_package_streaming::write::CompressionLevel;
-use std::{
- collections::BTreeMap,
- env::current_dir,
- path::PathBuf,
- str::{self, FromStr},
- sync::{Arc, Mutex},
-};
-
-use url::Url;
-
use rattler_build::{
- build::run_build,
- console_utils::{init_logging, Color, LogStyle, LoggingOutputHandler},
- hash::HashInfo,
- metadata::{
- BuildConfiguration, BuildSummary, Directories, PackageIdentifier, PackagingSettings,
- },
- package_test::{self, TestConfiguration},
- recipe::{
- parser::{find_outputs_from_src, Recipe},
- ParsingError,
- },
- recipe_generator::{generate_recipe, GenerateRecipeOpts},
- selectors::SelectorConfig,
- system_tools::SystemTools,
- tool_configuration,
- variant_config::{ParseErrors, VariantConfig},
+ console_utils::init_logging,
+ get_build_output, get_recipe_path,
+ opt::{App, ShellCompletion, SubCommands},
+ rebuild_from_args,
+ recipe_generator::generate_recipe,
+ run_build_from_args, run_test_from_args, upload_from_args,
};
-mod rebuild;
-mod upload;
-
-#[derive(Parser)]
-enum SubCommands {
- /// Build a package
- Build(BuildOpts),
-
- /// Test a package
- Test(TestOpts),
-
- /// Rebuild a package
- Rebuild(RebuildOpts),
-
- /// Upload a package
- Upload(UploadOpts),
-
- /// Generate shell completion script
- Completion(ShellCompletion),
-
- /// Generate a recipe from PyPI or CRAN
- GenerateRecipe(GenerateRecipeOpts),
-
- /// Handle authentication to external repositories
- Auth(rattler::cli::auth::Args),
-}
-
-#[derive(Parser)]
-struct ShellCompletion {
- #[arg(short, long)]
- shell: Option,
-}
-
-#[derive(Parser)]
-#[clap(version = crate_version!())]
-struct App {
- #[clap(subcommand)]
- subcommand: Option,
-
- #[command(flatten)]
- verbose: Verbosity,
-
- /// Logging style
- #[clap(
- long,
- env = "RATTLER_BUILD_LOG_STYLE",
- default_value = "fancy",
- global = true
- )]
- log_style: LogStyle,
-
- /// Enable or disable colored output from rattler-build.
- /// Also honors the `CLICOLOR` and `CLICOLOR_FORCE` environment variable.
- #[clap(
- long,
- env = "RATTLER_BUILD_COLOR",
- default_value = "auto",
- global = true
- )]
- color: Color,
-}
-
-/// Common opts that are shared between [`Rebuild`] and [`Build`]` subcommands
-#[derive(Parser)]
-struct CommonOpts {
- /// Output directory for build artifacts. Defaults to `./output`.
- #[clap(long, env = "CONDA_BLD_PATH")]
- output_dir: Option,
-
- /// Enable support for repodata.json.zst
- #[clap(long, env = "RATTLER_ZSTD", default_value = "true", hide = true)]
- use_zstd: bool,
-
- /// Enable support for repodata.json.bz2
- #[clap(long, env = "RATTLER_BZ2", default_value = "true", hide = true)]
- use_bz2: bool,
-
- /// Enable experimental features
- #[arg(long, env = "RATTLER_BUILD_EXPERIMENTAL")]
- experimental: bool,
-
- /// Path to an auth-file to read authentication information from
- #[clap(long, env = "RATTLER_AUTH_FILE", hide = true)]
- auth_file: Option,
-}
-
-/// Container for the CLI package format and compression level
-#[derive(Clone, PartialEq, Eq, Debug)]
-struct PackageFormatAndCompression {
- /// The archive type that is selected
- pub archive_type: ArchiveType,
- /// The compression level that is selected
- pub compression_level: CompressionLevel,
-}
-
-// deserializer for the package format and compression level
-impl FromStr for PackageFormatAndCompression {
- type Err = String;
-
- fn from_str(s: &str) -> Result {
- let mut split = s.split(':');
- let package_format = split.next().ok_or("invalid")?;
-
- let compression = split.next().unwrap_or("default");
-
- // remove all non-alphanumeric characters
- let package_format = package_format
- .chars()
- .filter(|c| c.is_alphanumeric())
- .collect::();
-
- let archive_type = match package_format.to_lowercase().as_str() {
- "tarbz2" => ArchiveType::TarBz2,
- "conda" => ArchiveType::Conda,
- _ => return Err(format!("Unknown package format: {}", package_format)),
- };
-
- let compression_level = match compression {
- "max" | "highest" => CompressionLevel::Highest,
- "default" | "normal" => CompressionLevel::Default,
- "fast" | "lowest" | "min" => CompressionLevel::Lowest,
- number if number.parse::().is_ok() => {
- let number = number.parse::().unwrap_or_default();
- match archive_type {
- ArchiveType::TarBz2 => {
- if !(1..=9).contains(&number) {
- return Err("Compression level for .tar.bz2 must be between 1 and 9"
- .to_string());
- }
- }
- ArchiveType::Conda => {
- if !(-7..=22).contains(&number) {
- return Err(
- "Compression level for conda packages (zstd) must be between -7 and 22".to_string()
- );
- }
- }
- }
- CompressionLevel::Numeric(number)
- }
- _ => return Err(format!("Unknown compression level: {}", compression)),
- };
-
- Ok(PackageFormatAndCompression {
- archive_type,
- compression_level,
- })
- }
-}
-
-#[derive(Parser)]
-struct BuildOpts {
- /// The recipe file or directory containing `recipe.yaml`. Defaults to the current directory.
- #[arg(short, long, default_value = ".")]
- recipe: PathBuf,
-
- /// The target platform for the build.
- #[arg(long)]
- target_platform: Option,
-
- /// Add the channels needed for the recipe using this option. For more then one channel use it multiple times.
- /// The default channel is `conda-forge`.
- #[arg(short = 'c', long)]
- channel: Option>,
-
- /// Variant configuration files for the build.
- #[arg(short = 'm', long)]
- variant_config: Vec,
-
- /// Render the recipe files without executing the build.
- #[arg(long)]
- render_only: bool,
-
- /// Keep intermediate build artifacts after the build.
- #[arg(long)]
- keep_build: bool,
-
- /// Don't use build id(timestamp) when creating build directory name. Defaults to `false`.
- #[arg(long)]
- no_build_id: bool,
-
- /// The package format to use for the build. Can be one of `tar-bz2` or `conda`.
- /// You can also add a compression level to the package format, e.g. `tar-bz2:` (from 1 to 9) or `conda:` (from -7 to 22).
- #[arg(long, default_value = "conda")]
- package_format: PackageFormatAndCompression,
-
- #[arg(long)]
- /// The number of threads to use for compression (only relevant when also using `--package-format conda`)
- compression_threads: Option,
-
- /// Do not store the recipe in the final package
- #[arg(long)]
- no_include_recipe: bool,
-
- /// Do not run tests after building
- #[arg(long, default_value = "false")]
- no_test: bool,
-
- /// Do not force colors in the output of the build script
- #[arg(long, default_value = "true")]
- color_build_log: bool,
-
- #[clap(flatten)]
- common: CommonOpts,
-}
-
-#[derive(Parser)]
-struct TestOpts {
- /// Channels to use when testing
- #[arg(short = 'c', long)]
- channel: Option>,
-
- /// The package file to test
- #[arg(short, long)]
- package_file: PathBuf,
-
- #[clap(flatten)]
- common: CommonOpts,
-}
-
-#[derive(Parser)]
-struct RebuildOpts {
- /// The package file to rebuild
- #[arg(short, long)]
- package_file: PathBuf,
-
- /// Do not run tests after building
- #[arg(long, default_value = "false")]
- no_test: bool,
-
- #[clap(flatten)]
- common: CommonOpts,
-}
-
-#[derive(Parser)]
-struct UploadOpts {
- /// The package file to upload
- #[arg(global = true, required = false)]
- package_files: Vec,
-
- /// The server type
- #[clap(subcommand)]
- server_type: ServerType,
-
- #[clap(flatten)]
- common: CommonOpts,
-}
-
-#[derive(Clone, Debug, PartialEq, Parser)]
-enum ServerType {
- Quetz(QuetzOpts),
- Artifactory(ArtifactoryOpts),
- Prefix(PrefixOpts),
- Anaconda(AnacondaOpts),
- #[clap(hide = true)]
- CondaForge(CondaForgeOpts),
-}
-
-#[derive(Clone, Debug, PartialEq, Parser)]
-/// Options for uploading to a Quetz server.
-/// Authentication is used from the keychain / auth-file.
-struct QuetzOpts {
- /// The URL to your Quetz server
- #[arg(short, long, env = "QUETZ_SERVER_URL")]
- url: Url,
-
- /// The URL to your channel
- #[arg(short, long, env = "QUETZ_CHANNEL")]
- channel: String,
-
- /// The Quetz API key, if none is provided, the token is read from the keychain / auth-file
- #[arg(short, long, env = "QUETZ_API_KEY")]
- api_key: Option,
-}
-
-#[derive(Clone, Debug, PartialEq, Parser)]
-/// Options for uploading to a Artifactory channel.
-/// Authentication is used from the keychain / auth-file.
-struct ArtifactoryOpts {
- /// The URL to your Artifactory server
- #[arg(short, long, env = "ARTIFACTORY_SERVER_URL")]
- url: Url,
-
- /// The URL to your channel
- #[arg(short, long, env = "ARTIFACTORY_CHANNEL")]
- channel: String,
-
- /// Your Artifactory username
- #[arg(short = 'r', long, env = "ARTIFACTORY_USERNAME")]
- username: Option,
-
- /// Your Artifactory password
- #[arg(short, long, env = "ARTIFACTORY_PASSWORD")]
- password: Option,
-}
-
-/// Options for uploading to a prefix.dev server.
-/// Authentication is used from the keychain / auth-file
-#[derive(Clone, Debug, PartialEq, Parser)]
-struct PrefixOpts {
- /// The URL to the prefix.dev server (only necessary for self-hosted instances)
- #[arg(
- short,
- long,
- env = "PREFIX_SERVER_URL",
- default_value = "https://prefix.dev"
- )]
- url: Url,
-
- /// The channel to upload the package to
- #[arg(short, long, env = "PREFIX_CHANNEL")]
- channel: String,
-
- /// The prefix.dev API key, if none is provided, the token is read from the keychain / auth-file
- #[arg(short, long, env = "PREFIX_API_KEY")]
- api_key: Option,
-}
-
-/// Options for uploading to a Anaconda.org server
-#[derive(Clone, Debug, PartialEq, Parser)]
-struct AnacondaOpts {
- /// The owner of the distribution (e.g. conda-forge or your username)
- #[arg(short, long, env = "ANACONDA_OWNER")]
- owner: String,
-
- /// The channel / label to upload the package to (e.g. main / rc)
- #[arg(short, long, env = "ANACONDA_CHANNEL", default_value = "main", num_args = 1..)]
- channel: Vec,
-
- /// The Anaconda API key, if none is provided, the token is read from the keychain / auth-file
- #[arg(short, long, env = "ANACONDA_API_KEY")]
- api_key: Option,
-
- /// The URL to the Anaconda server
- #[arg(
- short,
- long,
- env = "ANACONDA_SERVER_URL",
- default_value = "https://api.anaconda.org"
- )]
- url: Url,
-
- /// Replace files on conflict
- #[arg(long, short, env = "ANACONDA_FORCE", default_value = "false")]
- force: bool,
-}
-
-/// Options for uploading to conda-forge
-#[derive(Clone, Debug, PartialEq, Parser)]
-pub struct CondaForgeOpts {
- /// The Anaconda API key
- #[arg(long, env = "STAGING_BINSTAR_TOKEN", required = true)]
- staging_token: String,
-
- /// The feedstock name
- #[arg(long, env = "FEEDSTOCK_NAME", required = true)]
- feedstock: String,
-
- /// The feedstock token
- #[arg(long, env = "FEEDSTOCK_TOKEN", required = true)]
- feedstock_token: String,
-
- /// The staging channel name
- #[arg(long, env = "STAGING_CHANNEL", default_value = "cf-staging")]
- staging_channel: String,
-
- /// The Anaconda Server URL
- #[arg(
- long,
- env = "ANACONDA_SERVER_URL",
- default_value = "https://api.anaconda.org"
- )]
- anaconda_url: Url,
-
- /// The validation endpoint url
- #[arg(
- long,
- env = "VALIDATION_ENDPOINT",
- default_value = "https://conda-forge.herokuapp.com/feedstock-outputs/copy"
- )]
- validation_endpoint: Url,
-
- /// Post comment on promotion failure
- #[arg(long, env = "POST_COMMENT_ON_ERROR", default_value = "true")]
- post_comment_on_error: bool,
-
- /// The CI provider
- #[arg(long, env = "CI")]
- provider: Option,
-
- /// Dry run, don't actually upload anything
- #[arg(long, env = "DRY_RUN", default_value = "false")]
- dry_run: bool,
-}
-
#[tokio::main]
async fn main() -> miette::Result<()> {
- let args = App::parse();
-
- let fancy_log_handler =
- init_logging(&args.log_style, &args.verbose, &args.color).into_diagnostic()?;
-
- match args.subcommand {
+ let app = App::parse();
+ let log_handler = if !app.is_tui() {
+ Some(
+ init_logging(
+ &app.log_style,
+ &app.verbose,
+ &app.color,
+ #[cfg(feature = "tui")]
+ None,
+ )
+ .into_diagnostic()?,
+ )
+ } else {
+ None
+ };
+ match app.subcommand {
Some(SubCommands::Completion(ShellCompletion { shell })) => {
let mut cmd = App::command();
fn print_completions(gen: G, cmd: &mut clap::Command) {
@@ -456,515 +45,56 @@ async fn main() -> miette::Result<()> {
print_completions(shell, &mut cmd);
Ok(())
}
- Some(SubCommands::Build(args)) => run_build_from_args(args, fancy_log_handler).await,
- Some(SubCommands::Test(args)) => run_test_from_args(args, fancy_log_handler).await,
- Some(SubCommands::Rebuild(args)) => rebuild_from_args(args, fancy_log_handler).await,
- Some(SubCommands::Upload(args)) => upload_from_args(args).await,
- Some(SubCommands::GenerateRecipe(args)) => generate_recipe(args).await,
- Some(SubCommands::Auth(args)) => rattler::cli::auth::execute(args).await.into_diagnostic(),
- None => {
- _ = App::command().print_long_help();
- Ok(())
- }
- }
-}
-
-async fn run_test_from_args(
- args: TestOpts,
- fancy_log_handler: LoggingOutputHandler,
-) -> miette::Result<()> {
- let package_file = canonicalize(args.package_file).into_diagnostic()?;
- let client = tool_configuration::reqwest_client_from_auth_storage(args.common.auth_file);
-
- let tempdir = tempfile::tempdir().into_diagnostic()?;
-
- let test_options = TestConfiguration {
- test_prefix: tempdir.path().to_path_buf(),
- target_platform: None,
- keep_test_prefix: false,
- channels: args
- .channel
- .unwrap_or_else(|| vec!["conda-forge".to_string()]),
- tool_configuration: tool_configuration::Configuration {
- client,
- fancy_log_handler,
- // duplicate from `keep_test_prefix`?
- no_clean: false,
- ..Default::default()
- },
- };
-
- let package_name = package_file
- .file_name()
- .ok_or_else(|| miette::miette!("Could not get file name from package file"))?
- .to_string_lossy()
- .to_string();
-
- let span = tracing::info_span!("Running tests for ", recipe = %package_name);
- let _enter = span.enter();
- package_test::run_test(&package_file, &test_options)
- .await
- .into_diagnostic()?;
-
- Ok(())
-}
-
-async fn run_build_from_args(
- args: BuildOpts,
- fancy_log_handler: LoggingOutputHandler,
-) -> miette::Result<()> {
- let recipe_path = canonicalize(&args.recipe);
- if let Err(e) = &recipe_path {
- match e.kind() {
- std::io::ErrorKind::NotFound => {
- return Err(miette::miette!(
- "The file {} could not be found.",
- args.recipe.to_string_lossy()
- ));
- }
- std::io::ErrorKind::PermissionDenied => {
- return Err(miette::miette!(
- "Permission denied when trying to access the file {}.",
- args.recipe.to_string_lossy()
- ));
- }
- _ => {
- return Err(miette::miette!(
- "An unknown error occurred while trying to access the file {}: {:?}",
- args.recipe.to_string_lossy(),
- e
- ));
- }
- }
- }
- let mut recipe_path = recipe_path.into_diagnostic()?;
-
- // If the recipe_path is a directory, look for "recipe.yaml" in the directory.
- if recipe_path.is_dir() {
- let recipe_yaml_path = recipe_path.join("recipe.yaml");
- if recipe_yaml_path.exists() && recipe_yaml_path.is_file() {
- recipe_path = recipe_yaml_path;
- } else {
- return Err(miette::miette!(
- "'recipe.yaml' not found in the directory {}",
- args.recipe.to_string_lossy()
- ));
- }
- }
-
- let output_dir = args
- .common
- .output_dir
- .unwrap_or(current_dir().into_diagnostic()?.join("output"));
- if output_dir.starts_with(
- recipe_path
- .parent()
- .expect("Could not get parent of recipe"),
- ) {
- return Err(miette::miette!(
- "The output directory cannot be in the recipe directory.\nThe current output directory is: {}\nSelect a different output directory with the --output-dir option or set the CONDA_BLD_PATH environment variable"
- , output_dir.to_string_lossy()));
- }
-
- let recipe_text = fs::read_to_string(&recipe_path).into_diagnostic()?;
-
- let host_platform = if let Some(target_platform) = args.target_platform {
- Platform::from_str(&target_platform).into_diagnostic()?
- } else {
- Platform::current()
- };
-
- let selector_config = SelectorConfig {
- // We ignore noarch here
- target_platform: host_platform,
- hash: None,
- build_platform: Platform::current(),
- variant: BTreeMap::new(),
- experimental: args.common.experimental,
- };
-
- let span = tracing::info_span!("Finding outputs from recipe");
- tracing::info!("Target platform: {}", host_platform);
- let enter = span.enter();
- // First find all outputs from the recipe
- let outputs = find_outputs_from_src(&recipe_text)?;
-
- let variant_config =
- VariantConfig::from_files(&args.variant_config, &selector_config).into_diagnostic()?;
-
- let outputs_and_variants =
- variant_config.find_variants(&outputs, &recipe_text, &selector_config)?;
-
- tracing::info!("Found {} variants\n", outputs_and_variants.len());
- for discovered_output in &outputs_and_variants {
- tracing::info!(
- "Build variant: {}-{}-{}",
- discovered_output.name,
- discovered_output.version,
- discovered_output.build_string
- );
-
- let mut table = comfy_table::Table::new();
- table
- .load_preset(comfy_table::presets::UTF8_FULL_CONDENSED)
- .apply_modifier(comfy_table::modifiers::UTF8_ROUND_CORNERS)
- .set_header(vec!["Variant", "Version"]);
- for (key, value) in discovered_output.used_vars.iter() {
- table.add_row(vec![key, value]);
- }
- tracing::info!("{}\n", table);
- }
- drop(enter);
-
- let client = tool_configuration::reqwest_client_from_auth_storage(args.common.auth_file);
-
- let tool_config = tool_configuration::Configuration {
- client,
- fancy_log_handler: fancy_log_handler.clone(),
- no_clean: args.keep_build,
- no_test: args.no_test,
- use_zstd: args.common.use_zstd,
- use_bz2: args.common.use_bz2,
- };
-
- let mut subpackages = BTreeMap::new();
- let mut outputs = Vec::new();
- for discovered_output in outputs_and_variants {
- let hash =
- HashInfo::from_variant(&discovered_output.used_vars, &discovered_output.noarch_type);
-
- let selector_config = SelectorConfig {
- variant: discovered_output.used_vars.clone(),
- hash: Some(hash.clone()),
- target_platform: selector_config.target_platform,
- build_platform: selector_config.build_platform,
- experimental: args.common.experimental,
- };
-
- let recipe =
- Recipe::from_node(&discovered_output.node, selector_config).map_err(|err| {
- let errs: ParseErrors = err
- .into_iter()
- .map(|err| ParsingError::from_partial(&recipe_text, err))
- .collect::>()
- .into();
- errs
- })?;
-
- if recipe.build().skip() {
- tracing::info!(
- "Skipping build for variant: {:#?}",
- discovered_output.used_vars
- );
- continue;
- }
-
- subpackages.insert(
- recipe.package().name().clone(),
- PackageIdentifier {
- name: recipe.package().name().clone(),
- version: recipe.package().version().to_owned(),
- build_string: recipe
- .build()
- .string()
- .expect("Shouldn't be unset, needs major refactoring, for handling this better")
- .to_owned(),
- },
- );
-
- let name = recipe.package().name().clone();
- // Add the channels from the args and by default always conda-forge
- let channels = args
- .channel
- .clone()
- .unwrap_or_else(|| vec!["conda-forge".to_string()]);
-
- let timestamp = chrono::Utc::now();
-
- let output = rattler_build::metadata::Output {
- recipe,
- build_configuration: BuildConfiguration {
- target_platform: discovered_output.target_platform,
- host_platform,
- build_platform: Platform::current(),
- hash,
- variant: discovered_output.used_vars.clone(),
- directories: Directories::create(
- name.as_normalized(),
- &recipe_path,
- &output_dir,
- args.no_build_id,
- ×tamp,
+ #[cfg(feature = "tui")]
+ Some(SubCommands::Build(build_args)) => {
+ if build_args.tui {
+ let tui = rattler_build::tui::init().await?;
+ let log_handler = init_logging(
+ &app.log_style,
+ &app.verbose,
+ &app.color,
+ Some(tui.event_handler.sender.clone()),
)
- .into_diagnostic()?,
- channels,
- timestamp,
- subpackages: subpackages.clone(),
- packaging_settings: PackagingSettings::from_args(
- args.package_format.archive_type,
- args.package_format.compression_level,
- args.compression_threads,
- ),
- store_recipe: !args.no_include_recipe,
- force_colors: args.color_build_log && console::colors_enabled(),
- },
- finalized_dependencies: None,
- finalized_sources: None,
- system_tools: SystemTools::new(),
- build_summary: Arc::new(Mutex::new(BuildSummary::default())),
- };
-
- if args.render_only {
- let resolved = output
- .resolve_dependencies(&tool_config)
- .await
.into_diagnostic()?;
- println!("{}", serde_json::to_string_pretty(&resolved).unwrap());
- continue;
- }
-
- let output = match run_build(output, tool_config.clone()).await {
- Ok((output, _archive)) => {
- output.record_build_end();
- output
- }
- Err(e) => {
- tracing::error!("Error building package: {}", e);
- return Err(e);
+ rattler_build::tui::run(tui, build_args, log_handler).await
+ } else {
+ let recipe_path = get_recipe_path(&build_args.recipe)?;
+ let build_output = get_build_output(
+ build_args,
+ recipe_path,
+ log_handler.expect("logger is not initialized"),
+ )
+ .await?;
+ run_build_from_args(build_output).await
}
- };
- outputs.push(output);
- }
-
- let span = tracing::info_span!("Build summary");
- let _enter = span.enter();
- for output in outputs {
- // print summaries for each output
- let _ = output.log_build_summary().map_err(|e| {
- tracing::error!("Error writing build summary: {}", e);
- e
- });
- }
-
- Ok(())
-}
-
-async fn rebuild_from_args(
- args: RebuildOpts,
- fancy_log_handler: LoggingOutputHandler,
-) -> miette::Result<()> {
- tracing::info!("Rebuilding {}", args.package_file.to_string_lossy());
- // we extract the recipe folder from the package file (info/recipe/*)
- // and then run the rendered recipe with the same arguments as the original build
- let temp_folder = tempfile::tempdir().into_diagnostic()?;
-
- rebuild::extract_recipe(&args.package_file, temp_folder.path()).into_diagnostic()?;
-
- let temp_dir = temp_folder.into_path();
-
- tracing::info!("Extracted recipe to: {:?}", temp_dir);
-
- let rendered_recipe =
- fs::read_to_string(temp_dir.join("rendered_recipe.yaml")).into_diagnostic()?;
-
- let mut output: rattler_build::metadata::Output =
- serde_yaml::from_str(&rendered_recipe).into_diagnostic()?;
-
- // set recipe dir to the temp folder
- output.build_configuration.directories.recipe_dir = temp_dir;
-
- // create output dir and set it in the config
- let output_dir = args
- .common
- .output_dir
- .unwrap_or(current_dir().into_diagnostic()?.join("output"));
-
- fs::create_dir_all(&output_dir).into_diagnostic()?;
- output.build_configuration.directories.output_dir =
- canonicalize(output_dir).into_diagnostic()?;
-
- let client = tool_configuration::reqwest_client_from_auth_storage(args.common.auth_file);
-
- let tool_config = tool_configuration::Configuration {
- client,
- fancy_log_handler,
- no_clean: true,
- no_test: args.no_test,
- use_zstd: args.common.use_zstd,
- use_bz2: args.common.use_bz2,
- };
-
- output
- .build_configuration
- .directories
- .recreate_directories()
- .into_diagnostic()?;
-
- run_build(output, tool_config.clone()).await?;
-
- Ok(())
-}
-
-async fn upload_from_args(args: UploadOpts) -> miette::Result<()> {
- if args.package_files.is_empty() {
- return Err(miette::miette!("No package files were provided."));
- }
-
- for package_file in &args.package_files {
- if ArchiveType::try_from(package_file).is_none() {
- return Err(miette::miette!(
- "The file {} does not appear to be a conda package.",
- package_file.to_string_lossy()
- ));
}
- }
-
- let store = tool_configuration::get_auth_store(args.common.auth_file);
-
- match args.server_type {
- ServerType::Quetz(quetz_opts) => {
- upload::upload_package_to_quetz(
- &store,
- quetz_opts.api_key,
- &args.package_files,
- quetz_opts.url,
- quetz_opts.channel,
+ #[cfg(not(feature = "tui"))]
+ Some(SubCommands::Build(build_args)) => {
+ let recipe_path = get_recipe_path(&build_args.recipe)?;
+ let build_output = get_build_output(
+ build_args,
+ recipe_path,
+ log_handler.expect("logger is not initialized"),
)
.await?;
+ run_build_from_args(build_output).await
}
- ServerType::Artifactory(artifactory_opts) => {
- upload::upload_package_to_artifactory(
- &store,
- artifactory_opts.username,
- artifactory_opts.password,
- &args.package_files,
- artifactory_opts.url,
- artifactory_opts.channel,
- )
- .await?;
+ Some(SubCommands::Test(test_args)) => {
+ run_test_from_args(test_args, log_handler.expect("logger is not initialized")).await
}
- ServerType::Prefix(prefix_opts) => {
- upload::upload_package_to_prefix(
- &store,
- prefix_opts.api_key,
- &args.package_files,
- prefix_opts.url,
- prefix_opts.channel,
+ Some(SubCommands::Rebuild(rebuild_args)) => {
+ rebuild_from_args(
+ rebuild_args,
+ log_handler.expect("logger is not initialized"),
)
- .await?;
- }
- ServerType::Anaconda(anaconda_opts) => {
- upload::upload_package_to_anaconda(
- &store,
- anaconda_opts.api_key,
- &args.package_files,
- anaconda_opts.url,
- anaconda_opts.owner,
- anaconda_opts.channel,
- anaconda_opts.force,
- )
- .await?;
+ .await
}
- ServerType::CondaForge(conda_forge_opts) => {
- upload::conda_forge::upload_packages_to_conda_forge(
- conda_forge_opts,
- &args.package_files,
- )
- .await?;
+ Some(SubCommands::Upload(upload_args)) => upload_from_args(upload_args).await,
+ Some(SubCommands::GenerateRecipe(args)) => generate_recipe(args).await,
+ Some(SubCommands::Auth(args)) => rattler::cli::auth::execute(args).await.into_diagnostic(),
+ None => {
+ _ = App::command().print_long_help();
+ Ok(())
}
}
-
- Ok(())
-}
-
-#[cfg(test)]
-mod test {
- use super::PackageFormatAndCompression;
- use rattler_conda_types::package::ArchiveType;
- use rattler_package_streaming::write::CompressionLevel;
- use std::str::FromStr;
-
- #[test]
- fn test_parse_packaging() {
- let package_format = PackageFormatAndCompression::from_str("tar-bz2").unwrap();
- assert_eq!(
- package_format,
- PackageFormatAndCompression {
- archive_type: ArchiveType::TarBz2,
- compression_level: CompressionLevel::Default
- }
- );
-
- let package_format = PackageFormatAndCompression::from_str("conda").unwrap();
- assert_eq!(
- package_format,
- PackageFormatAndCompression {
- archive_type: ArchiveType::Conda,
- compression_level: CompressionLevel::Default
- }
- );
-
- let package_format = PackageFormatAndCompression::from_str("tar-bz2:1").unwrap();
- assert_eq!(
- package_format,
- PackageFormatAndCompression {
- archive_type: ArchiveType::TarBz2,
- compression_level: CompressionLevel::Numeric(1)
- }
- );
-
- let package_format = PackageFormatAndCompression::from_str(".tar.bz2:max").unwrap();
- assert_eq!(
- package_format,
- PackageFormatAndCompression {
- archive_type: ArchiveType::TarBz2,
- compression_level: CompressionLevel::Highest
- }
- );
-
- let package_format = PackageFormatAndCompression::from_str("tarbz2:5").unwrap();
- assert_eq!(
- package_format,
- PackageFormatAndCompression {
- archive_type: ArchiveType::TarBz2,
- compression_level: CompressionLevel::Numeric(5)
- }
- );
-
- let package_format = PackageFormatAndCompression::from_str("conda:1").unwrap();
- assert_eq!(
- package_format,
- PackageFormatAndCompression {
- archive_type: ArchiveType::Conda,
- compression_level: CompressionLevel::Numeric(1)
- }
- );
-
- let package_format = PackageFormatAndCompression::from_str("conda:max").unwrap();
- assert_eq!(
- package_format,
- PackageFormatAndCompression {
- archive_type: ArchiveType::Conda,
- compression_level: CompressionLevel::Highest
- }
- );
-
- let package_format = PackageFormatAndCompression::from_str("conda:-5").unwrap();
- assert_eq!(
- package_format,
- PackageFormatAndCompression {
- archive_type: ArchiveType::Conda,
- compression_level: CompressionLevel::Numeric(-5)
- }
- );
-
- let package_format = PackageFormatAndCompression::from_str("conda:fast").unwrap();
- assert_eq!(
- package_format,
- PackageFormatAndCompression {
- archive_type: ArchiveType::Conda,
- compression_level: CompressionLevel::Lowest
- }
- );
- }
}
diff --git a/src/metadata.rs b/src/metadata.rs
index 67010d388..74475943d 100644
--- a/src/metadata.rs
+++ b/src/metadata.rs
@@ -52,7 +52,7 @@ impl Display for GitRev {
}
/// Directories used during the build process
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct Directories {
/// The directory where the recipe is located
#[serde(skip)]
@@ -72,7 +72,7 @@ pub struct Directories {
pub output_dir: PathBuf,
}
-fn setup_build_dir(
+fn get_build_dir(
output_dir: &Path,
name: &str,
no_build_id: bool,
@@ -85,14 +85,12 @@ fn setup_build_dir(
} else {
format!("rattler-build_{}_{:?}", name, since_the_epoch)
};
- let path = output_dir.join("bld").join(dirname);
- fs::create_dir_all(path.join("work"))?;
- Ok(path)
+ Ok(output_dir.join("bld").join(dirname))
}
impl Directories {
/// Create all directories needed for the building of a package
- pub fn create(
+ pub fn setup(
name: &str,
recipe_path: &Path,
output_dir: &Path,
@@ -104,7 +102,7 @@ impl Directories {
}
let output_dir = canonicalize(output_dir)?;
- let build_dir = setup_build_dir(&output_dir, name, no_build_id, timestamp)
+ let build_dir = get_build_dir(&output_dir, name, no_build_id, timestamp)
.expect("Could not create build directory");
let recipe_dir = recipe_path
.parent()
@@ -143,6 +141,12 @@ impl Directories {
Ok(directories)
}
+ /// Creates the build directory.
+ pub fn create_build_dir(&self) -> Result<(), std::io::Error> {
+ fs::create_dir_all(self.build_dir.join("work"))?;
+ Ok(())
+ }
+
/// create all directories
pub fn recreate_directories(&self) -> Result<(), std::io::Error> {
if self.build_dir.exists() {
@@ -282,7 +286,7 @@ pub struct BuildSummary {
/// A output. This is the central element that is passed to the `run_build` function
/// and fully specifies all the options and settings to run the build.
-#[derive(Clone, Serialize, Deserialize)]
+#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Output {
/// The rendered recipe that is used to build this output
pub recipe: Recipe,
@@ -599,18 +603,16 @@ mod tests {
fn setup_build_dir_test() {
// without build_id (aka timestamp)
let dir = tempfile::tempdir().unwrap();
- let p1 = setup_build_dir(dir.path(), "name", true, &Utc::now()).unwrap();
+ let p1 = get_build_dir(dir.path(), "name", true, &Utc::now()).unwrap();
let f1 = p1.file_name().unwrap();
assert!(f1.eq("rattler-build_name"));
- _ = std::fs::remove_dir_all(p1);
// with build_id (aka timestamp)
let timestamp = &Utc::now();
- let p2 = setup_build_dir(dir.path(), "name", false, timestamp).unwrap();
+ let p2 = get_build_dir(dir.path(), "name", false, timestamp).unwrap();
let f2 = p2.file_name().unwrap();
let epoch = timestamp.timestamp();
assert!(f2.eq(format!("rattler-build_name_{epoch}").as_str()));
- _ = std::fs::remove_dir_all(p2);
}
}
@@ -634,7 +636,7 @@ mod test {
fn test_directories_yaml_rendering() {
let tempdir = tempfile::tempdir().unwrap();
- let directories = Directories::create(
+ let directories = Directories::setup(
"name",
&tempdir.path().join("recipe"),
&tempdir.path().join("output"),
@@ -642,6 +644,7 @@ mod test {
&chrono::Utc::now(),
)
.unwrap();
+ directories.create_build_dir().unwrap();
// test yaml roundtrip
let yaml = serde_yaml::to_string(&directories).unwrap();
diff --git a/src/opt.rs b/src/opt.rs
new file mode 100644
index 000000000..c8946298b
--- /dev/null
+++ b/src/opt.rs
@@ -0,0 +1,535 @@
+//! Command-line options.
+
+use std::{path::PathBuf, str::FromStr};
+
+use crate::{
+ console_utils::{Color, LogStyle},
+ recipe_generator::GenerateRecipeOpts,
+};
+use clap::{arg, crate_version, Parser};
+use clap_verbosity_flag::{InfoLevel, Verbosity};
+use rattler_conda_types::package::ArchiveType;
+use rattler_package_streaming::write::CompressionLevel;
+use url::Url;
+
+/// Application subcommands.
+#[derive(Parser)]
+pub enum SubCommands {
+ /// Build a package
+ Build(BuildOpts),
+
+ /// Test a package
+ Test(TestOpts),
+
+ /// Rebuild a package
+ Rebuild(RebuildOpts),
+
+ /// Upload a package
+ Upload(UploadOpts),
+
+ /// Generate shell completion script
+ Completion(ShellCompletion),
+
+ /// Generate a recipe from PyPI or CRAN
+ GenerateRecipe(GenerateRecipeOpts),
+
+ /// Handle authentication to external repositories
+ Auth(rattler::cli::auth::Args),
+}
+
+/// Shell completion options.
+#[derive(Parser)]
+pub struct ShellCompletion {
+ /// Shell.
+ #[arg(short, long)]
+ pub shell: Option,
+}
+
+#[allow(missing_docs)]
+#[derive(Parser)]
+#[clap(version = crate_version!())]
+pub struct App {
+ /// Subcommand.
+ #[clap(subcommand)]
+ pub subcommand: Option,
+
+ /// Enable verbose logging.
+ #[command(flatten)]
+ pub verbose: Verbosity,
+
+ /// Logging style
+ #[clap(
+ long,
+ env = "RATTLER_BUILD_LOG_STYLE",
+ default_value = "fancy",
+ global = true
+ )]
+ pub log_style: LogStyle,
+
+ /// Enable or disable colored output from rattler-build.
+ /// Also honors the `CLICOLOR` and `CLICOLOR_FORCE` environment variable.
+ #[clap(
+ long,
+ env = "RATTLER_BUILD_COLOR",
+ default_value = "auto",
+ global = true
+ )]
+ pub color: Color,
+}
+
+impl App {
+ /// Returns true if the application will launch a TUI.
+ #[cfg(feature = "tui")]
+ pub fn is_tui(&self) -> bool {
+ match &self.subcommand {
+ Some(SubCommands::Build(args)) => args.tui,
+ _ => false,
+ }
+ }
+
+ /// Returns true if the application will launch a TUI.
+ #[cfg(not(feature = "tui"))]
+ pub fn is_tui(&self) -> bool {
+ false
+ }
+}
+
+/// Common opts that are shared between [`Rebuild`] and [`Build`] subcommands
+#[derive(Parser, Clone)]
+pub struct CommonOpts {
+ /// Output directory for build artifacts. Defaults to `./output`.
+ #[clap(long, env = "CONDA_BLD_PATH")]
+ pub output_dir: Option,
+
+ /// Enable support for repodata.json.zst
+ #[clap(long, env = "RATTLER_ZSTD", default_value = "true", hide = true)]
+ pub use_zstd: bool,
+
+ /// Enable support for repodata.json.bz2
+ #[clap(long, env = "RATTLER_BZ2", default_value = "true", hide = true)]
+ pub use_bz2: bool,
+
+ /// Enable experimental features
+ #[arg(long, env = "RATTLER_BUILD_EXPERIMENTAL")]
+ pub experimental: bool,
+
+ /// Path to an auth-file to read authentication information from
+ #[clap(long, env = "RATTLER_AUTH_FILE", hide = true)]
+ pub auth_file: Option,
+}
+
+/// Container for the CLI package format and compression level
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct PackageFormatAndCompression {
+ /// The archive type that is selected
+ pub archive_type: ArchiveType,
+ /// The compression level that is selected
+ pub compression_level: CompressionLevel,
+}
+
+// deserializer for the package format and compression level
+impl FromStr for PackageFormatAndCompression {
+ type Err = String;
+
+ fn from_str(s: &str) -> Result {
+ let mut split = s.split(':');
+ let package_format = split.next().ok_or("invalid")?;
+
+ let compression = split.next().unwrap_or("default");
+
+ // remove all non-alphanumeric characters
+ let package_format = package_format
+ .chars()
+ .filter(|c| c.is_alphanumeric())
+ .collect::();
+
+ let archive_type = match package_format.to_lowercase().as_str() {
+ "tarbz2" => ArchiveType::TarBz2,
+ "conda" => ArchiveType::Conda,
+ _ => return Err(format!("Unknown package format: {}", package_format)),
+ };
+
+ let compression_level = match compression {
+ "max" | "highest" => CompressionLevel::Highest,
+ "default" | "normal" => CompressionLevel::Default,
+ "fast" | "lowest" | "min" => CompressionLevel::Lowest,
+ number if number.parse::().is_ok() => {
+ let number = number.parse::().unwrap_or_default();
+ match archive_type {
+ ArchiveType::TarBz2 => {
+ if !(1..=9).contains(&number) {
+ return Err("Compression level for .tar.bz2 must be between 1 and 9"
+ .to_string());
+ }
+ }
+ ArchiveType::Conda => {
+ if !(-7..=22).contains(&number) {
+ return Err(
+ "Compression level for conda packages (zstd) must be between -7 and 22".to_string()
+ );
+ }
+ }
+ }
+ CompressionLevel::Numeric(number)
+ }
+ _ => return Err(format!("Unknown compression level: {}", compression)),
+ };
+
+ Ok(PackageFormatAndCompression {
+ archive_type,
+ compression_level,
+ })
+ }
+}
+
+/// Build options.
+#[derive(Parser, Clone)]
+pub struct BuildOpts {
+ /// The recipe file or directory containing `recipe.yaml`. Defaults to the current directory.
+ #[arg(short, long, default_value = ".")]
+ pub recipe: PathBuf,
+
+ /// The target platform for the build.
+ #[arg(long)]
+ pub target_platform: Option,
+
+ /// Add the channels needed for the recipe using this option. For more than one channel use it multiple times.
+ /// The default channel is `conda-forge`.
+ #[arg(short = 'c', long)]
+ pub channel: Option>,
+
+ /// Variant configuration files for the build.
+ #[arg(short = 'm', long)]
+ pub variant_config: Vec,
+
+ /// Render the recipe files without executing the build.
+ #[arg(long)]
+ pub render_only: bool,
+
+ /// Keep intermediate build artifacts after the build.
+ #[arg(long)]
+ pub keep_build: bool,
+
+ /// Don't use build id(timestamp) when creating build directory name. Defaults to `false`.
+ #[arg(long)]
+ pub no_build_id: bool,
+
+ /// The package format to use for the build. Can be one of `tar-bz2` or `conda`.
+ /// You can also add a compression level to the package format, e.g. `tar-bz2:<number>` (from 1 to 9) or `conda:<number>` (from -7 to 22).
+ #[arg(long, default_value = "conda")]
+ pub package_format: PackageFormatAndCompression,
+
+ #[arg(long)]
+ /// The number of threads to use for compression (only relevant when also using `--package-format conda`)
+ pub compression_threads: Option,
+
+ /// Do not store the recipe in the final package
+ #[arg(long)]
+ pub no_include_recipe: bool,
+
+ /// Do not run tests after building
+ #[arg(long, default_value = "false")]
+ pub no_test: bool,
+
+ /// Do not force colors in the output of the build script
+ #[arg(long, default_value = "true")]
+ pub color_build_log: bool,
+
+ /// Common options.
+ #[clap(flatten)]
+ pub common: CommonOpts,
+
+ /// Launch the terminal user interface.
+ #[cfg(feature = "tui")]
+ #[arg(long, default_value = "false")]
+ pub tui: bool,
+}
+
+/// Test options.
+#[derive(Parser)]
+pub struct TestOpts {
+ /// Channels to use when testing
+ #[arg(short = 'c', long)]
+ pub channel: Option>,
+
+ /// The package file to test
+ #[arg(short, long)]
+ pub package_file: PathBuf,
+
+ /// Common options.
+ #[clap(flatten)]
+ pub common: CommonOpts,
+}
+
+/// Rebuild options.
+#[derive(Parser)]
+pub struct RebuildOpts {
+ /// The package file to rebuild
+ #[arg(short, long)]
+ pub package_file: PathBuf,
+
+ /// Do not run tests after building
+ #[arg(long, default_value = "false")]
+ pub no_test: bool,
+
+ /// Common options.
+ #[clap(flatten)]
+ pub common: CommonOpts,
+}
+
+/// Upload options.
+#[derive(Parser)]
+pub struct UploadOpts {
+ /// The package file to upload
+ #[arg(global = true, required = false)]
+ pub package_files: Vec,
+
+ /// The server type
+ #[clap(subcommand)]
+ pub server_type: ServerType,
+
+ /// Common options.
+ #[clap(flatten)]
+ pub common: CommonOpts,
+}
+
+/// Server type.
+#[derive(Clone, Debug, PartialEq, Parser)]
+#[allow(missing_docs)]
+pub enum ServerType {
+ Quetz(QuetzOpts),
+ Artifactory(ArtifactoryOpts),
+ Prefix(PrefixOpts),
+ Anaconda(AnacondaOpts),
+ #[clap(hide = true)]
+ CondaForge(CondaForgeOpts),
+}
+
+#[derive(Clone, Debug, PartialEq, Parser)]
+/// Options for uploading to a Quetz server.
+/// Authentication is used from the keychain / auth-file.
+pub struct QuetzOpts {
+ /// The URL to your Quetz server
+ #[arg(short, long, env = "QUETZ_SERVER_URL")]
+ pub url: Url,
+
+ /// The URL to your channel
+ #[arg(short, long, env = "QUETZ_CHANNEL")]
+ pub channel: String,
+
+ /// The Quetz API key, if none is provided, the token is read from the keychain / auth-file
+ #[arg(short, long, env = "QUETZ_API_KEY")]
+ pub api_key: Option,
+}
+
+#[derive(Clone, Debug, PartialEq, Parser)]
+/// Options for uploading to an Artifactory channel.
+/// Authentication is used from the keychain / auth-file.
+pub struct ArtifactoryOpts {
+ /// The URL to your Artifactory server
+ #[arg(short, long, env = "ARTIFACTORY_SERVER_URL")]
+ pub url: Url,
+
+ /// The URL to your channel
+ #[arg(short, long, env = "ARTIFACTORY_CHANNEL")]
+ pub channel: String,
+
+ /// Your Artifactory username
+ #[arg(short = 'r', long, env = "ARTIFACTORY_USERNAME")]
+ pub username: Option,
+
+ /// Your Artifactory password
+ #[arg(short, long, env = "ARTIFACTORY_PASSWORD")]
+ pub password: Option,
+}
+
+/// Options for uploading to a prefix.dev server.
+/// Authentication is used from the keychain / auth-file
+#[derive(Clone, Debug, PartialEq, Parser)]
+pub struct PrefixOpts {
+ /// The URL to the prefix.dev server (only necessary for self-hosted instances)
+ #[arg(
+ short,
+ long,
+ env = "PREFIX_SERVER_URL",
+ default_value = "https://prefix.dev"
+ )]
+ pub url: Url,
+
+ /// The channel to upload the package to
+ #[arg(short, long, env = "PREFIX_CHANNEL")]
+ pub channel: String,
+
+ /// The prefix.dev API key, if none is provided, the token is read from the keychain / auth-file
+ #[arg(short, long, env = "PREFIX_API_KEY")]
+ pub api_key: Option,
+}
+
+/// Options for uploading to an Anaconda.org server
+#[derive(Clone, Debug, PartialEq, Parser)]
+pub struct AnacondaOpts {
+ /// The owner of the distribution (e.g. conda-forge or your username)
+ #[arg(short, long, env = "ANACONDA_OWNER")]
+ pub owner: String,
+
+ /// The channel / label to upload the package to (e.g. main / rc)
+ #[arg(short, long, env = "ANACONDA_CHANNEL", default_value = "main", num_args = 1..)]
+ pub channel: Vec,
+
+ /// The Anaconda API key, if none is provided, the token is read from the keychain / auth-file
+ #[arg(short, long, env = "ANACONDA_API_KEY")]
+ pub api_key: Option,
+
+ /// The URL to the Anaconda server
+ #[arg(
+ short,
+ long,
+ env = "ANACONDA_SERVER_URL",
+ default_value = "https://api.anaconda.org"
+ )]
+ pub url: Url,
+
+ /// Replace files on conflict
+ #[arg(long, short, env = "ANACONDA_FORCE", default_value = "false")]
+ pub force: bool,
+}
+
+/// Options for uploading to conda-forge
+#[derive(Clone, Debug, PartialEq, Parser)]
+pub struct CondaForgeOpts {
+ /// The Anaconda API key
+ #[arg(long, env = "STAGING_BINSTAR_TOKEN", required = true)]
+ pub staging_token: String,
+
+ /// The feedstock name
+ #[arg(long, env = "FEEDSTOCK_NAME", required = true)]
+ pub feedstock: String,
+
+ /// The feedstock token
+ #[arg(long, env = "FEEDSTOCK_TOKEN", required = true)]
+ pub feedstock_token: String,
+
+ /// The staging channel name
+ #[arg(long, env = "STAGING_CHANNEL", default_value = "cf-staging")]
+ pub staging_channel: String,
+
+ /// The Anaconda Server URL
+ #[arg(
+ long,
+ env = "ANACONDA_SERVER_URL",
+ default_value = "https://api.anaconda.org"
+ )]
+ pub anaconda_url: Url,
+
+ /// The validation endpoint url
+ #[arg(
+ long,
+ env = "VALIDATION_ENDPOINT",
+ default_value = "https://conda-forge.herokuapp.com/feedstock-outputs/copy"
+ )]
+ pub validation_endpoint: Url,
+
+ /// Post comment on promotion failure
+ #[arg(long, env = "POST_COMMENT_ON_ERROR", default_value = "true")]
+ pub post_comment_on_error: bool,
+
+ /// The CI provider
+ #[arg(long, env = "CI")]
+ pub provider: Option,
+
+ /// Dry run, don't actually upload anything
+ #[arg(long, env = "DRY_RUN", default_value = "false")]
+ pub dry_run: bool,
+}
+
+#[cfg(test)]
+mod test {
+ use super::PackageFormatAndCompression;
+ use rattler_conda_types::package::ArchiveType;
+ use rattler_package_streaming::write::CompressionLevel;
+ use std::str::FromStr;
+
+ #[test]
+ fn test_parse_packaging() {
+ let package_format = PackageFormatAndCompression::from_str("tar-bz2").unwrap();
+ assert_eq!(
+ package_format,
+ PackageFormatAndCompression {
+ archive_type: ArchiveType::TarBz2,
+ compression_level: CompressionLevel::Default
+ }
+ );
+
+ let package_format = PackageFormatAndCompression::from_str("conda").unwrap();
+ assert_eq!(
+ package_format,
+ PackageFormatAndCompression {
+ archive_type: ArchiveType::Conda,
+ compression_level: CompressionLevel::Default
+ }
+ );
+
+ let package_format = PackageFormatAndCompression::from_str("tar-bz2:1").unwrap();
+ assert_eq!(
+ package_format,
+ PackageFormatAndCompression {
+ archive_type: ArchiveType::TarBz2,
+ compression_level: CompressionLevel::Numeric(1)
+ }
+ );
+
+ let package_format = PackageFormatAndCompression::from_str(".tar.bz2:max").unwrap();
+ assert_eq!(
+ package_format,
+ PackageFormatAndCompression {
+ archive_type: ArchiveType::TarBz2,
+ compression_level: CompressionLevel::Highest
+ }
+ );
+
+ let package_format = PackageFormatAndCompression::from_str("tarbz2:5").unwrap();
+ assert_eq!(
+ package_format,
+ PackageFormatAndCompression {
+ archive_type: ArchiveType::TarBz2,
+ compression_level: CompressionLevel::Numeric(5)
+ }
+ );
+
+ let package_format = PackageFormatAndCompression::from_str("conda:1").unwrap();
+ assert_eq!(
+ package_format,
+ PackageFormatAndCompression {
+ archive_type: ArchiveType::Conda,
+ compression_level: CompressionLevel::Numeric(1)
+ }
+ );
+
+ let package_format = PackageFormatAndCompression::from_str("conda:max").unwrap();
+ assert_eq!(
+ package_format,
+ PackageFormatAndCompression {
+ archive_type: ArchiveType::Conda,
+ compression_level: CompressionLevel::Highest
+ }
+ );
+
+ let package_format = PackageFormatAndCompression::from_str("conda:-5").unwrap();
+ assert_eq!(
+ package_format,
+ PackageFormatAndCompression {
+ archive_type: ArchiveType::Conda,
+ compression_level: CompressionLevel::Numeric(-5)
+ }
+ );
+
+ let package_format = PackageFormatAndCompression::from_str("conda:fast").unwrap();
+ assert_eq!(
+ package_format,
+ PackageFormatAndCompression {
+ archive_type: ArchiveType::Conda,
+ compression_level: CompressionLevel::Lowest
+ }
+ );
+ }
+}
diff --git a/src/rebuild.rs b/src/rebuild.rs
index 11143d64c..5a94a5845 100644
--- a/src/rebuild.rs
+++ b/src/rebuild.rs
@@ -1,7 +1,10 @@
+//! The rebuild module contains rebuild helper functions.
+
use std::path::{Path, PathBuf};
use rattler_conda_types::package::ArchiveType;
+/// Extracts a folder from a tar.bz2 archive.
fn folder_from_tar_bz2(
archive_path: &Path,
find_path: &Path,
@@ -27,6 +30,7 @@ fn folder_from_tar_bz2(
Ok(())
}
+/// Extracts a folder from a conda archive.
fn folder_from_conda(
archive_path: &Path,
find_path: &Path,
@@ -58,7 +62,8 @@ fn folder_from_conda(
Ok(())
}
-pub(crate) fn extract_recipe(package: &Path, dest_folder: &Path) -> Result<(), std::io::Error> {
+/// Extracts a recipe from a package archive to a destination folder.
+pub fn extract_recipe(package: &Path, dest_folder: &Path) -> Result<(), std::io::Error> {
let archive_type = ArchiveType::try_from(package).ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::NotFound,
diff --git a/src/system_tools.rs b/src/system_tools.rs
index d66881115..0954ca505 100644
--- a/src/system_tools.rs
+++ b/src/system_tools.rs
@@ -2,10 +2,10 @@
use serde::{Deserialize, Serialize, Serializer};
use std::{
- cell::RefCell,
collections::{BTreeMap, HashMap},
path::PathBuf,
process::Command,
+ sync::{Arc, Mutex},
};
use thiserror::Error;
@@ -57,16 +57,16 @@ impl std::fmt::Display for Tool {
#[derive(Debug, Clone)]
pub struct SystemTools {
rattler_build_version: String,
- used_tools: RefCell>,
- found_tools: RefCell>,
+ used_tools: Arc>>,
+ found_tools: Arc>>,
}
impl Default for SystemTools {
fn default() -> Self {
Self {
rattler_build_version: env!("CARGO_PKG_VERSION").to_string(),
- used_tools: RefCell::new(HashMap::new()),
- found_tools: RefCell::new(HashMap::new()),
+ used_tools: Arc::new(Mutex::new(HashMap::new())),
+ found_tools: Arc::new(Mutex::new(HashMap::new())),
}
}
}
@@ -93,8 +93,8 @@ impl SystemTools {
Self {
rattler_build_version,
- used_tools: RefCell::new(used_tools),
- found_tools: RefCell::new(HashMap::new()),
+ used_tools: Arc::new(Mutex::new(used_tools)),
+ found_tools: Arc::new(Mutex::new(HashMap::new())),
}
}
@@ -148,9 +148,10 @@ impl SystemTools {
let found_version = found_version.trim().to_string();
self.found_tools
- .borrow_mut()
+ .lock()
+ .unwrap()
.insert(tool, tool_path.clone());
- let prev_version = self.used_tools.borrow().get(&tool).cloned();
+ let prev_version = self.used_tools.lock().unwrap().get(&tool).cloned();
if let Some(prev_version) = prev_version {
if prev_version != found_version {
@@ -161,7 +162,7 @@ impl SystemTools {
);
}
} else {
- self.used_tools.borrow_mut().insert(tool, found_version);
+ self.used_tools.lock().unwrap().insert(tool, found_version);
}
Ok(tool_path)
@@ -170,7 +171,7 @@ impl SystemTools {
/// Create a new `std::process::Command` for the given tool. The command is created with the
/// path to the tool and can be further configured with arguments and environment variables.
pub fn call(&self, tool: Tool) -> Result {
- let found_tool = self.found_tools.borrow().get(&tool).cloned();
+ let found_tool = self.found_tools.lock().unwrap().get(&tool).cloned();
let tool_path = if let Some(tool) = found_tool {
tool
} else {
@@ -185,7 +186,7 @@ impl SystemTools {
impl Serialize for SystemTools {
fn serialize(&self, serializer: S) -> Result {
let mut ordered_map = BTreeMap::new();
- let used_tools = self.used_tools.borrow();
+ let used_tools = self.used_tools.lock().unwrap();
for (tool, version) in used_tools.iter() {
ordered_map.insert(tool.to_string(), version);
}
@@ -222,10 +223,10 @@ mod tests {
let stdout = cmd.arg("--version").output().unwrap().stdout;
let version = String::from_utf8_lossy(&stdout).trim().to_string();
- let found_tools = system_tool.found_tools.borrow();
+ let found_tools = system_tool.found_tools.lock().unwrap();
assert!(found_tools.contains_key(&Tool::Patchelf));
- let used_tools = system_tool.used_tools.borrow();
+ let used_tools = system_tool.used_tools.lock().unwrap();
assert!(used_tools.contains_key(&Tool::Patchelf));
assert!(used_tools.get(&Tool::Patchelf).unwrap() == &version);
@@ -241,8 +242,8 @@ mod tests {
let system_tool = SystemTools {
rattler_build_version: "0.0.0".to_string(),
- used_tools: RefCell::new(used_tools),
- found_tools: RefCell::new(HashMap::new()),
+ used_tools: Arc::new(Mutex::new(used_tools)),
+ found_tools: Arc::new(Mutex::new(HashMap::new())),
};
let json = serde_json::to_string_pretty(&system_tool).unwrap();
@@ -251,7 +252,8 @@ mod tests {
let deserialized: SystemTools = serde_json::from_str(&json).unwrap();
assert!(deserialized
.used_tools
- .borrow()
+ .lock()
+ .unwrap()
.contains_key(&Tool::Patchelf));
}
}
diff --git a/src/tui/event.rs b/src/tui/event.rs
new file mode 100644
index 000000000..8c3298eb2
--- /dev/null
+++ b/src/tui/event.rs
@@ -0,0 +1,135 @@
+//! TUI event handling.
+
+use crossterm::event::{Event as CrosstermEvent, KeyEvent, MouseEvent};
+use futures::{FutureExt, StreamExt};
+use miette::IntoDiagnostic;
+use std::{path::PathBuf, time::Duration};
+use tokio::sync::mpsc;
+use tokio_util::sync::CancellationToken;
+
+use super::state::{BuildProgress, Package};
+use crate::BuildOutput;
+
+/// Terminal events.
+pub enum Event {
+ /// Terminal tick.
+ Tick,
+ /// Key press.
+ Key(KeyEvent),
+ /// Mouse click/scroll.
+ Mouse(MouseEvent),
+ /// Terminal resize.
+ Resize(u16, u16),
+ /// Resolves packages to build.
+ ResolvePackages(PathBuf),
+ /// Handles the result of resolving packages.
+ ProcessResolvedPackages(BuildOutput, Vec),
+ /// Start building.
+ StartBuild(usize),
+ /// Set build state.
+ SetBuildState(usize, BuildProgress),
+ /// Build log.
+ BuildLog(Vec),
+ /// Handle build error.
+ HandleBuildError(miette::Error, usize),
+ /// Handle console input.
+ HandleInput,
+ /// Edit recipe.
+ EditRecipe,
+}
+
+/// Terminal event handler.
+#[derive(Debug)]
+#[allow(dead_code)]
+pub struct EventHandler {
+ /// Tick rate.
+ tick_rate: Duration,
+ /// Event sender channel.
+ pub sender: mpsc::UnboundedSender,
+ /// Event receiver channel.
+ receiver: mpsc::UnboundedReceiver,
+ /// Event handler thread.
+ handler: tokio::task::JoinHandle<()>,
+ /// Token for cancelling the event loop.
+ cancellation_token: CancellationToken,
+}
+
+impl EventHandler {
+ /// Constructs a new instance.
+ pub fn new(tick_rate: u64) -> Self {
+ let (sender, receiver) = mpsc::unbounded_channel();
+ Self {
+ tick_rate: Duration::from_millis(tick_rate),
+ sender,
+ receiver,
+ handler: tokio::spawn(async {}),
+ cancellation_token: CancellationToken::new(),
+ }
+ }
+
+ /// Starts the event loop.
+ pub fn start(&mut self) {
+ self.cancel();
+ self.cancellation_token = CancellationToken::new();
+ let _cancellation_token = self.cancellation_token.clone();
+ let _sender = self.sender.clone();
+ let _tick_rate = self.tick_rate;
+ self.handler = tokio::spawn(async move {
+ let mut reader = crossterm::event::EventStream::new();
+ let mut tick = tokio::time::interval(_tick_rate);
+ loop {
+ let tick_delay = tick.tick();
+ let crossterm_event = reader.next().fuse();
+ tokio::select! {
+ _ = _cancellation_token.cancelled() => {
+ break;
+ }
+ _ = tick_delay => {
+ _sender.send(Event::Tick).unwrap();
+ }
+ Some(Ok(evt)) = crossterm_event => {
+ match evt {
+ CrosstermEvent::Key(key) => {
+ if key.kind == crossterm::event::KeyEventKind::Press {
+ _sender.send(Event::Key(key)).unwrap();
+ }
+ },
+ CrosstermEvent::Mouse(mouse) => {
+ _sender.send(Event::Mouse(mouse)).unwrap();
+ },
+ CrosstermEvent::Resize(x, y) => {
+ _sender.send(Event::Resize(x, y)).unwrap();
+ },
+ CrosstermEvent::FocusLost => {
+ },
+ CrosstermEvent::FocusGained => {
+ },
+ CrosstermEvent::Paste(_) => {
+ },
+ }
+ }
+ };
+ }
+ });
+ }
+
+ /// Cancels the event loop.
+ pub fn cancel(&self) {
+ self.cancellation_token.cancel();
+ }
+
+ /// Receive the next event from the handler thread.
+ ///
+ /// This function will always block the current thread if
+ /// there is no data available and it's possible for more data to be sent.
+ pub async fn next(&mut self) -> miette::Result {
+ self.receiver
+ .recv()
+ .await
+ .ok_or(std::io::Error::new(
+ std::io::ErrorKind::Other,
+ "IO error occurred",
+ ))
+ .into_diagnostic()
+ }
+}
diff --git a/src/tui/logger.rs b/src/tui/logger.rs
new file mode 100644
index 000000000..b4bc66ea1
--- /dev/null
+++ b/src/tui/logger.rs
@@ -0,0 +1,47 @@
+//! TUI log handler.
+
+use super::event::Event;
+use std::io;
+use tokio::sync::mpsc;
+use tracing_subscriber::fmt::MakeWriter;
+
+/// Writer for TUI logs.
+#[derive(Debug)]
+pub struct TuiOutputHandler {
+ /// Sender channel for logs.
+ pub log_sender: mpsc::UnboundedSender,
+}
+
+impl Clone for TuiOutputHandler {
+ fn clone(&self) -> Self {
+ Self {
+ log_sender: self.log_sender.clone(),
+ }
+ }
+}
+
+impl io::Write for TuiOutputHandler {
+ fn write(&mut self, buf: &[u8]) -> io::Result {
+ self.log_sender
+ .send(Event::BuildLog(buf.to_vec()))
+ .map_err(|e| {
+ io::Error::new(
+ io::ErrorKind::Other,
+ format!("could not send TUI event: {e}"),
+ )
+ })?;
+ Ok(buf.len())
+ }
+
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl<'a> MakeWriter<'a> for TuiOutputHandler {
+ type Writer = TuiOutputHandler;
+
+ fn make_writer(&'a self) -> Self::Writer {
+ self.clone()
+ }
+}
diff --git a/src/tui/mod.rs b/src/tui/mod.rs
new file mode 100644
index 000000000..ccdbd174f
--- /dev/null
+++ b/src/tui/mod.rs
@@ -0,0 +1,274 @@
+//! Terminal user interface for rattler-build.
+
+pub mod event;
+pub mod logger;
+mod render;
+mod state;
+mod utils;
+
+use event::*;
+use render::*;
+use state::*;
+
+use crossterm::event::{DisableMouseCapture, EnableMouseCapture};
+use crossterm::terminal::{self, EnterAlternateScreen, LeaveAlternateScreen};
+use miette::IntoDiagnostic;
+use ratatui::backend::Backend;
+use ratatui::prelude::*;
+use ratatui::Terminal;
+use std::io::{self, Stderr};
+use std::panic;
+
+use crate::build::run_build;
+use crate::console_utils::LoggingOutputHandler;
+use crate::get_recipe_path;
+use crate::opt::BuildOpts;
+
+use self::utils::run_editor;
+
+/// Representation of a terminal user interface.
+///
+/// It is responsible for setting up the terminal,
+/// initializing the interface and handling the draw events.
+#[derive(Debug)]
+pub struct Tui {
+ /// Interface to the Terminal.
+ terminal: Terminal,
+ /// Terminal event handler.
+ pub event_handler: EventHandler,
+ /// Is the interface paused?
+ pub paused: bool,
+}
+
+impl Tui {
+ /// Constructs a new instance of [`Tui`].
+ pub(crate) fn new(terminal: Terminal, event_handler: EventHandler) -> Self {
+ Self {
+ terminal,
+ event_handler,
+ paused: false,
+ }
+ }
+
+ /// Initializes the terminal interface.
+ ///
+ /// It enables the raw mode and sets terminal properties.
+ pub(crate) fn init(&mut self) -> miette::Result<()> {
+ terminal::enable_raw_mode().into_diagnostic()?;
+ crossterm::execute!(io::stderr(), EnterAlternateScreen, EnableMouseCapture)
+ .into_diagnostic()?;
+
+ // Define a custom panic hook to reset the terminal properties.
+ // This way, you won't have your terminal messed up if an unexpected error happens.
+ let panic_hook = panic::take_hook();
+ panic::set_hook(Box::new(move |panic| {
+ Self::reset().expect("failed to reset the terminal");
+ panic_hook(panic);
+ }));
+
+ self.terminal.hide_cursor().into_diagnostic()?;
+ self.terminal.clear().into_diagnostic()?;
+ self.event_handler.start();
+ Ok(())
+ }
+
+ /// Draw the terminal interface by rendering the widgets.
+ pub(crate) fn draw(&mut self, state: &mut TuiState) -> miette::Result<()> {
+ self.terminal
+ .draw(|frame| render_widgets(state, frame))
+ .into_diagnostic()?;
+ Ok(())
+ }
+
+ /// Toggles the paused state of interface.
+ ///
+ /// It disables the key input and exits the
+ /// terminal interface on pause (and vice-versa).
+ pub fn toggle_pause(&mut self) -> miette::Result<()> {
+ self.paused = !self.paused;
+ if self.paused {
+ Self::reset()?;
+ self.event_handler.cancel();
+ } else {
+ self.init()?;
+ self.event_handler.start();
+ }
+ Ok(())
+ }
+
+ /// Resets the terminal interface.
+ ///
+ /// This function is also used for the panic hook to revert
+ /// the terminal properties if unexpected errors occur.
+ fn reset() -> miette::Result<()> {
+ terminal::disable_raw_mode().into_diagnostic()?;
+ crossterm::execute!(io::stderr(), LeaveAlternateScreen, DisableMouseCapture)
+ .into_diagnostic()?;
+ Terminal::new(CrosstermBackend::new(io::stderr()))
+ .into_diagnostic()?
+ .show_cursor()
+ .into_diagnostic()?;
+ Ok(())
+ }
+
+ /// Exits the terminal interface.
+ ///
+ /// It disables the raw mode and reverts back the terminal properties.
+ pub(crate) fn exit(&mut self) -> miette::Result<()> {
+ Self::reset()?;
+ self.terminal.show_cursor().into_diagnostic()?;
+ Ok(())
+ }
+}
+
+/// Initializes the TUI.
+pub async fn init() -> miette::Result>> {
+ let backend = CrosstermBackend::new(io::stderr());
+ let terminal = Terminal::new(backend).into_diagnostic()?;
+ let events = EventHandler::new(250);
+ let mut tui = Tui::new(terminal, events);
+ tui.init()?;
+ Ok(tui)
+}
+
+/// Launches the terminal user interface.
+pub async fn run(
+ mut tui: Tui,
+ opts: BuildOpts,
+ log_handler: LoggingOutputHandler,
+) -> miette::Result<()> {
+ // Get the recipe.
+ let recipe_path = get_recipe_path(&opts.recipe)?;
+
+ // Create an application.
+ let mut state = TuiState::new(opts, log_handler);
+
+ // Resolve the packages to build.
+ tui.event_handler
+ .sender
+ .send(Event::ResolvePackages(recipe_path))
+ .into_diagnostic()?;
+
+ // Start the main loop.
+ while state.running {
+ // Render the user interface.
+ tui.draw(&mut state)?;
+ // Handle events.
+ match tui.event_handler.next().await? {
+ Event::Tick => state.tick(),
+ Event::Key(key_event) => {
+ handle_key_events(key_event, tui.event_handler.sender.clone(), &mut state)?
+ }
+ Event::Mouse(mouse_event) => {
+ handle_mouse_events(mouse_event, tui.event_handler.sender.clone(), &mut state)?
+ }
+ Event::Resize(_, _) => {}
+ Event::ResolvePackages(recipe_path) => {
+ let log_sender = tui.event_handler.sender.clone();
+ let state = state.clone();
+ tokio::spawn(async move {
+ let resolved = state.resolve_packages(recipe_path).await.unwrap();
+ log_sender
+ .send(Event::ProcessResolvedPackages(resolved.0, resolved.1))
+ .unwrap();
+ });
+ }
+ Event::ProcessResolvedPackages(build_output, packages) => {
+ state.build_output = Some(build_output);
+ state.packages = packages.clone();
+ }
+ Event::StartBuild(index) => {
+ if !state.is_building_package() {
+ let package = state.packages[index].clone();
+ let build_output = state.build_output.clone().unwrap();
+ let tool_config = build_output.tool_config.clone();
+ let log_sender = tui.event_handler.sender.clone();
+ let mut packages = Vec::new();
+ for subpackage in package.subpackages.iter() {
+ if let Some(i) = state.packages.iter().position(|v| v.name == *subpackage) {
+ packages.push((i, state.packages[i].clone()));
+ } else {
+ tracing::error!("Cannot find subpackage to build: {subpackage}")
+ }
+ }
+ packages.push((index, package.clone()));
+ tokio::spawn(async move {
+ for (i, package) in packages {
+ log_sender
+ .send(Event::SetBuildState(i, BuildProgress::Building))
+ .unwrap();
+ match run_build(package.output, &tool_config).await {
+ Ok((output, _archive)) => {
+ output.record_build_end();
+ let span = tracing::info_span!("Build summary");
+ let _enter = span.enter();
+ let _ = output.log_build_summary().map_err(|e| {
+ tracing::error!("Error writing build summary: {}", e);
+ e
+ });
+ log_sender
+ .send(Event::SetBuildState(i, BuildProgress::Done))
+ .unwrap();
+ }
+ Err(e) => {
+ tracing::error!("Error building package: {}", e);
+ log_sender.send(Event::HandleBuildError(e, i)).unwrap();
+ break;
+ }
+ };
+ }
+ });
+ }
+ }
+ Event::SetBuildState(index, progress) => {
+ state.selected_package = index;
+ state.packages[index].build_progress = progress;
+ }
+ Event::BuildLog(log) => {
+ if let Some(building_package) = state
+ .packages
+ .iter_mut()
+ .find(|p| p.build_progress.is_building())
+ {
+ building_package
+ .build_log
+ .push(String::from_utf8_lossy(&log).to_string());
+ } else {
+ state.log.push(String::from_utf8_lossy(&log).to_string());
+ }
+ }
+ Event::HandleBuildError(_, i) => {
+ state.packages[i].build_progress = BuildProgress::Failed;
+ }
+ Event::HandleInput => {
+ state.input_mode = false;
+ if state.input.value() == "edit" {
+ tui.event_handler
+ .sender
+ .send(Event::EditRecipe)
+ .into_diagnostic()?;
+ } else {
+ tracing::error!("Unknown command: {}", state.input.value());
+ tracing::info!("Available commands are: [edit]");
+ }
+ state.input.reset();
+ }
+ Event::EditRecipe => {
+ state.input_mode = false;
+ state.input.reset();
+ let build_output = state.build_output.clone().unwrap();
+ tui.toggle_pause()?;
+ run_editor(&build_output.recipe_path)?;
+ tui.event_handler
+ .sender
+ .send(Event::ResolvePackages(build_output.recipe_path))
+ .into_diagnostic()?;
+ tui.toggle_pause()?;
+ }
+ }
+ }
+
+ // Exit the user interface.
+ tui.exit()?;
+ Ok(())
+}
diff --git a/src/tui/render.rs b/src/tui/render.rs
new file mode 100644
index 000000000..315d30d05
--- /dev/null
+++ b/src/tui/render.rs
@@ -0,0 +1,376 @@
+use super::event::Event;
+use super::state::TuiState;
+use ansi_to_tui::IntoText;
+use crossterm::event::{
+ Event as CrosstermEvent, KeyCode, KeyEvent, KeyModifiers, MouseButton, MouseEvent,
+ MouseEventKind,
+};
+use miette::IntoDiagnostic;
+use ratatui::layout::Position;
+use ratatui::prelude::*;
+use ratatui::widgets::{Scrollbar, ScrollbarOrientation, ScrollbarState};
+use ratatui::{
+ layout::Alignment,
+ style::{Color, Style},
+ widgets::{Block, BorderType, Paragraph},
+ Frame,
+};
+use tokio::sync::mpsc;
+use tui_input::backend::crossterm::EventHandler;
+
+/// Key bindings.
+///
+/// `(key, description)` pairs rendered as the footer help bar by
+/// `render_widgets`. The order here is the display order.
+const KEY_BINDINGS: &[(&str, &str)] = &[
+    ("Enter", "Build"),
+    ("q", "Quit"),
+    ("j", "Next"),
+    ("k", "Prev"),
+    ("↕ ↔ ", "Scroll"),
+    ("e", "Edit Recipe"),
+    ("c", "Console"),
+];
+
+/// Handles the key events and updates the state.
+///
+/// When the command prompt is active (`state.input_mode`), keys are routed to
+/// the input widget (Enter submits, Esc/Ctrl-C cancels). Otherwise keys drive
+/// navigation, scrolling, and build/edit actions sent over `sender`.
+pub(crate) fn handle_key_events(
+    key_event: KeyEvent,
+    sender: mpsc::UnboundedSender<Event>,
+    state: &mut TuiState,
+) -> miette::Result<()> {
+    if state.input_mode {
+        match key_event.code {
+            KeyCode::Enter => sender.send(Event::HandleInput).into_diagnostic()?,
+            KeyCode::Esc => {
+                state.input_mode = false;
+            }
+            KeyCode::Char('c') | KeyCode::Char('C') => {
+                // Ctrl-C leaves the prompt; a plain 'c' is typed into it.
+                if key_event.modifiers == KeyModifiers::CONTROL {
+                    state.input_mode = false;
+                } else {
+                    state.input.handle_event(&CrosstermEvent::Key(key_event));
+                }
+            }
+            _ => {
+                state.input.handle_event(&CrosstermEvent::Key(key_event));
+            }
+        }
+        return Ok(());
+    }
+    match key_event.code {
+        // `input_mode` is known to be false here (handled above), so quit directly.
+        KeyCode::Esc | KeyCode::Char('q') => {
+            state.quit();
+        }
+        KeyCode::Char('c') | KeyCode::Char('C') => {
+            if key_event.modifiers == KeyModifiers::CONTROL {
+                state.quit();
+            } else {
+                state.input_mode = true;
+            }
+        }
+        // j/k cycle through the package list, wrapping at both ends, and
+        // reset the vertical scroll for the newly selected package's log.
+        KeyCode::Char('j') => {
+            state.vertical_scroll = 0;
+            state.selected_package = if state.selected_package >= state.packages.len() - 1 {
+                0
+            } else {
+                state.selected_package + 1
+            }
+        }
+        KeyCode::Up => {
+            state.vertical_scroll += 5;
+        }
+        KeyCode::Char('k') => {
+            state.vertical_scroll = 0;
+            state.selected_package = if state.selected_package == 0 {
+                state.packages.len() - 1
+            } else {
+                state.selected_package - 1
+            }
+        }
+        KeyCode::Down => {
+            if state.vertical_scroll > 1 {
+                state.vertical_scroll = state.vertical_scroll.saturating_sub(5);
+            }
+        }
+        KeyCode::Right => {
+            state.horizontal_scroll += 5;
+        }
+        KeyCode::Left => {
+            state.horizontal_scroll = state.horizontal_scroll.saturating_sub(5);
+        }
+        KeyCode::Enter => sender
+            .send(Event::StartBuild(state.selected_package))
+            .into_diagnostic()?,
+        KeyCode::Char(':') => {
+            state.input.reset();
+            state.input_mode = true;
+        }
+        KeyCode::Char('e') => sender.send(Event::EditRecipe).into_diagnostic()?,
+        _ => {}
+    }
+    Ok(())
+}
+
+/// Handles the mouse events and updates the state.
+///
+/// Scroll wheel events adjust the log scroll offsets, cursor movement updates
+/// per-package hover flags, and a left click on a hovered package requests a
+/// build for it via `sender`.
+pub(crate) fn handle_mouse_events(
+    mouse_event: MouseEvent,
+    sender: mpsc::UnboundedSender<Event>,
+    state: &mut TuiState,
+) -> miette::Result<()> {
+    match mouse_event.kind {
+        MouseEventKind::ScrollDown => {
+            if state.vertical_scroll > 1 {
+                state.vertical_scroll = state.vertical_scroll.saturating_sub(5);
+            }
+        }
+        MouseEventKind::ScrollUp => {
+            state.vertical_scroll += 5;
+        }
+        MouseEventKind::ScrollRight => {
+            state.horizontal_scroll += 5;
+        }
+        MouseEventKind::ScrollLeft => {
+            state.horizontal_scroll = state.horizontal_scroll.saturating_sub(5);
+        }
+        MouseEventKind::Moved => {
+            // Track which package tile the cursor is over for hover styling.
+            let p = Position::new(mouse_event.column, mouse_event.row);
+            state.packages.iter_mut().for_each(|package| {
+                package.is_hovered = package.area.contains(p);
+            })
+        }
+        MouseEventKind::Down(MouseButton::Left) => {
+            // Clicking a hovered package starts its build.
+            if let Some(selected_pos) = state.packages.iter().position(|p| p.is_hovered) {
+                sender
+                    .send(Event::StartBuild(selected_pos))
+                    .into_diagnostic()?
+            }
+        }
+        _ => {}
+    }
+    Ok(())
+}
+
+/// Renders the user interface widgets.
+///
+/// Layout: a title line, a footer with key bindings, a package list pane on
+/// the left, and the build-log pane (with optional command prompt and
+/// scrollbars) on the right.
+pub(crate) fn render_widgets(state: &mut TuiState, frame: &mut Frame) {
+    frame.render_widget(
+        Block::new()
+            .title_top(Line::from("rattler-build-tui").style(Style::default().bold()))
+            .title_alignment(Alignment::Center),
+        frame.size(),
+    );
+    let rects = Layout::vertical([Constraint::Percentage(100), Constraint::Min(3)])
+        .margin(1)
+        .split(frame.size());
+    // Footer: "<key: description>" spans for every entry in KEY_BINDINGS.
+    frame.render_widget(
+        Paragraph::new(
+            Line::default()
+                .spans(
+                    KEY_BINDINGS
+                        .iter()
+                        .flat_map(|(key, desc)| {
+                            vec![
+                                "<".fg(Color::Rgb(100, 100, 100)),
+                                key.yellow(),
+                                ": ".fg(Color::Rgb(100, 100, 100)),
+                                Span::from(*desc),
+                                "> ".fg(Color::Rgb(100, 100, 100)),
+                            ]
+                        })
+                        .collect::<Vec<Span>>(),
+                )
+                .alignment(Alignment::Center),
+        )
+        .block(
+            Block::bordered()
+                .title_bottom(Line::from(format!("|{}|", env!("CARGO_PKG_VERSION"))))
+                .title_alignment(Alignment::Right)
+                .border_type(BorderType::Rounded)
+                .border_style(Style::default().fg(Color::Rgb(100, 100, 100))),
+        ),
+        rects[1],
+    );
+    frame.render_widget(
+        Block::new()
+            .title_top(Line::from("rattler-build-tui").style(Style::default().bold()))
+            .title_alignment(Alignment::Center),
+        rects[0],
+    );
+    let rects = Layout::horizontal([Constraint::Percentage(20), Constraint::Percentage(80)])
+        .split(rects[0]);
+    {
+        // Package list pane.
+        frame.render_widget(
+            Block::bordered()
+                .title_top("|Packages|".yellow())
+                .title_alignment(Alignment::Center)
+                .border_type(BorderType::Rounded)
+                .border_style(Style::default().fg(Color::Rgb(100, 100, 100))),
+            rects[0],
+        );
+
+        if !state.packages.is_empty() {
+            let rects =
+                Layout::vertical([Constraint::Min(2)].repeat(((rects[0].height - 2) / 3) as usize))
+                    .margin(1)
+                    .split(rects[0]);
+            for (i, package) in state.packages.iter_mut().enumerate() {
+                // Remember the tile area so mouse hover/click can hit-test it.
+                package.area = rects[i];
+                frame.render_widget(
+                    Block::bordered()
+                        .border_type(BorderType::Rounded)
+                        .border_style({
+                            let mut style = Style::new().fg(package.build_progress.as_color());
+                            if package.is_hovered && !package.build_progress.is_building() {
+                                style = style.yellow()
+                            } else if state.selected_package == i {
+                                if package.build_progress.is_building() {
+                                    style = style.green()
+                                } else {
+                                    style = style.white();
+                                }
+                            }
+                            style
+                        }),
+                    rects[i],
+                );
+                let item = Layout::horizontal([Constraint::Min(3), Constraint::Percentage(100)])
+                    .margin(1)
+                    .split(rects[i]);
+                frame.render_stateful_widget(
+                    throbber_widgets_tui::Throbber::default()
+                        .style(Style::default().fg(Color::Cyan))
+                        .throbber_style(
+                            Style::default()
+                                .fg(package.build_progress.as_color())
+                                .add_modifier(Modifier::BOLD),
+                        )
+                        .throbber_set(throbber_widgets_tui::BLACK_CIRCLE)
+                        .use_type(throbber_widgets_tui::WhichUse::Spin),
+                    item[0],
+                    &mut package.spinner_state,
+                );
+                let mut line = Line::from(vec![
+                    package.name.clone().into(),
+                    "-".fg(Color::Rgb(100, 100, 100)),
+                    package.version.clone().into(),
+                    package
+                        .build_string
+                        .clone()
+                        .map(|v| format!("{}{v}", "-".fg(Color::Rgb(100, 100, 100))))
+                        .unwrap_or_default()
+                        .into(),
+                ]);
+                // Drop the build string when the full label does not fit the row.
+                if item[1].width < line.width() as u16 {
+                    line = Line::from(vec![
+                        package.name.clone().into(),
+                        "-".fg(Color::Rgb(100, 100, 100)),
+                        package.version.clone().into(),
+                    ]);
+                }
+                frame.render_widget(Paragraph::new(line), item[1]);
+            }
+        }
+    }
+
+    // Build log pane: global log lines followed by the selected package's log.
+    let mut log_lines = state.log.clone();
+    if let Some(selected_package) = state.packages.get(state.selected_package) {
+        log_lines.extend(selected_package.build_log.clone());
+    }
+    let log_lines = log_lines
+        .iter()
+        .map(|l| l.trim_end())
+        .collect::<Vec<&str>>();
+    let logs = log_lines.join("\n").into_text().unwrap().on_black();
+    // Scroll from the bottom: 0 means "pinned to the latest output".
+    let vertical_scroll = (logs.height() as u16)
+        .saturating_sub(rects[1].height.saturating_sub(3))
+        .saturating_sub(state.vertical_scroll);
+    if vertical_scroll == 0 {
+        state.vertical_scroll =
+            (logs.height() as u16).saturating_sub(rects[1].height.saturating_sub(3));
+    }
+
+    // Command prompt: when active, it takes the bottom rows of the log pane.
+    let logs_rect = if state.input_mode {
+        let rects =
+            Layout::vertical([Constraint::Percentage(100), Constraint::Min(3)]).split(rects[1]);
+        frame.render_widget(
+            Paragraph::new(Line::from(vec!["> ".yellow(), state.input.value().into()])).block(
+                Block::bordered()
+                    .border_type(BorderType::Rounded)
+                    .border_style(Style::default().fg(Color::Rgb(100, 100, 100))),
+            ),
+            rects[1],
+        );
+        frame.set_cursor(
+            rects[1].x + state.input.visual_cursor() as u16 + 3,
+            rects[1].y + 1,
+        );
+        rects[0]
+    } else {
+        rects[1]
+    };
+
+    frame.render_widget(
+        Paragraph::new(logs.clone())
+            .block(
+                Block::bordered()
+                    .title_top(
+                        match state.packages.get(state.selected_package) {
+                            Some(package) => {
+                                format!("|Build Logs for {}|", package.name)
+                            }
+                            None => String::from("|Build Logs|"),
+                        }
+                        .yellow(),
+                    )
+                    .title_alignment(Alignment::Left)
+                    .border_type(BorderType::Rounded)
+                    .border_style(Style::default().fg(Color::Rgb(100, 100, 100))),
+            )
+            .scroll((vertical_scroll, state.horizontal_scroll)),
+        logs_rect,
+    );
+
+    let scrollbar = Scrollbar::new(ScrollbarOrientation::VerticalRight)
+        .begin_symbol(Some("↑"))
+        .end_symbol(Some("↓"));
+
+    let mut scrollbar_state =
+        ScrollbarState::new(logs.height().saturating_sub(logs_rect.height.into()))
+            .position(vertical_scroll.into());
+
+    frame.render_stateful_widget(
+        scrollbar,
+        logs_rect.inner(&Margin {
+            vertical: 1,
+            horizontal: 0,
+        }),
+        &mut scrollbar_state,
+    );
+
+    let scrollbar = Scrollbar::new(ScrollbarOrientation::HorizontalBottom)
+        .thumb_symbol("🬋")
+        .begin_symbol(Some("←"))
+        .end_symbol(Some("→"));
+
+    // Horizontal scrollbar range is the widest log line minus the pane width.
+    let max_width = logs
+        .lines
+        .iter()
+        .map(|l| l.width())
+        .max()
+        .unwrap_or_default();
+    let content_length = max_width.saturating_sub(logs_rect.width.saturating_sub(2).into());
+    if content_length == 0 {
+        state.horizontal_scroll = 0;
+    }
+    let mut scrollbar_state =
+        ScrollbarState::new(content_length).position(state.horizontal_scroll.into());
+
+    frame.render_stateful_widget(
+        scrollbar,
+        logs_rect.inner(&Margin {
+            vertical: 0,
+            horizontal: 1,
+        }),
+        &mut scrollbar_state,
+    );
+}
diff --git a/src/tui/state.rs b/src/tui/state.rs
new file mode 100644
index 000000000..534143e09
--- /dev/null
+++ b/src/tui/state.rs
@@ -0,0 +1,156 @@
+use std::path::PathBuf;
+
+use ratatui::{layout::Rect, style::Color};
+use throbber_widgets_tui::ThrobberState;
+use tui_input::Input;
+
+use crate::{
+ console_utils::LoggingOutputHandler, get_build_output, metadata::Output, opt::BuildOpts,
+ BuildOutput,
+};
+
+/// Representation of a package.
+#[derive(Clone, Debug)]
+pub struct Package {
+    /// Normalized package name.
+    pub name: String,
+    /// Package version rendered as a string.
+    pub version: String,
+    /// Build string, if the output defines one.
+    pub build_string: Option<String>,
+    /// Names of the subpackages (excluding this package itself).
+    pub subpackages: Vec<String>,
+    /// Current build progress.
+    pub build_progress: BuildProgress,
+    /// Captured build log lines for this package.
+    pub build_log: Vec<String>,
+    /// Spinner animation state shown while building.
+    pub spinner_state: ThrobberState,
+    /// Screen area of the package tile (used for mouse hit-testing).
+    pub area: Rect,
+    /// Whether the mouse cursor is currently over the tile.
+    pub is_hovered: bool,
+    /// The build output this package was created from.
+    pub output: Output,
+}
+
+/// Build progress.
+#[derive(Clone, Debug, Default, PartialEq)]
+pub enum BuildProgress {
+    /// Build has not been started yet (default).
+    #[default]
+    None,
+    /// Build is currently running.
+    Building,
+    /// Build finished with an error.
+    Failed,
+    /// Build finished successfully.
+    Done,
+}
+
+impl BuildProgress {
+    /// Returns `true` when the package build is currently in progress.
+    pub fn is_building(&self) -> bool {
+        matches!(self, Self::Building)
+    }
+
+    /// Maps the progress state to the color used when rendering it.
+    pub fn as_color(&self) -> Color {
+        match self {
+            Self::None => Color::Rgb(100, 100, 100),
+            Self::Building => Color::Yellow,
+            Self::Failed => Color::Red,
+            Self::Done => Color::Green,
+        }
+    }
+}
+
+/// Application state.
+#[derive(Clone)]
+pub(crate) struct TuiState {
+    /// Build output.
+    pub build_output: Option<BuildOutput>,
+    /// Build options.
+    pub build_opts: BuildOpts,
+    /// Log handler.
+    pub log_handler: LoggingOutputHandler,
+    /// Is the application running?
+    pub running: bool,
+    /// Packages to build.
+    pub packages: Vec<Package>,
+    /// Index of the selected package.
+    pub selected_package: usize,
+    /// Vertical scroll value.
+    pub vertical_scroll: u16,
+    /// Horizontal scroll value.
+    pub horizontal_scroll: u16,
+    /// Application log.
+    pub log: Vec<String>,
+    /// Is the input mode enabled?
+    pub input_mode: bool,
+    /// Current value of the prompt input.
+    pub input: Input,
+}
+
+impl TuiState {
+    /// Constructs a new instance.
+    pub fn new(build_opts: BuildOpts, log_handler: LoggingOutputHandler) -> Self {
+        Self {
+            build_output: None,
+            // `build_opts` is passed by value; no clone is needed.
+            build_opts,
+            log_handler,
+            running: true,
+            packages: Vec::new(),
+            selected_package: 0,
+            vertical_scroll: 0,
+            horizontal_scroll: 0,
+            log: Vec::new(),
+            input_mode: false,
+            input: Input::default(),
+        }
+    }
+
+    /// Resolves and returns the packages to build.
+    ///
+    /// Runs the build-output resolution for `recipe_path` and converts each
+    /// resolved output into a [`Package`] with default (not-started) UI state.
+    ///
+    /// # Errors
+    ///
+    /// Returns an error if the build output cannot be resolved.
+    pub async fn resolve_packages(
+        &self,
+        recipe_path: PathBuf,
+    ) -> miette::Result<(BuildOutput, Vec<Package>)> {
+        let build_output = get_build_output(
+            self.build_opts.clone(),
+            recipe_path,
+            self.log_handler.clone(),
+        )
+        .await?;
+        let packages = build_output
+            .outputs
+            .iter()
+            .map(|output| {
+                let name = output.name().as_normalized().to_string();
+                Package {
+                    name: name.clone(),
+                    version: output.version().to_string(),
+                    build_string: output.build_string().map(String::from),
+                    // Subpackage names, excluding the package itself.
+                    subpackages: output
+                        .build_configuration
+                        .subpackages
+                        .keys()
+                        .map(|v| v.as_normalized().to_string())
+                        .filter(|v| v != &name)
+                        .collect(),
+                    build_progress: BuildProgress::None,
+                    build_log: Vec::new(),
+                    spinner_state: ThrobberState::default(),
+                    area: Rect::default(),
+                    is_hovered: false,
+                    output: output.clone(),
+                }
+            })
+            .collect();
+        Ok((build_output, packages))
+    }
+
+    /// Handles the tick event of the terminal.
+    ///
+    /// Advances the spinner animation of every package that is building.
+    pub fn tick(&mut self) {
+        self.packages.iter_mut().for_each(|package| {
+            if package.build_progress.is_building() {
+                package.spinner_state.calc_next();
+            }
+        })
+    }
+
+    /// Set running to false to quit the application.
+    pub fn quit(&mut self) {
+        self.running = false;
+    }
+
+    /// Returns true if a package is building currently.
+    pub fn is_building_package(&self) -> bool {
+        self.packages.iter().any(|p| p.build_progress.is_building())
+    }
+}
diff --git a/src/tui/utils.rs b/src/tui/utils.rs
new file mode 100644
index 000000000..357f510db
--- /dev/null
+++ b/src/tui/utils.rs
@@ -0,0 +1,11 @@
+use miette::IntoDiagnostic;
+use std::env;
+use std::path::Path;
+use std::process::Command;
+
+/// Runs the user's default editor (`$EDITOR`) on the file at `path` and
+/// waits for it to exit.
+///
+/// # Errors
+///
+/// Returns an error if `$EDITOR` is not set, the editor cannot be spawned,
+/// or the editor exits with a non-zero status.
+pub fn run_editor(path: &Path) -> miette::Result<()> {
+    let editor = env::var("EDITOR").into_diagnostic()?;
+    let status = Command::new(editor).arg(path).status().into_diagnostic()?;
+    // A non-zero exit means the edit likely failed; surface it to the caller
+    // instead of silently continuing.
+    if !status.success() {
+        return Err(miette::miette!("editor exited with {status}"));
+    }
+    Ok(())
+}
diff --git a/src/upload/conda_forge.rs b/src/upload/conda_forge.rs
index ee247e9aa..68a97d1d3 100644
--- a/src/upload/conda_forge.rs
+++ b/src/upload/conda_forge.rs
@@ -1,3 +1,5 @@
+//! Conda-forge package uploader.
+
use std::{
collections::HashMap,
path::{Path, PathBuf},
@@ -6,7 +8,7 @@ use std::{
use miette::{miette, IntoDiagnostic};
use tracing::{debug, info};
-use crate::{upload::get_default_client, CondaForgeOpts};
+use crate::{opt::CondaForgeOpts, upload::get_default_client};
use super::{
anaconda,
@@ -41,6 +43,7 @@ async fn get_channel_target_from_variant_config(
Ok(label.to_string())
}
+/// Uploads the package conda forge.
pub async fn upload_packages_to_conda_forge(
opts: CondaForgeOpts,
package_files: &Vec,
diff --git a/src/upload/mod.rs b/src/upload/mod.rs
index cdae07dfe..017c28ddb 100644
--- a/src/upload/mod.rs
+++ b/src/upload/mod.rs
@@ -1,6 +1,8 @@
+//! The upload module provides the package upload functionality.
+
+use crate::tool_configuration::APP_USER_AGENT;
use futures::TryStreamExt;
use indicatif::{style::TemplateError, HumanBytes, ProgressState};
-use rattler_build::tool_configuration::APP_USER_AGENT;
use std::{
fmt::Write,
path::{Path, PathBuf},
@@ -47,6 +49,7 @@ fn get_default_client() -> Result {
.build()
}
+/// Uploads package files to a Quetz server.
pub async fn upload_package_to_quetz(
storage: &AuthenticationStorage,
api_key: Option,
@@ -101,6 +104,7 @@ pub async fn upload_package_to_quetz(
Ok(())
}
+/// Uploads package files to an Artifactory server.
pub async fn upload_package_to_artifactory(
storage: &AuthenticationStorage,
username: Option,
@@ -166,6 +170,7 @@ pub async fn upload_package_to_artifactory(
Ok(())
}
+/// Uploads package files to a prefix.dev server.
pub async fn upload_package_to_prefix(
storage: &AuthenticationStorage,
api_key: Option,
@@ -227,6 +232,7 @@ pub async fn upload_package_to_prefix(
Ok(())
}
+/// Uploads package files to an Anaconda server.
pub async fn upload_package_to_anaconda(
storage: &AuthenticationStorage,
token: Option,