From 06f14870086225541770c55740b5cbefec5b2cd7 Mon Sep 17 00:00:00 2001 From: Daniel Voogsgerd Date: Thu, 27 Mar 2025 17:03:35 +0100 Subject: [PATCH 01/12] feat(xtask): Add xtasks xtasks allow us to write rust implementations for common tasks that happen around the development of Brane. The advantage of writing these tasks in rust itself is that it does not pose any additional dependencies besides the ones that we already have. Additionally, it provides us with the full power of the rust language in automating these common tasks Inspired by: https://github.com/matklad/cargo-xtask We start off with two subcommands: completions and manpage generation This also allows us to remove the pesky completion binaries from the brane-ctl and brane-cli workspace members. --- .cargo/config.toml | 3 + Cargo.lock | 33 +++++++++++ Cargo.toml | 2 + brane-cli/Cargo.toml | 5 -- brane-cli/src/completions.rs | 17 ------ brane-ctl/Cargo.toml | 5 -- brane-ctl/src/completions.rs | 18 ------ xtask/Cargo.toml | 26 +++++++++ xtask/src/main.rs | 103 +++++++++++++++++++++++++++++++++++ 9 files changed, 167 insertions(+), 45 deletions(-) delete mode 100644 brane-cli/src/completions.rs delete mode 100644 brane-ctl/src/completions.rs create mode 100644 xtask/Cargo.toml create mode 100644 xtask/src/main.rs diff --git a/.cargo/config.toml b/.cargo/config.toml index 37395f2d..ebdbc6d5 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -5,3 +5,6 @@ # For building branelet and stuff for M1 macs on Linux: [target.aarch64-unknown-linux-musl] linker = "aarch64-linux-musl-gcc" + +[alias] +xtask = "run --package xtask --" diff --git a/Cargo.lock b/Cargo.lock index 4948cf6a..f0640396 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1182,6 +1182,16 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +[[package]] +name = "clap_mangen" +version = "0.2.26" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "724842fa9b144f9b89b3f3d371a89f3455eea660361d13a554f68f8ae5d6c13a" +dependencies = [ + "clap", + "roff", +] + [[package]] name = "clipboard-win" version = "5.4.0" @@ -3842,6 +3852,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "roff" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88f8660c1ff60292143c98d08fc6e2f654d722db50410e3f3797d40baaf9d8f3" + [[package]] name = "rust-argon2" version = "0.8.3" @@ -5820,6 +5836,23 @@ dependencies = [ "rustix", ] +[[package]] +name = "xtask" +version = "3.0.0" +dependencies = [ + "brane-cfg", + "brane-cli", + "brane-ctl", + "brane-shr 3.0.0", + "brane-tsk", + "clap", + "clap_complete", + "clap_mangen", + "humantime", + "jsonwebtoken", + "specifications 3.0.0", +] + [[package]] name = "yaml-rust2" version = "0.8.1" diff --git a/Cargo.toml b/Cargo.toml index a1ae1d91..8eafd5a8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -50,6 +50,8 @@ members = [ # # TODO # "brane-log", + + "xtask", ] diff --git a/brane-cli/Cargo.toml b/brane-cli/Cargo.toml index 0dcb4b64..ed9f2da1 100644 --- a/brane-cli/Cargo.toml +++ b/brane-cli/Cargo.toml @@ -12,11 +12,6 @@ license.workspace = true name = "brane" path = "src/main.rs" -[[bin]] -name = "brane-completions" -path = "src/completions.rs" -doc = false - [dependencies] anyhow = "1.0.66" async-trait = "0.1.67" diff --git a/brane-cli/src/completions.rs b/brane-cli/src/completions.rs deleted file mode 100644 index c41f8659..00000000 --- a/brane-cli/src/completions.rs +++ /dev/null @@ -1,17 +0,0 @@ -use std::fs::File; - -use clap::CommandFactory; -use clap_complete::generate; -use clap_complete::shells::Shell::*; - -mod cli; -use cli::*; - -fn main() { - let mut command = Cli::command(); - - for (filename, shell) in [("brane.bash", Bash), ("brane.fish", Fish), ("brane.zsh", Zsh)] { - let mut file = File::create(filename).expect("Could not open/create completions file"); 
- generate(shell, &mut command, "brane", &mut file); - } -} diff --git a/brane-ctl/Cargo.toml b/brane-ctl/Cargo.toml index 6f9356f7..66056fe3 100644 --- a/brane-ctl/Cargo.toml +++ b/brane-ctl/Cargo.toml @@ -12,11 +12,6 @@ license.workspace = true name = "branectl" path = "src/main.rs" -[[bin]] -name = "branectl-completions" -path = "src/completions.rs" -doc = false - [dependencies] base64ct = "1.6.0" bollard = "0.18.0" diff --git a/brane-ctl/src/completions.rs b/brane-ctl/src/completions.rs deleted file mode 100644 index 17ac3580..00000000 --- a/brane-ctl/src/completions.rs +++ /dev/null @@ -1,18 +0,0 @@ -#![allow(dead_code)] -use std::fs::File; - -use clap::CommandFactory; -use clap_complete::generate; -use clap_complete::shells::Shell::*; - -mod cli; -use cli::*; - -fn main() { - let mut command = Cli::command(); - - for (filename, shell) in [("branectl.bash", Bash), ("branectl.fish", Fish), ("branectl.zsh", Zsh)] { - let mut file = File::create(filename).expect("Could not open/create completions file"); - generate(shell, &mut command, "branectl", &mut file); - } -} diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml new file mode 100644 index 00000000..3a63c5b6 --- /dev/null +++ b/xtask/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "xtask" +version.workspace = true +edition.workspace = true +authors.workspace = true +description.workspace = true +repository.workspace = true +documentation.workspace = true +license.workspace = true + +[dependencies] +clap = "4.0.0" +humantime = "2.0.0" +jsonwebtoken = "9.0.0" +clap_complete = "4.0.0" + +brane-cfg = { path = "../brane-cfg" } +brane-ctl = { path = "../brane-ctl" } +brane-cli = { path = "../brane-cli" } +brane-shr = { path = "../brane-shr" } +brane-tsk = { path = "../brane-tsk" } +specifications = { path = "../specifications" } +clap_mangen = "0.2.26" + +[lints] +workspace = true diff --git a/xtask/src/main.rs b/xtask/src/main.rs new file mode 100644 index 00000000..01f466c1 --- /dev/null +++ b/xtask/src/main.rs @@ 
-0,0 +1,103 @@ +#![allow(dead_code)] +use std::fs::File; +use std::path::PathBuf; + +use clap::{CommandFactory, Parser, Subcommand, ValueEnum}; +use clap_complete::generate; +use clap_complete::shells::Shell; + +const SHELLS: [(&str, Shell); 3] = [("bash", Shell::Bash), ("fish", Shell::Fish), ("zsh", Shell::Zsh)]; + +mod completions { + pub(crate) mod ctl { + include!("../../brane-ctl/src/cli.rs"); + } + pub(crate) mod cli { + include!("../../brane-cli/src/cli.rs"); + } +} + +#[derive(Debug, Parser)] +#[clap(name = "xtask")] +struct Arguments { + #[clap(subcommand)] + pub(crate) subcommand: XTaskSubcommand, +} + +#[derive(Debug, Subcommand)] +enum XTaskSubcommand { + #[clap(name = "completions")] + Completions { + #[clap(long)] + shell: Option, + #[clap(name = "binary")] + binary: Binary, + }, + #[clap(name = "manpage")] + ManPage { binary: Binary }, +} + +#[derive(Debug, Clone, Copy, ValueEnum)] +enum Binary { + #[clap(name = "branectl")] + Branectl, + #[clap(name = "brane")] + Brane, + #[clap(name = "xtask")] + XTask, +} + +impl Binary { + fn to_binary_name(self) -> &'static str { + use Binary::*; + match self { + Branectl => "branectl", + Brane => "brane", + XTask => "xtask", + } + } + + fn to_command(self) -> clap::Command { + use Binary::*; + match self { + Branectl => completions::ctl::Arguments::command(), + Brane => completions::cli::Cli::command(), + XTask => Arguments::command(), + } + } +} + +fn main() { + let opts = Arguments::parse(); + match opts.subcommand { + XTaskSubcommand::Completions { binary, shell } => { + let bin_name = binary.to_binary_name(); + let mut command = binary.to_command(); + + if let Some(shell) = shell { + if let Some((extension, sh)) = SHELLS.iter().find(|(name, _)| *name == shell) { + let mut file = File::create(format!("{bin_name}.{extension}")).expect("Could not open/create completions file"); + generate(*sh, &mut command, bin_name, &mut file); + } + } else { + for (extension, shell) in SHELLS { + let mut file = 
File::create(format!("{bin_name}.{extension}")).expect("Could not open/create completions file"); + generate(shell, &mut command, bin_name, &mut file); + } + } + }, + XTaskSubcommand::ManPage { binary } => { + let out_dir = PathBuf::from("./"); + + let name = binary.to_binary_name(); + let command = binary.to_command(); + + let man = clap_mangen::Man::new(command); + + let mut buffer: Vec = Default::default(); + man.render(&mut buffer).unwrap(); + + std::fs::write(out_dir.join(format!("{name}.1")), buffer).unwrap(); + }, + } +} From b5a93c78e53095b2e7caaeaedef49569ea44cda0 Mon Sep 17 00:00:00 2001 From: Daniel Voogsgerd Date: Fri, 28 Mar 2025 10:20:15 +0100 Subject: [PATCH 02/12] feat(xtask): Add package command Also restructure xtasks into different modules. --- Cargo.lock | 19 ++++ brane-api/src/cli.rs | 1 - xtask/Cargo.toml | 47 ++++++-- xtask/src/cli.rs | 45 ++++++++ xtask/src/completions.rs | 27 +++++ xtask/src/external_cli.rs | 24 ++++ xtask/src/install.rs | 66 +++++++++++ xtask/src/main.rs | 231 ++++++++++++++++++++++++++------------ xtask/src/man.rs | 38 +++++++ xtask/src/package.rs | 47 ++++++++ xtask/src/registry.rs | 219 ++++++++++++++++++++++++++++++++++++ xtask/src/utilities.rs | 95 ++++++++++++++++ 12 files changed, 776 insertions(+), 83 deletions(-) create mode 100644 xtask/src/cli.rs create mode 100644 xtask/src/completions.rs create mode 100644 xtask/src/external_cli.rs create mode 100644 xtask/src/install.rs create mode 100644 xtask/src/man.rs create mode 100644 xtask/src/package.rs create mode 100644 xtask/src/registry.rs create mode 100644 xtask/src/utilities.rs diff --git a/Cargo.lock b/Cargo.lock index f0640396..186fa891 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1511,6 +1511,15 @@ dependencies = [ "crypto-common", ] +[[package]] +name = "directories" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" +dependencies = [ + 
"dirs-sys", +] + [[package]] name = "dirs" version = "1.0.5" @@ -5840,17 +5849,27 @@ dependencies = [ name = "xtask" version = "3.0.0" dependencies = [ + "anyhow", + "async-compression 0.4.20", + "brane-cc", "brane-cfg", "brane-cli", "brane-ctl", + "brane-dsl 3.0.0", "brane-shr 3.0.0", "brane-tsk", "clap", "clap_complete", "clap_mangen", + "directories", + "flate2", "humantime", "jsonwebtoken", "specifications 3.0.0", + "strum 0.27.1", + "tar", + "tokio", + "tracing", ] [[package]] diff --git a/brane-api/src/cli.rs b/brane-api/src/cli.rs index 2b94d422..8180c410 100644 --- a/brane-api/src/cli.rs +++ b/brane-api/src/cli.rs @@ -1,4 +1,3 @@ -//! This module contains all logic and structures with respect to argument handling and invocation of this command use std::path::PathBuf; use clap::Parser; diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 3a63c5b6..6d367b12 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -9,18 +9,43 @@ documentation.workspace = true license.workspace = true [dependencies] -clap = "4.0.0" -humantime = "2.0.0" -jsonwebtoken = "9.0.0" -clap_complete = "4.0.0" - -brane-cfg = { path = "../brane-cfg" } -brane-ctl = { path = "../brane-ctl" } -brane-cli = { path = "../brane-cli" } -brane-shr = { path = "../brane-shr" } -brane-tsk = { path = "../brane-tsk" } -specifications = { path = "../specifications" } +anyhow = "1.0.66" +async-compression = { version = "0.4.0", features = ["tokio", "gzip"] } +clap = { version = "4.5.6", features = ["derive", "string"] } +clap_complete = "4.5.8" clap_mangen = "0.2.26" +directories = "6.0.0" +flate2 = { version = "1.0.13" } +specifications = { path = "../specifications" } +strum = { version = "0.27.1", features = ["derive", "strum_macros"] } +tar = "0.4.0" +tokio = "1.44.0" +tracing = "0.1.41" + +# Only necessary for any task that interacts with the cli arguments +brane-cfg = { path = "../brane-cfg", optional = true } +brane-ctl = { path = "../brane-ctl", optional = true } +brane-cli = { path = 
"../brane-cli", optional = true } +brane-shr = { path = "../brane-shr", optional = true } +brane-tsk = { path = "../brane-tsk", optional = true } +brane-dsl = { path = "../brane-dsl", optional = true } +brane-cc = { path = "../brane-cc", optional = true } +humantime = { version = "2.1.0", optional = true } +jsonwebtoken = { version = "9.2.0", optional = true } + +[features] +default = ["cli"] +cli = [ + "dep:brane-cfg", + "dep:brane-ctl", + "dep:brane-cli", + "dep:brane-shr", + "dep:brane-tsk", + "dep:brane-dsl", + "dep:brane-cc", + "dep:humantime", + "dep:jsonwebtoken", +] [lints] workspace = true diff --git a/xtask/src/cli.rs b/xtask/src/cli.rs new file mode 100644 index 00000000..28760e91 --- /dev/null +++ b/xtask/src/cli.rs @@ -0,0 +1,45 @@ +pub(crate) mod xtask { + use clap::{Parser, Subcommand, ValueEnum}; + #[cfg(feature = "cli")] + use clap_complete::Shell; + + #[cfg(feature = "cli")] + use crate::{Binary, Target}; + + #[derive(Debug, Parser)] + #[clap(name = "xtask")] + pub(crate) struct Cli { + #[clap(subcommand)] + pub(crate) subcommand: XTaskSubcommand, + } + + #[derive(Debug, Subcommand)] + pub(crate) enum XTaskSubcommand { + #[cfg(feature = "cli")] + Completions { + #[clap(short, long)] + shell: Option, + #[clap(short, long)] + binary: Option, + }, + #[cfg(feature = "cli")] + Man { + #[clap(short, long)] + target: Option, + }, + #[cfg(feature = "cli")] + Install { + #[clap(short, long, help = "Create all necessary directories")] + force: bool, + }, + Package { + kind: PackageKind, + }, + } + + #[derive(ValueEnum, Debug, Clone)] + pub(crate) enum PackageKind { + #[clap(name = "github")] + GitHub, + } +} diff --git a/xtask/src/completions.rs b/xtask/src/completions.rs new file mode 100644 index 00000000..ef5360ed --- /dev/null +++ b/xtask/src/completions.rs @@ -0,0 +1,27 @@ +use std::fs::File; + +use clap_complete::{Generator, Shell}; +use strum::IntoEnumIterator; + +use crate::{Binary, SHELLS}; + +pub(crate) fn generate(binary: Option, shell: Option) { 
+ let shells_to_do = match shell { + Some(shell) => &[shell][..], + None => &SHELLS[..], + }; + + let binaries_to_do = match binary { + Some(binary) => &[binary][..], + None => &Binary::iter().collect::>()[..], + }; + + for shell in shells_to_do { + for binary in binaries_to_do { + let mut command = binary.to_command(); + let bin_name = command.get_name().to_owned(); + let mut file = File::create(shell.file_name(&bin_name)).expect("Could not open/create completions file"); + clap_complete::generate(*shell, &mut command, &bin_name, &mut file); + } + } +} diff --git a/xtask/src/external_cli.rs b/xtask/src/external_cli.rs new file mode 100644 index 00000000..fcf2e387 --- /dev/null +++ b/xtask/src/external_cli.rs @@ -0,0 +1,24 @@ +#[macro_export] +macro_rules! include_cli { + ($x:ident) => { + pub(crate) mod $x { + //! test + include!(concat!("../../brane-", stringify!($x), "/src/cli.rs")); + } + }; +} + +include_cli!(ctl); +include_cli!(cli); +include_cli!(cc); +include_cli!(reg); +include_cli!(api); +include_cli!(plr); +include_cli!(prx); +include_cli!(job); +include_cli!(drv); + +// who named one of our packages 'let'...? 
+pub(crate) mod blet { + include!("../../brane-let/src/cli.rs"); +} diff --git a/xtask/src/install.rs b/xtask/src/install.rs new file mode 100644 index 00000000..af6091f5 --- /dev/null +++ b/xtask/src/install.rs @@ -0,0 +1,66 @@ +use std::fs::File; +use std::path::PathBuf; + +use anyhow::{Context as _, bail}; +use clap_complete::{Generator, Shell, generate}; +use strum::IntoEnumIterator; + +use crate::Binary; + +pub(crate) fn completions(force: bool) -> anyhow::Result<()> { + let base_dir = directories::BaseDirs::new().context("Could not determine directories in which to install")?; + + for (shell, location) in [ + (Shell::Bash, base_dir.data_local_dir().join("bash-completion/completions")), + (Shell::Fish, base_dir.data_local_dir().join("fish/vendor_completions.d")), + (Shell::Zsh, base_dir.data_local_dir().join("zsh/site-functions")), + ] { + if !location.exists() { + if force { + std::fs::create_dir_all(&location).context("Attempted to create completion directory")?; + } else { + bail!("Completion directory for {shell} does not exist, and command was not ran with --force"); + } + } + + for binary in Binary::iter() { + let mut command = binary.to_command(); + let bin_name = command.get_name().to_owned(); + + let completion_filename = shell.file_name(&bin_name); + + let path = location.join(completion_filename); + tracing::debug!("Creating {path:?}"); + let mut file = File::create(path).context("Attempted to create completion file")?; + generate(shell, &mut command, bin_name, &mut file); + } + } + + Ok(()) +} + +pub(crate) fn binaries(force: bool) -> anyhow::Result<()> { + let target_directory = PathBuf::from("./target/release"); + + let base_dir = directories::BaseDirs::new().context("Could not determine directories in which to install")?; + + let dest_dir = base_dir.executable_dir().context("Could not determine the directories in which to install")?; + if !dest_dir.exists() { + if force { + std::fs::create_dir_all(dest_dir).context("Could not create required 
directories for installing the binaries")?; + } else { + bail!("Executable directory '{exec_dir}' does not exist, and was ran without --force", exec_dir = dest_dir.display()); + } + } + + for binary in Binary::iter() { + let bin_name = binary.to_command().get_name().to_owned(); + let src_path = target_directory.join(&bin_name); + + eprintln!("{src_path:?} -> {dest_dir:?}"); + std::fs::copy(src_path, dest_dir.join(&bin_name)) + .with_context(|| format!("Unable to install binaries in '{exec_dir}'", exec_dir = dest_dir.display()))?; + } + + Ok(()) +} diff --git a/xtask/src/main.rs b/xtask/src/main.rs index 01f466c1..caac995e 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -1,103 +1,192 @@ #![allow(dead_code)] -use std::fs::File; -use std::path::PathBuf; -use clap::{CommandFactory, Parser, Subcommand, ValueEnum}; -use clap_complete::generate; +mod cli; +mod package; +mod registry; +mod utilities; + +#[cfg(feature = "cli")] +mod completions; +#[cfg(feature = "cli")] +mod external_cli; +#[cfg(feature = "cli")] +mod install; +#[cfg(feature = "cli")] +mod man; + +use anyhow::Context as _; +use clap::Parser; use clap_complete::shells::Shell; +#[cfg(feature = "cli")] +use { + clap::builder::PossibleValue, + clap::{CommandFactory, ValueEnum}, + std::sync::OnceLock, + strum::{EnumIter, IntoEnumIterator}, +}; -const SHELLS: [(&str, Shell); 3] = [("bash", Shell::Bash), ("fish", Shell::Fish), ("zsh", Shell::Zsh)]; +const SHELLS: [Shell; 3] = [Shell::Bash, Shell::Fish, Shell::Zsh]; -mod completions { - pub(crate) mod ctl { - include!("../../brane-ctl/src/cli.rs"); - } - pub(crate) mod cli { - include!("../../brane-cli/src/cli.rs"); +#[tokio::main(flavor = "current_thread")] +async fn main() -> anyhow::Result<()> { + let opts = cli::xtask::Cli::parse(); + use cli::xtask::XTaskSubcommand; + match opts.subcommand { + #[cfg(feature = "cli")] + XTaskSubcommand::Completions { binary, shell } => { + completions::generate(binary, shell); + }, + #[cfg(feature = "cli")] + 
XTaskSubcommand::Man { target } => { + let targets = match target { + Some(target) => &[target][..], + None => Target::value_variants(), + }; + for target in targets { + man::create_recursive(target.to_command(), "", true)?; + } + }, + #[cfg(feature = "cli")] + XTaskSubcommand::Install { force } => { + install::completions(force)?; + install::binaries(force)?; + }, + XTaskSubcommand::Package { kind } => match kind { + cli::xtask::PackageKind::GitHub => { + package::create_github_package().await.context("Could not create package for GitHub")?; + }, + }, } + + Ok(()) } -#[derive(Debug, Parser)] -#[clap(name = "xtask")] -struct Arguments { - #[clap(subcommand)] - pub(crate) subcommand: XTaskSubcommand, +#[cfg(feature = "cli")] +#[derive(Debug, Clone, Copy)] +pub(crate) enum Target { + Binary(Binary), + ContainerBinary(ContainerBinary), + Image(Image), } -#[derive(Debug, Subcommand)] -enum XTaskSubcommand { - #[clap(name = "completions")] - Completions { - #[clap(long)] - shell: Option, - #[clap(name = "binary")] - binary: Binary, - }, - #[clap(name = "manpage")] - ManPage { binary: Binary }, +#[cfg(feature = "cli")] +impl ValueEnum for Target { + fn value_variants<'a>() -> &'a [Self] { + static INSTANCE: OnceLock> = OnceLock::new(); + + INSTANCE.get_or_init(|| { + std::iter::empty() + .chain(Binary::iter().map(Self::Binary)) + .chain(ContainerBinary::iter().map(Self::ContainerBinary)) + .chain(Image::iter().map(Self::Image)) + .collect::>() + }) + } + + fn to_possible_value(&self) -> Option { + match self { + Target::Binary(b) => b.to_possible_value(), + Target::ContainerBinary(c) => c.to_possible_value(), + Target::Image(i) => i.to_possible_value(), + } + } +} + +#[cfg(feature = "cli")] +impl Target { + pub(crate) fn to_command(self) -> clap::Command { + match self { + Target::Binary(x) => x.to_command(), + Target::ContainerBinary(x) => x.to_command(), + Target::Image(x) => x.to_command(), + } + } } -#[derive(Debug, Clone, Copy, ValueEnum)] -enum Binary { 
+#[cfg(feature = "cli")] +#[derive(Debug, Clone, Copy, ValueEnum, EnumIter)] +pub(crate) enum Binary { + // Binaries #[clap(name = "branectl")] - Branectl, + BraneCtl, #[clap(name = "brane")] Brane, + #[clap(name = "branec")] + BraneC, + #[clap(name = "xtask")] XTask, } +#[cfg(feature = "cli")] +#[derive(Debug, Clone, Copy, ValueEnum, EnumIter)] +pub(crate) enum ContainerBinary { + // Images + #[clap(name = "branelet")] + BraneLet, +} + +#[cfg(feature = "cli")] +#[derive(Debug, Clone, Copy, ValueEnum, EnumIter)] +pub(crate) enum Image { + #[clap(name = "brane-api")] + BraneAPI, + #[clap(name = "brane-drv")] + BraneDrv, + #[clap(name = "brane-job")] + BraneJob, + #[clap(name = "brane-plr")] + BranePlr, + #[clap(name = "brane-prx")] + BranePrx, + #[clap(name = "brane-reg")] + BraneReg, +} + +#[cfg(feature = "cli")] impl Binary { - fn to_binary_name(self) -> &'static str { + // pub(crate) fn to_binary_name(self) -> &'static str { + // use Binary::*; + // match self { + // Branectl => "branectl", + // Brane => "brane", + // BraneC => "branec", + // + // XTask => "xtask", + // } + // } + + pub(crate) fn to_command(self) -> clap::Command { use Binary::*; match self { - Branectl => "branectl", - Brane => "brane", - XTask => "xtask", + BraneCtl => crate::external_cli::ctl::Cli::command(), + Brane => crate::external_cli::cli::Cli::command(), + BraneC => crate::external_cli::cc::Cli::command(), + + XTask => crate::cli::xtask::Cli::command(), } } +} - fn to_command(self) -> clap::Command { - use Binary::*; +#[cfg(feature = "cli")] +impl ContainerBinary { + pub(crate) fn to_command(self) -> clap::Command { match self { - Branectl => completions::ctl::Arguments::command(), - Brane => completions::cli::Cli::command(), - XTask => Arguments::command(), + ContainerBinary::BraneLet => crate::external_cli::blet::Cli::command(), } } } -fn main() { - let opts = Arguments::parse(); - match opts.subcommand { - XTaskSubcommand::Completions { binary, shell } => { - let bin_name = 
binary.to_binary_name(); - let mut command = binary.to_command(); - - if let Some(shell) = shell { - if let Some((extension, sh)) = SHELLS.iter().find(|(name, _)| *name == shell) { - let mut file = File::create(format!("{bin_name}.{extension}")).expect("Could not open/create completions file"); - generate(*sh, &mut command, bin_name, &mut file); - } - } else { - for (extension, shell) in SHELLS { - let mut file = File::create(format!("{bin_name}.{extension}")).expect("Could not open/create completions file"); - generate(shell, &mut command, bin_name, &mut file); - } - } - }, - XTaskSubcommand::ManPage { binary } => { - let out_dir = PathBuf::from("./"); - - let name = binary.to_binary_name(); - let command = binary.to_command(); - - let man = clap_mangen::Man::new(command); - - let mut buffer: Vec = Default::default(); - man.render(&mut buffer).unwrap(); - - std::fs::write(out_dir.join(format!("{name}.1")), buffer).unwrap(); - }, +#[cfg(feature = "cli")] +impl Image { + pub(crate) fn to_command(self) -> clap::Command { + match self { + Image::BraneAPI => crate::external_cli::api::Cli::command(), + Image::BraneDrv => crate::external_cli::drv::Cli::command(), + Image::BraneJob => crate::external_cli::job::Cli::command(), + Image::BranePlr => crate::external_cli::plr::Cli::command(), + Image::BranePrx => crate::external_cli::prx::Cli::command(), + Image::BraneReg => crate::external_cli::reg::Cli::command(), + } } } diff --git a/xtask/src/man.rs b/xtask/src/man.rs new file mode 100644 index 00000000..35b2d9d7 --- /dev/null +++ b/xtask/src/man.rs @@ -0,0 +1,38 @@ +use std::io::BufWriter; +use std::path::PathBuf; + +use anyhow::Context; +use clap::Command; + +pub(crate) fn create_recursive(command: Command, prefix: &str, compressed: bool) -> anyhow::Result<()> { + let out_dir = PathBuf::from("./manpages"); + + if !out_dir.exists() { + std::fs::create_dir(&out_dir).context("Creating output directory failed")?; + } + + let subcommand_name = command.get_name(); + let 
total_command_name = if prefix.is_empty() { String::from(subcommand_name) } else { format!("{prefix}-{subcommand_name}") }; + let command = command.name(&total_command_name); + + let man = clap_mangen::Man::new(command.clone()); + + let filename = out_dir.join(man.get_filename()); + + if compressed { + let filename = format!("{filename}.gz", filename = filename.display()); + let output = BufWriter::new(std::fs::File::create(filename)?); + let mut encoder = flate2::write::GzEncoder::new(output, flate2::Compression::default()); + man.render(&mut encoder).context("Could not render man page")?; + } else { + let output = std::fs::File::create(filename)?; + let mut buffer = BufWriter::new(output); + man.render(&mut buffer).context("Could not render man page")?; + } + + for subcommand in command.get_subcommands() { + create_recursive(subcommand.clone(), &total_command_name, compressed)? + } + + Ok(()) +} diff --git a/xtask/src/package.rs b/xtask/src/package.rs new file mode 100644 index 00000000..928e562c --- /dev/null +++ b/xtask/src/package.rs @@ -0,0 +1,47 @@ +use std::env::consts::*; +use std::path::PathBuf; + +use anyhow::Context; + +use crate::registry; +use crate::utilities::{compress_file, create_tar_gz, format_release_binary_name, format_src_binary_name, format_src_library_name}; + +pub(crate) async fn create_github_package() -> anyhow::Result<()> { + eprintln!("Creating a package for: {os} {arch}", os = OS, arch = ARCH); + + let registry = registry::REGISTRY.get_or_init(registry::build_registry); + + let src_dir = PathBuf::from("target/release"); + let dst_dir = PathBuf::from("target/package/release"); + + if !dst_dir.exists() { + std::fs::create_dir_all(&dst_dir).context("Could not create all dirs leading up to destination dir")? 
+ } + + // CREATE BINARIES + for (src, dst) in registry + .search_for_system("binaries", OS, ARCH) + .map(|target| (format_src_binary_name(&target.output_name), format_release_binary_name(&target.output_name))) + { + std::fs::copy(src_dir.join(&src), dst_dir.join(&dst)).with_context(|| format!("Could not copy over file: {src}"))?; + } + + // CREATE LIBRARIES + for target in registry.search_for_system("library", OS, ARCH) { + compress_file(src_dir.join(format_src_library_name(&target.output_name)), dst_dir.join(format_src_library_name(&target.output_name))) + .await + .with_context(|| format!("Could not compress {library_name}", library_name = target.output_name))?; + } + + // CREATE CENTRAL INSTANCE ARCHIVE + let central_instance_dst = format!("central-instance-{arch}.tar.gz", arch = ARCH); + let files: Vec<_> = registry.search_for_system("central", OS, ARCH).map(|target| src_dir.join(target.output_name)).collect(); + create_tar_gz(dst_dir.join(¢ral_instance_dst), files).context("Could not create 'central-instance' tar archive")?; + + // CREATE WORKER INSTANCE ARCHIVE + let worker_instance_dst = format!("worker-instance-{arch}.tar.gz", arch = ARCH); + let files: Vec<_> = registry.search_for_system("worker", OS, ARCH).map(|target| src_dir.join(target.output_name)).collect(); + create_tar_gz(dst_dir.join(&worker_instance_dst), files).context("Could not create 'worker-instance' tar archive")?; + + Ok(()) +} diff --git a/xtask/src/registry.rs b/xtask/src/registry.rs new file mode 100644 index 00000000..bdb8568d --- /dev/null +++ b/xtask/src/registry.rs @@ -0,0 +1,219 @@ +use std::hash::Hash; +use std::path::PathBuf; +use std::sync::{Arc, OnceLock}; + +use crate::utilities::create_dir_with_cachetag; + +pub static REGISTRY: OnceLock = OnceLock::new(); + +pub type BuildFunc = dyn Fn(BuildFuncInfo) -> anyhow::Result<()> + Sync + Send; + +pub struct BuildFuncInfo { + pub out_dir: PathBuf, + pub target_arch: String, +} + +#[derive(Clone)] +pub struct Target { + pub 
package_name: String, + pub output_name: String, + + pub platforms: Vec<(String, String)>, + pub groups: Vec, + + pub build_command: Arc, +} + +impl Hash for Target { + fn hash(&self, state: &mut H) { + self.package_name.hash(state); + self.output_name.hash(state); + self.platforms.hash(state); + self.groups.hash(state); + } +} + +impl PartialEq for Target { + // Implemented by hand to ignore build_command + fn eq(&self, other: &Self) -> bool { + self.package_name == other.package_name + && self.output_name == other.output_name + && self.platforms == other.platforms + && self.groups == other.groups + } +} + +impl Eq for Target {} + +impl Target { + pub fn new(name: &str, output_name: &str, groups: &[&str], platforms: &[(&str, &str)], build_command: Arc) -> Self { + Self { + package_name: name.to_owned(), + output_name: output_name.to_owned(), + platforms: platforms.iter().map(|(x, y)| (x.to_string(), y.to_string())).collect(), + groups: groups.iter().map(|x| x.to_string()).collect(), + build_command, + } + } +} + +pub struct Registry { + targets: Vec, +} + +impl Registry { + pub fn new() -> Self { Self { targets: Default::default() } } + + pub fn register(&mut self, target: Target) { self.targets.push(target) } + + pub fn search(&self, name: impl Into) -> impl Iterator + '_ { + let name = name.into(); + self.targets.iter().filter(move |target| target.package_name == name || target.groups.iter().any(|group| group == &name)).cloned() + } + + pub fn search_for_system(&self, name: impl Into, os: impl Into, arch: impl Into) -> impl Iterator + '_ { + let os = os.into(); + let arch = arch.into(); + self.search(name).filter(move |target| target.platforms.iter().any(|(a_os, a_arch)| a_os == &os && a_arch == &arch)) + } +} + +pub fn build_registry() -> Registry { + let mut registry = Registry::new(); + + registry.register(Target::new( + "brane-cc", + "branec", + &["all", "binaries"], + &[("linux", "x86_64"), ("linux", "aarch64"), ("macos", "x86_64"), ("macos", "aarch64")], + 
build_binary_builder("brane-cc"), + )); + + registry.register(Target::new( + "brane-cli", + "brane", + &["all", "binaries"], + &[("linux", "x86_64"), ("linux", "aarch64"), ("macos", "aarch64"), ("macos", "x86_64"), ("windows", "x86_64")], + build_binary_builder("brane-cli"), + )); + registry.register(Target::new( + "brane-ctl", + "branectl", + &["all", "binaries"], + &[("linux", "x86_64"), ("linux", "aarch64"), ("macos", "x86_64"), ("macos", "aarch64")], + build_binary_builder("brane-ctl"), + )); + registry.register(Target::new( + "brane-let", + "branelet", + &["all", "binaries"], + &[("linux", "x86_64"), ("linux", "aarch64")], + build_binary_builder("brane-let"), + )); + + registry.register(Target::new( + "brane-api", + "brane-api.tar", + &["all", "images", "central"], + &[("linux", "x86_64")], + build_image_builder("brane-api"), + )); + registry.register(Target::new( + "brane-drv", + "brane-drv.tar", + &["all", "images", "central"], + &[("linux", "x86_64")], + build_image_builder("brane-drv"), + )); + registry.register(Target::new( + "brane-plr", + "brane-plr.tar", + &["all", "images", "central"], + &[("linux", "x86_64")], + build_image_builder("brane-plr"), + )); + registry.register(Target::new( + "brane-chk", + "brane-chk.tar", + &["all", "images", "worker"], + &[("linux", "x86_64")], + build_image_builder("brane-chk"), + )); + registry.register(Target::new( + "brane-job", + "brane-job.tar", + &["all", "images", "worker"], + &[("linux", "x86_64")], + build_image_builder("brane-job"), + )); + registry.register(Target::new( + "brane-reg", + "brane-reg.tar", + &["all", "images", "worker"], + &[("linux", "x86_64")], + build_image_builder("brane-reg"), + )); + registry.register(Target::new( + "brane-prx", + "brane-prx.tar", + &["all", "images", "worker", "central"], + &[("linux", "x86_64")], + build_image_builder("brane-prx"), + )); + + registry.register(Target::new( + "brane-cli-c", + "brane_cli", + &["all", "library"], + &[("linux", "x86_64"), ("macos", "x86_64"), 
("macos", "aarch64"), ("windows", "x86_64")], + build_binary_builder("brane-cli-c"), + )); + + registry +} + +pub fn build_image_builder(package: &str) -> Arc { + let package = package.to_owned(); + + Arc::new(move |info: BuildFuncInfo| { + let image_dir = "./target/release"; + + // Since this is not handled by cargo and we are "borrowing" its target directory, we need to + // set it up ourselves + let absolute_dir = info.out_dir; + create_dir_with_cachetag(absolute_dir)?; + + let mut cmd = std::process::Command::new("docker"); + + let x = cmd.args([ + "buildx", + "build", + "--output", + &format!(r#"type=docker,dest={image_dir}/{package}.tar"#), + "--file", + "Dockerfile.rls", + "--target", + &package, + ".", + ]); + + println!("{x:?}"); + + if !cmd.spawn()?.wait_with_output()?.status.success() { + anyhow::bail!("{package} compilation process failed") + } + Ok(()) + }) +} + +pub fn build_binary_builder(package: &str) -> Arc { + let package = package.to_owned(); + + Arc::new(move |_info: BuildFuncInfo| { + if !std::process::Command::new("cargo").args(["build", "--package", &package, "--release"]).spawn()?.wait_with_output()?.status.success() { + anyhow::bail!("{package} compilation process failed") + } + + Ok(()) + }) +} diff --git a/xtask/src/utilities.rs b/xtask/src/utilities.rs new file mode 100644 index 00000000..62c95c23 --- /dev/null +++ b/xtask/src/utilities.rs @@ -0,0 +1,95 @@ +use std::env::consts::*; +use std::path::{Path, PathBuf}; + +use anyhow::Context as _; +use tar::Builder; +use tokio::fs::File; +use tokio::io::BufReader; + +pub fn format_release_binary_name(name: &str) -> String { format!("{name}-{os}-{arch}{suffix}", os = OS, arch = ARCH, suffix = EXE_SUFFIX) } + +pub fn format_src_binary_name(name: &str) -> String { format!("{name}{suffix}", suffix = EXE_SUFFIX) } + +pub fn format_release_library_name(name: &str) -> String { + format!("{prefix}{name}-{os}-{arch}{suffix}.gz", os = OS, arch = ARCH, prefix = DLL_PREFIX, suffix = DLL_SUFFIX) +} + 
+pub fn format_src_library_name(name: &str) -> String { format!("{prefix}{name}{suffix}", prefix = DLL_PREFIX, suffix = DLL_SUFFIX) } + +pub async fn compress_file(path: impl AsRef, dest: impl AsRef) -> anyhow::Result<()> { + let path = path.as_ref(); + let dest = dest.as_ref(); + let file = File::open(path).await.with_context(|| format!("Could not open source file: {}", path.display()))?; + let mut reader = BufReader::new(file); + let dest = File::create(dest).await.with_context(|| format!("Could not open destination file: {}", dest.display()))?; + let mut encoder = async_compression::tokio::write::GzipEncoder::new(dest); + + tokio::io::copy(&mut reader, &mut encoder).await?; + Ok(()) +} + +pub fn create_tar_gz(archive_name: impl AsRef, files: impl IntoIterator) -> anyhow::Result<()> { + let archive_name = archive_name.as_ref(); + let file = std::io::BufWriter::new(std::fs::File::create(archive_name).context("Couldn't create the archive")?); + let encoder = flate2::write::GzEncoder::new(file, flate2::Compression::default()); + let mut archive = Builder::new(encoder); + + let dirname: PathBuf = archive_name + .file_name() + .ok_or_else(|| anyhow::anyhow!("Could not get filename from archive"))? + .to_string_lossy() + .strip_suffix(".tar.gz") + .ok_or_else(|| anyhow::anyhow!("Could not extract directory name from archive name"))? + .into(); + + eprintln!("Creating archive: {dirname:?}"); + + for file in files { + let filename = file + .as_path() + .file_name() + .ok_or_else(|| anyhow::anyhow!("Could not find filename"))? 
+ .to_str() + .ok_or_else(|| anyhow::anyhow!("Could not decode filename as UTF-8"))?; + + archive.append_path_with_name(file.as_path(), dirname.join(filename)).context("Could not add file to archive")?; + } + + archive.finish().context("Could not finish writing archive")?; + + Ok(()) +} + +pub fn create_dir_with_cachetag(path: impl AsRef) -> anyhow::Result<()> { + let path = path.as_ref(); + let absolute_path = std::env::current_dir().context("Could not get current directory")?.join(path); + + if absolute_path.exists() { + if !absolute_path.is_dir() { + anyhow::bail!("Output directory location exists, but is not a directory"); + } + // Nothing to do + return Ok(()); + } + + let mut cursor = absolute_path.parent().unwrap(); + let mut child = absolute_path.as_path(); + loop { + if cursor.exists() { + // Since our child did not yet exist (by virtue that we ended up in this iteration of + // the loop), we have found the most ancient directory to be created + std::fs::create_dir(child).with_context(|| format!("Could not create new directory {path}", path = child.display()))?; + + std::fs::write(child.join("CACHEDIR.TAG"), "Signature: 8a477f597d28d172789f06886806bc55\n# Created by brane-xtask") + .context("Could not create CACHEDIR.TAG")?; + break; + } + + child = cursor; + cursor = cursor.parent().unwrap(); + } + + std::fs::create_dir_all(absolute_path).context("Could not create all remaining directories")?; + + Ok(()) +} From 7f42d2d58970e62904eb585ab0cbe1e0255db983 Mon Sep 17 00:00:00 2001 From: Daniel Voogsgerd Date: Sun, 6 Apr 2025 21:21:31 +0200 Subject: [PATCH 03/12] feat(xtask): Add build command --- xtask/src/build.rs | 28 ++++++++++++++++++++++++++++ xtask/src/cli.rs | 3 +++ xtask/src/main.rs | 4 ++++ 3 files changed, 35 insertions(+) create mode 100644 xtask/src/build.rs diff --git a/xtask/src/build.rs b/xtask/src/build.rs new file mode 100644 index 00000000..58a74dda --- /dev/null +++ b/xtask/src/build.rs @@ -0,0 +1,28 @@ +use std::collections::HashSet; +use 
std::env::consts::{ARCH, OS}; +use std::path::PathBuf; + +use crate::registry::{self, BuildFuncInfo, build_registry}; + +pub fn build(targets: &[String]) -> anyhow::Result<()> { + let registry = registry::REGISTRY.get_or_init(build_registry); + let build_targets: HashSet<_> = targets + .iter() + .flat_map(|target| { + let mut found = registry.search_for_system(target, OS, ARCH).peekable(); + + if found.peek().is_none() { + eprintln!("Warning: Target {target} did not match any known targets for your system"); + } + + found + }) + .collect(); + + for target in build_targets { + eprintln!("Building {target}", target = target.package_name); + (target.build_command)(BuildFuncInfo { target_arch: String::from(std::env::consts::ARCH), out_dir: PathBuf::from("./target/release") })? + } + + Ok(()) +} diff --git a/xtask/src/cli.rs b/xtask/src/cli.rs index 28760e91..a2885469 100644 --- a/xtask/src/cli.rs +++ b/xtask/src/cli.rs @@ -35,6 +35,9 @@ pub(crate) mod xtask { Package { kind: PackageKind, }, + Build { + targets: Vec, + }, } #[derive(ValueEnum, Debug, Clone)] diff --git a/xtask/src/main.rs b/xtask/src/main.rs index caac995e..bc423460 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -1,5 +1,6 @@ #![allow(dead_code)] +mod build; mod cli; mod package; mod registry; @@ -56,6 +57,9 @@ async fn main() -> anyhow::Result<()> { package::create_github_package().await.context("Could not create package for GitHub")?; }, }, + XTaskSubcommand::Build { targets } => { + build::build(&targets).context("Could not build all targets")?; + }, } Ok(()) From 516e17dcc88aa05c641437de9ca7fcca64886ff6 Mon Sep 17 00:00:00 2001 From: Daniel Voogsgerd Date: Sun, 6 Apr 2025 21:22:27 +0200 Subject: [PATCH 04/12] feat(xtask): Add set-version command --- Cargo.lock | 1 + xtask/Cargo.toml | 7 +-- xtask/src/cli.rs | 8 +++ xtask/src/main.rs | 4 ++ xtask/src/set_version.rs | 109 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 126 insertions(+), 3 deletions(-) create mode 100644 
xtask/src/set_version.rs diff --git a/Cargo.lock b/Cargo.lock index 186fa891..1cbd3e7c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5869,6 +5869,7 @@ dependencies = [ "strum 0.27.1", "tar", "tokio", + "toml", "tracing", ] diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 6d367b12..bda0e1ef 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -17,9 +17,10 @@ clap_mangen = "0.2.26" directories = "6.0.0" flate2 = { version = "1.0.13" } specifications = { path = "../specifications" } -strum = { version = "0.27.1", features = ["derive", "strum_macros"] } -tar = "0.4.0" -tokio = "1.44.0" +strum = { version = "0.27.0", features = ["derive", "strum_macros"] } +tar = "0.4.21" +tokio = "1.38.0" +toml = "0.8.8" tracing = "0.1.41" # Only necessary for any task that interacts with the cli arguments diff --git a/xtask/src/cli.rs b/xtask/src/cli.rs index a2885469..dcbc4625 100644 --- a/xtask/src/cli.rs +++ b/xtask/src/cli.rs @@ -38,6 +38,14 @@ pub(crate) mod xtask { Build { targets: Vec, }, + SetVersion { + #[clap(short, long)] + semver: Option, + #[clap(short, long)] + prerelease: Option, + #[clap(short, long)] + metadata: Option, + }, } #[derive(ValueEnum, Debug, Clone)] diff --git a/xtask/src/main.rs b/xtask/src/main.rs index bc423460..02b6941b 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -4,6 +4,7 @@ mod build; mod cli; mod package; mod registry; +mod set_version; mod utilities; #[cfg(feature = "cli")] @@ -60,6 +61,9 @@ async fn main() -> anyhow::Result<()> { XTaskSubcommand::Build { targets } => { build::build(&targets).context("Could not build all targets")?; }, + XTaskSubcommand::SetVersion { semver, prerelease, metadata } => { + set_version::set_version(semver, prerelease, metadata).context("Could not rewrite version")?; + }, } Ok(()) diff --git a/xtask/src/set_version.rs b/xtask/src/set_version.rs new file mode 100644 index 00000000..8837498b --- /dev/null +++ b/xtask/src/set_version.rs @@ -0,0 +1,109 @@ +use std::process::Stdio; + +use 
anyhow::Context as _; + +pub fn set_version(semver: Option, prerelease: Option, metadata: Option) -> anyhow::Result<()> { + let mut table = std::fs::read_to_string("Cargo.toml").context("Could not read Cargo.toml")?.parse::()?; + let version = table + .get_mut("workspace") + .context("Could not find field 'workspace' in Cargo.toml")? + .get_mut("package") + .context("Could not find field 'workspace.package' in Cargo.toml")? + .get_mut("version") + .context("Could not find field 'version' in workspace.package")?; + let version_str = version.as_str().context("Could not convert package version to str")?; + + let metadata = match metadata { + Some(m) => Some(m.replace("$git_hash", &get_git_hash()?[..8]).replace("$git_dirty", if get_git_dirty()? { ".dirty" } else { "" })), + None => None, + }; + + let new_version = rewrite_version(version_str, semver.as_deref(), prerelease.as_deref(), metadata.as_deref()); + *version = new_version.into(); + + std::fs::write("Cargo.toml", table.to_string()).context("Could not write to Cargo.toml")?; + + Ok(()) +} + +fn get_git_hash() -> anyhow::Result { + let bytes = std::process::Command::new("git") + .args(["rev-parse", "HEAD"]) + .stdout(Stdio::piped()) + .stderr(Stdio::null()) + .output() + .context("Could not get latest git commit hash")? + .stdout; + + String::from_utf8(bytes).context("Could not convert git hash to unicode string") +} + +fn get_git_dirty() -> anyhow::Result { + Ok(!std::process::Command::new("git") + .args(["diff-index", "--quiet", "HEAD", "--"]) + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .output() + .context("Could not determine whether working tree is dirty")? 
+ .status + .success()) +} + +fn parse_version(version_str: &str) -> (&str, Option<&str>, Option<&str>) { + if let Some((semver, remainder)) = version_str.split_once('-') { + if let Some((prerelease, metadata)) = remainder.split_once('+') { + (semver, Some(prerelease), Some(metadata)) + } else { + (semver, Some(remainder), None) + } + } else if let Some((semver, metadata)) = version_str.split_once('+') { + (semver, None, Some(metadata)) + } else { + (version_str, None, None) + } +} + +fn rewrite_version(version_str: &str, semver: Option<&str>, prerelease: Option<&str>, metadata: Option<&str>) -> String { + let (semver_old, prerelease_old, metadata_old) = parse_version(version_str); + + let mut new_version = semver.unwrap_or(semver_old).to_owned(); + + if let Some(prerelease) = prerelease.or(prerelease_old) { + if !prerelease.is_empty() { + new_version.push('-'); + new_version.push_str(prerelease); + } + } + if let Some(metadata) = metadata.or(metadata_old) { + if !metadata.is_empty() { + new_version.push('+'); + new_version.push_str(metadata); + } + } + + new_version +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parse_version() { + assert_eq!(parse_version("1.2.3"), ("1.2.3", None, None)); + assert_eq!(parse_version("1.2.3-nightly"), ("1.2.3", Some("nightly"), None)); + assert_eq!(parse_version("1.2.3-nightly+abcdef"), ("1.2.3", Some("nightly"), Some("abcdef"))); + assert_eq!(parse_version("1.2.3+abcdef"), ("1.2.3", None, Some("abcdef"))); + assert_eq!(parse_version("1.2.3-nightly-release"), ("1.2.3", Some("nightly-release"), None)); + } + + #[test] + fn test_rewrite_version() { + assert_eq!(rewrite_version("1.2.3", None, Some("nightly"), None), String::from("1.2.3-nightly")); + assert_eq!(rewrite_version("1.2.3-nightly", None, Some("rc.1"), None), String::from("1.2.3-rc.1")); + assert_eq!(rewrite_version("1.2.3-nightly+abcdef", None, Some("rc.1"), None), String::from("1.2.3-rc.1+abcdef")); + 
assert_eq!(rewrite_version("1.2.3-nightly+abcdef", None, None, Some("123456")), String::from("1.2.3-nightly+123456")); + assert_eq!(rewrite_version("1.2.3-nightly+abcdef", None, Some("rc.1"), Some("123456")), String::from("1.2.3-rc.1+123456")); + assert_eq!(rewrite_version("1.2.3-nightly+abcdef", Some("2.0.0"), Some(""), Some("")), String::from("2.0.0")); + } +} From 0c4d94c78fab81334bf0519157f84b22fde280f4 Mon Sep 17 00:00:00 2001 From: Lut99 Date: Mon, 31 Mar 2025 22:37:05 +0200 Subject: [PATCH 05/12] Removed obsolete files from the repository root --- Dockerfile.mac | 5 - chroot_make.py | 206 --- make.py | 3918 --------------------------------------------- policy_token.json | 1 - test.bs | 4 - 5 files changed, 4134 deletions(-) delete mode 100644 Dockerfile.mac delete mode 100644 chroot_make.py delete mode 100755 make.py delete mode 100644 policy_token.json delete mode 100644 test.bs diff --git a/Dockerfile.mac b/Dockerfile.mac deleted file mode 100644 index f7da0632..00000000 --- a/Dockerfile.mac +++ /dev/null @@ -1,5 +0,0 @@ -FROM messense/cargo-zigbuild:0.17.3 - -# Add our own dependencies -RUN apt-get update && apt-get install -y cmake \ - && rm -rf /var/lib/apt/lists/* diff --git a/chroot_make.py b/chroot_make.py deleted file mode 100644 index c14b46ed..00000000 --- a/chroot_make.py +++ /dev/null @@ -1,206 +0,0 @@ -# CHROOT MAKE.py -# by Lut99 -# -# Created: -# 24 Jan 2023, 16:36:23 -# Last edited: -# 24 Jan 2023, 17:02:50 -# Auto updated? -# Yes -# -# Description: -# A script that can compile the binaries (CLI, CTL, CC) against a specific -# GLIBC version using chroot. -# -# This script uses `debootstrap` to locally initialize a new Debian -# filesystem if this has not yet been done so. Then, it will install the -# required packages to build the Brane binaries. It will then build against -# the target GLIBC version to be backwards compatible with previous -# linux versions. 
-# -# Then, if the environment exists, the program executes a build command and -# builds the requested binary. -# -# Note that this operation can take up quite a lot of space, so it is -# recommended to destroy the environment `./debian_chroot_glibc_` -# once finished. -# - -import argparse -import os -import sys - - -##### GLOBALS ##### -# Whether we are in debug mode or not. -DEBUG = False - - - - - -##### HELPER FUNCTIONS ##### -def tty_supports_colour() -> bool: - """ - Determines whether `stdout` and `stderr` should add ANSI colour codes. - - From: https://stackoverflow.com/a/22254892 - - # Returns - True if they should, or False if they shouldn't. - """ - - plat = sys.platform - supported_platform = plat != 'Pocket PC' and (plat != 'win32' or - 'ANSICON' in os.environ) - # isatty is not always implemented, #6223. - is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty() - return supported_platform and is_a_tty - -def dprint(text: str): - """ - Logs a debug statement to stdout based on whether `--debug` is given or - not. - - # Globals - - `DEBUG`: Reads the variable that determines if we are in debug mode. - - # Arguments - - `text`: The text to write to the stdout. - """ - - global DEBUG - if DEBUG: - # Assign the colours - start = "\033[90m" if tty_supports_colour() else "" - end = "\033[0m" if tty_supports_colour() else "" - print(f"{start}[debug] {text}{end}") - -def wprint(text: str): - """ - Logs a warning message to stderr. - - # Arguments - - `text`: The text to write to the stderr. - """ - - # Assign the colours - start = "\033[93;1m" if tty_supports_colour() else "" - bold = "\033[1m" if tty_supports_colour() else "" - end = "\033[0m" if tty_supports_colour() else "" - print(f"{start}[WARNING]{end} {bold}{text}{end}") - -def eprint(text: str): - """ - Logs an error message to stderr. - - # Arguments - - `text`: The text to write to the stderr. 
- """ - - # Assign the colours - start = "\033[91;1m" if tty_supports_colour() else "" - bold = "\033[1m" if tty_supports_colour() else "" - end = "\033[0m" if tty_supports_colour() else "" - print(f"{start}[ERROR]{end} {bold}{text}{end}") - - - - - -##### CHROOT FUNCTIONS ##### -def build_chroot(path: str, glibc_version: str, debian_version: str) -> int: - """ - Builds the chroot environment for the specific GLIBC version. - - # Arguments - - `path`: The path to write the new environment to. - - `glibc_version`: The GLIBC version to install in that environment. - - `debian_version`: The Debian version to install. - - # Returns - - """ - - - - - -##### ENTRYPOINT ##### -def main(binary: str, path: str, glibc_version: str, debian_version: str, debugging: bool) -> int: - # Set the debugging mode - global DEBUG - DEBUG = debugging - - # Show a header thingy - print() - print("### CHROOT MAKE.py for the BRANE PROJECT ###") - print("(Use '--help' for options)") - print() - print(f"Building Brane {binary.upper()} against GLIBC version {glibc_version} in a local {debian_version} filesystem using chroot ({path}).") - print() - - # Replace the wildcards - path = path.replace("$GLIBC_VERSION", glibc_version).replace("$DEBIAN_VERSION", debian_version) - - # Check if the given environment exists - if os.path.exists(path): - # Check if it is a directory - if os.path.isdir(path): - print("Chroot already exists") - print("(Remove the folder and re-run the script to rebuild it)") - else: - # Make sure that the user allows us doing this - dprint(f"Marking chroot as outdated because '{path}' exists but isn't a folder") - wprint(f"'{path}' already exists but is not a folder. 
Overwrite?") - while True: - yn = input("(y/n): ") - if yn == "y": break - print() - print("Aborted.") - print() - return 0 - - # Remove the old file - print(f"Removing '{path}'...") - os.remove(path) - - # Build the chroot - print("Building chroot...") - build_chroot(glibc_version, debian_version) - else: - dprint(f"Marking chroot as outdated because '{path}' does not exist") - print("Building chroot...") - build_chroot(glibc_version, debian_version) - - # Hook the Brane source to the chroot environment - - # Enter the environment and run the build - - # Done - print() - print("Done.") - print() - return 0 - - - -# Actual entrypoint -if __name__ == "__main__": - # Define the arguments - parser = argparse.ArgumentParser() - parser.add_argument("BINARY", choices=[ "cli", "ctl", "cc" ], help="The binary to build in the chroot. Uses the same naming scheme as the main `make.py` file.") - parser.add_argument("--debug", action="store_true", help="If given, shows additional debug statements.") - parser.add_argument("-v", "--glibc-version", default="2.28", help="The GLIBC version to build inside the environment. If you specify a version and no additional building process for GLIBC is triggered, it means that the Debian version ships this by default.") - parser.add_argument("-d", "--debian-version", default="buster", help="The Debian version to instantiate. Should be one of the version's codenames.") - parser.add_argument("-p", "--path", default="./$DEBIAN_VERSION_chroot_glibc_$GLIBC_VERSION", help="The location of the new chroot directory. 
You can use '$GLIBC_VERSION' and '$DEBIAN_VERSION' as wildcards for the specified GLIBC and Debian versions, respectively.") - - # Parse the arguments - args = parser.parse_args() - - # Run main - exit(main(args.BINARY, args.path, args.glibc_version, args.debian_version, args.debug)) - - # sudo debootstrap --variant=buildd --arch amd64 buster ~/buster_chroot - # sudo chroot ~/buster_chroot - # export PATH="/bin:/sbin:/usr/bin:/usr/local/bin:/root/.cargo/bin" diff --git a/make.py b/make.py deleted file mode 100755 index 4a2bb053..00000000 --- a/make.py +++ /dev/null @@ -1,3918 +0,0 @@ -#!/usr/bin/env python3 -# MAKE.py -# by Lut99 -# -# Created: -# 09 Jun 2022, 12:20:28 -# Last edited: -# 08 Aug 2024, 14:17:26 -# Auto updated? -# Yes -# -# Description: -# Python script that implements the (more advanced) make script for the -# Brane infrastructure. -# - -from __future__ import annotations - -import abc -import argparse -import hashlib -import json -import os -import pathlib -import platform -import shutil -import subprocess -import sys -import tarfile -import typing - - -##### CONSTANTS ##### -# List of services that live in the central part of an instance -CENTRAL_SERVICES = [ "prx", "api", "drv", "plr" ] -# List of auxillary services in the central part of an instance -# At least, the ones we have to build. 
-AUX_CENTRAL_SERVICES = [] -# List of services that live in a worker node in an instance -WORKER_SERVICES = [ "prx", "job", "reg", "chk" ] -# List of auxillary services in a worker node in an instance -AUX_WORKER_SERVICES = [] - -# The directory where we compile OpenSSL to -OPENSSL_DIR = "./target/openssl/$ARCH" - -# The desired source files that we want to build against for OpenSSL -OPENSSL_FILES = [ - OPENSSL_DIR + "/lib/libcrypto.a", OPENSSL_DIR + "/lib/libssl.a", - OPENSSL_DIR + "/lib/pkgconfig/libcrypto.pc", OPENSSL_DIR + "/lib/pkgconfig/libssl.pc", OPENSSL_DIR + "/lib/pkgconfig/openssl.pc", - OPENSSL_DIR + "/include/openssl/aes.h", OPENSSL_DIR + "/include/openssl/asn1err.h", OPENSSL_DIR + "/include/openssl/asn1.h", - OPENSSL_DIR + "/include/openssl/asn1_mac.h", OPENSSL_DIR + "/include/openssl/asn1t.h", OPENSSL_DIR + "/include/openssl/asyncerr.h", - OPENSSL_DIR + "/include/openssl/async.h", OPENSSL_DIR + "/include/openssl/bioerr.h", OPENSSL_DIR + "/include/openssl/bio.h", - OPENSSL_DIR + "/include/openssl/blowfish.h", OPENSSL_DIR + "/include/openssl/bnerr.h", OPENSSL_DIR + "/include/openssl/bn.h", - OPENSSL_DIR + "/include/openssl/buffererr.h", OPENSSL_DIR + "/include/openssl/buffer.h", OPENSSL_DIR + "/include/openssl/camellia.h", - OPENSSL_DIR + "/include/openssl/cast.h", OPENSSL_DIR + "/include/openssl/cmac.h", OPENSSL_DIR + "/include/openssl/cmserr.h", - OPENSSL_DIR + "/include/openssl/cms.h", OPENSSL_DIR + "/include/openssl/comperr.h", OPENSSL_DIR + "/include/openssl/comp.h", - OPENSSL_DIR + "/include/openssl/conf_api.h", OPENSSL_DIR + "/include/openssl/conferr.h", OPENSSL_DIR + "/include/openssl/conf.h", - OPENSSL_DIR + "/include/openssl/cryptoerr.h", OPENSSL_DIR + "/include/openssl/crypto.h", OPENSSL_DIR + "/include/openssl/cterr.h", - OPENSSL_DIR + "/include/openssl/ct.h", OPENSSL_DIR + "/include/openssl/des.h", OPENSSL_DIR + "/include/openssl/dherr.h", - OPENSSL_DIR + "/include/openssl/dh.h", OPENSSL_DIR + "/include/openssl/dsaerr.h", OPENSSL_DIR + 
"/include/openssl/dsa.h", - OPENSSL_DIR + "/include/openssl/dtls1.h", OPENSSL_DIR + "/include/openssl/ebcdic.h", OPENSSL_DIR + "/include/openssl/ecdh.h", - OPENSSL_DIR + "/include/openssl/ecdsa.h", OPENSSL_DIR + "/include/openssl/ecerr.h", OPENSSL_DIR + "/include/openssl/ec.h", - OPENSSL_DIR + "/include/openssl/engineerr.h", OPENSSL_DIR + "/include/openssl/engine.h", OPENSSL_DIR + "/include/openssl/e_os2.h", - OPENSSL_DIR + "/include/openssl/err.h", OPENSSL_DIR + "/include/openssl/evperr.h", OPENSSL_DIR + "/include/openssl/evp.h", - OPENSSL_DIR + "/include/openssl/hmac.h", OPENSSL_DIR + "/include/openssl/idea.h", OPENSSL_DIR + "/include/openssl/kdferr.h", - OPENSSL_DIR + "/include/openssl/kdf.h", OPENSSL_DIR + "/include/openssl/lhash.h", OPENSSL_DIR + "/include/openssl/md2.h", - OPENSSL_DIR + "/include/openssl/md4.h", OPENSSL_DIR + "/include/openssl/md5.h", OPENSSL_DIR + "/include/openssl/mdc2.h", - OPENSSL_DIR + "/include/openssl/modes.h", OPENSSL_DIR + "/include/openssl/objectserr.h", OPENSSL_DIR + "/include/openssl/objects.h", - OPENSSL_DIR + "/include/openssl/obj_mac.h", OPENSSL_DIR + "/include/openssl/ocsperr.h", OPENSSL_DIR + "/include/openssl/ocsp.h", - OPENSSL_DIR + "/include/openssl/opensslconf.h", OPENSSL_DIR + "/include/openssl/opensslv.h", OPENSSL_DIR + "/include/openssl/ossl_typ.h", - OPENSSL_DIR + "/include/openssl/pem2.h", OPENSSL_DIR + "/include/openssl/pemerr.h", OPENSSL_DIR + "/include/openssl/pem.h", - OPENSSL_DIR + "/include/openssl/pkcs12err.h", OPENSSL_DIR + "/include/openssl/pkcs12.h", OPENSSL_DIR + "/include/openssl/pkcs7err.h", - OPENSSL_DIR + "/include/openssl/pkcs7.h", OPENSSL_DIR + "/include/openssl/rand_drbg.h", OPENSSL_DIR + "/include/openssl/randerr.h", - OPENSSL_DIR + "/include/openssl/rand.h", OPENSSL_DIR + "/include/openssl/rc2.h", OPENSSL_DIR + "/include/openssl/rc4.h", - OPENSSL_DIR + "/include/openssl/rc5.h", OPENSSL_DIR + "/include/openssl/ripemd.h", OPENSSL_DIR + "/include/openssl/rsaerr.h", - OPENSSL_DIR + 
"/include/openssl/rsa.h", OPENSSL_DIR + "/include/openssl/safestack.h", OPENSSL_DIR + "/include/openssl/seed.h", - OPENSSL_DIR + "/include/openssl/sha.h", OPENSSL_DIR + "/include/openssl/srp.h", OPENSSL_DIR + "/include/openssl/srtp.h", - OPENSSL_DIR + "/include/openssl/ssl2.h", OPENSSL_DIR + "/include/openssl/ssl3.h", OPENSSL_DIR + "/include/openssl/sslerr.h", - OPENSSL_DIR + "/include/openssl/ssl.h", OPENSSL_DIR + "/include/openssl/stack.h", OPENSSL_DIR + "/include/openssl/storeerr.h", - OPENSSL_DIR + "/include/openssl/store.h", OPENSSL_DIR + "/include/openssl/symhacks.h", OPENSSL_DIR + "/include/openssl/tls1.h", - OPENSSL_DIR + "/include/openssl/tserr.h", OPENSSL_DIR + "/include/openssl/ts.h", OPENSSL_DIR + "/include/openssl/txt_db.h", - OPENSSL_DIR + "/include/openssl/uierr.h", OPENSSL_DIR + "/include/openssl/ui.h", OPENSSL_DIR + "/include/openssl/whrlpool.h", - OPENSSL_DIR + "/include/openssl/x509err.h", OPENSSL_DIR + "/include/openssl/x509.h", OPENSSL_DIR + "/include/openssl/x509v3err.h", - OPENSSL_DIR + "/include/openssl/x509v3.h", OPENSSL_DIR + "/include/openssl/x509_vfy.h" -] - - - - - -##### HELPER FUNCTIONS ##### -def supports_color(): - """ - Returns True if the running system's terminal supports color, and False - otherwise. - - From: https://stackoverflow.com/a/22254892 - """ - plat = sys.platform - supported_platform = plat != 'Pocket PC' and (plat != 'win32' or - 'ANSICON' in os.environ) - # isatty is not always implemented, #6223. - is_a_tty = hasattr(sys.stdout, 'isatty') and sys.stdout.isatty() - return supported_platform and is_a_tty - -def wrap_description(text: str, indent: int, max_width: int, skip_first_indent: bool = False) -> str: - """ - Wraps the given piece of text to be at most (but not including) `max_width` characters wide. - - The `indent` indicates some arbitrary amount of indent to add before each line. This is included in the total width of the line. 
- """ - - # Sanity check the indent - if indent >= max_width: - raise ValueError(f"indent must be lower than max_width (assertion {indent} < {max_width} fails)") - - # Go through the text word-by-word - result = f"{(' ' * indent) if not skip_first_indent else ''}" - w = indent - for word in text.split(): - # Get the length of the word minus the ansi escaped codes - word_len = 0 - i = 0 - while i < len(word): - # Get the char - c = word[i] - - # Switch on ansi escape character or not - if c == '\033': - c = word[i + 1] - if c == '[': - # It is ansi; skip until and including the 'm' - j = 0 - while i + j < len(word) and word[i + j] != 'm': - j += 1 - i += 1 + j + 1 - - # Go to the next char - word_len += 1 - i += 1 - - # Check if this word fits as a whole - if w + word_len < max_width: - # It does, add it - result += word - w += word_len - else: - # Otherwise, always go to a new line - result += f"\n{' ' * indent}" - w = indent - - # Now pop entire lines off the word if it's always too long - while w + word_len >= max_width: - # Write the front line worth of characters - result += f"{word[:max_width - w]}\n{' ' * indent}" - w = indent - # Shrink the word - word = word[max_width - w:] - word_len -= max_width - w - - # The word *must* fit now, so write it - result += word - w += word_len - - # If it still fits, add a space - if w + 1 < max_width: - result += ' ' - w += 1 - - # Done - return result - -def to_bytes(val: int) -> str: - """ - Pretty-prints the given value to some byte count. - """ - - if val < 1000: - return f"{val:.2f} bytes" - elif val < 1000000: - return f"{val / 1000:.2f} KB" - elif val < 1000000000: - return f"{val / 1000000:.2f} MB" - elif val < 1000000000000: - return f"{val / 1000000000:.2f} GB" - elif val < 1000000000000000: - return f"{val / 1000000000000:.2f} TB" - else: - return f"{val / 1000000000000000:.2f} PB" - -def perror(text: str = "", colour: bool = True): - """ - Writes text to stderr, as an Error. 
- """ - - # Get colours - start = "\033[91;1m" if colour and supports_color() else "" - end = "\033[0m" if colour and supports_color() else "" - - # Print it - print(f"{start}[ERROR] {text}{end}", file=sys.stderr) - -def pwarning(text: str = "", colour: bool = True): - """ - Writes text to srderr, as a warning string. - """ - - # Get colours - start = "\033[93;1m" if colour and supports_color() else "" - end = "\033[0m" if colour and supports_color() else "" - - # Print it - print(f"{start}[warning] {text}{end}", file=sys.stderr) - -def pdebug(text: str = "", colour: bool = True): - """ - Writes text to stdout, as a debug string. - """ - - # Skip if not debugging - if not debug: return - - # Get colours - start = "\033[90m" if colour and supports_color() else "" - end = "\033[0m" if colour and supports_color() else "" - - # Print it - print(f"{start}[debug] {text}{end}") - -def cancel(text: str = "", code = 1, colour: bool = True) -> typing.NoReturn: - """ - Prints some error message to stderr, then quits the program by calling exit(). 
- """ - - perror(text, colour=colour) - exit(code) - -def resolve_args(text: str, args: argparse.Namespace) -> str: - """ - Returns the same string, but with a couple of values replaced: - - `$RELEASE` with 'release' or 'debug' (depending on the '--dev' flag) - - `$OS` with a default identifier for the OS (see '$RUST_OS') - - `$RUST_OS` with a Rust-appropriate identifier for the OS (based on the '--os' flag) - - `$ARCH` with a default identifier for the architecture (see '$RUST_ARCH') - - `$RUST_ARCH` with a Rust-appropriate identifier for the architecture (based on the '--arch' flag) - - `$DOCKER_ARCH` with a Docker-appropriate identifier for the architecture (based on the '--arch' flag) - - `$JUICEFS_ARCH` with a JuiceFS-appropriate identifier for the architecture (based on the '--arch' flag) - - `$CWD` with the current working directory (based on what `os.getcwd()` reports) - """ - - return text \ - .replace("$RELEASE", "release" if not args.dev else "debug") \ - .replace("$OS", args.os.to_rust()) \ - .replace("$RUST_OS", args.os.to_rust()) \ - .replace("$ARCH", args.arch.to_rust()) \ - .replace("$RUST_ARCH", args.arch.to_rust()) \ - .replace("$DOCKER_ARCH", args.arch.to_docker()) \ - .replace("$JUICEFS_ARCH", args.arch.to_juicefs()) \ - .replace("$REASONER", args.reasoner) \ - .replace("$CWD", os.getcwd()) - -def cache_outdated(args: argparse.Namespace, file: str, is_src: bool) -> bool: - """ - Checks if the given source file/directory exists and needs - recompilation. - - It needs recompilation if: - - It's a directory: - - Any of its source files (recursively) needs to be recompiled - - It's a file: - - The file's hash wasn't cached yet - - The hashes of the file & directory do not match - - Additionally, the user will be warned if the source doesn't exist. 
- """ - - # Get absolute version of the hash_cache - hash_cache = os.path.abspath(args.cache + ("/srcs" if is_src else "/dsts")) - - # Match the type of the source file - if os.path.isfile(file): - # It's a file; check if we know its hash - hsrc = os.path.abspath(hash_cache + f"/{file}") - if hsrc[:len(hash_cache)] != hash_cache: raise ValueError(f"Hash source '{hsrc}' is not in the hash cache ({hash_cache}); please do not escape it") - if not os.path.exists(hsrc): - pdebug(f"Cached file '{file}' marked as outdated because it has no cache entry (missing '{hsrc}')") - return True - - # Compute the hash of the file - try: - with open(file, "rb") as h: - src_hash = hashlib.md5() - while True: - data = h.read(65536) - if not data: break - src_hash.update(h.read()) - except IOError as e: - pwarning(f"Failed to read source file '{file}' for hashing: {e} (assuming target needs to be rebuild)") - return True - - # Compare it with that in the file - try: - with open(hsrc, "r") as h: - cache_hash = h.read() - except IOError as e: - pwarning(f"Failed to read hash cache file '{hsrc}': {e} (assuming target needs to be rebuild)") - return True - if src_hash.hexdigest() != cache_hash: - pdebug(f"Cached file '{file}' marked as outdated because its hash does not match that in the cache (cache file: '{hsrc}')") - return True - - # Otherwise, no recompilation needed - return False - - elif os.path.isdir(file): - # It's a dir; recurse - for nested_file in os.listdir(file): - if cache_outdated(args, os.path.join(file, nested_file), is_src): - pdebug(f"Cached directory '{file}' marked as outdated because one of its children ('{nested_file}') is outdated") - return True - return False - - else: - # In this case, we're sure rebuilding needs to happen (since they may be as a result from a dependency) - pwarning(f"Cached file '{file}' is not a file or directory (is the sources/results list up-to-date?)") - return True - -def update_cache(args: argparse.Namespace, file: str, is_src: bool): - 
""" - Updates the hash of the given source file in the given hash cache. - If the src is a file, then we simply compute the hash. - We recurse if it's a directory. - """ - - # Get absolute version of the hash_cache - hash_cache = os.path.abspath(args.cache + ("/srcs" if is_src else "/dsts")) - - # Match the type of the source file - if os.path.isfile(file): - # Attempt to compute the hash - try: - with open(file, "rb") as h: - src_hash = hashlib.md5() - while True: - data = h.read(65536) - if not data: break - src_hash.update(h.read()) - except IOError as e: - pwarning(f"Failed to read source file '{file}' to compute hash: {e} (compilation will likely always happen until fixed)") - return - - # Check if the target directory exists - hsrc = os.path.abspath(hash_cache + f"/{file}") - if hsrc[:len(hash_cache)] != hash_cache: raise ValueError(f"Hash source '{hsrc}' is not in the hash cache ({hash_cache}); please do not escape it") - os.makedirs(os.path.dirname(hsrc), exist_ok=True) - - # Write the hash to it - try: - with open(hsrc, "w") as h: - h.write(src_hash.hexdigest()) - except IOError as e: - pwarning(f"Failed to write hash cache to '{hsrc}': {e} (compilation will likely always happen until fixed)") - - elif os.path.isdir(file): - # It's a dir; recurse - for nested_file in os.listdir(file): - update_cache(args, os.path.join(file, nested_file), is_src) - - else: - # Warn the user - pwarning(f"Path '{file}' not found (are the source and destination lists up-to-date?)") - -def flags_changed(args: argparse.Namespace, name: str) -> bool: - """ - Given the list of arguments, examines if the last time the given Target was compiled any relevant flags were different. 
- - Flags examined are: - - `--dev` - - `--con` - """ - - # Get absolute version of the hash_cache - flags_cache = os.path.abspath(args.cache + "/flags") - fsrc = flags_cache + f"/{name}" - - # If the file does not exist, then we always go on - if not os.path.exists(fsrc): - pdebug(f"Flags file '{fsrc}' not found; assuming target is outdated") - return True - - # Attempt to read the cache file - cached: dict[str, typing.Any] = { - "dev": None, - "con": None, - } - try: - with open(fsrc, "r") as h: - # Read line-by-line - l = 0 - for line in h.readlines(): - l += 1 - - # Attempt to split into flag/value pair - parts = [p.strip() for p in line.split("=")] - if len(parts) != 2: - pwarning(f"Line {l} in flags cache file '{fsrc}' is not a valid flag/value pair (skipping line)") - continue - (flag, value) = (parts[0].lower(), parts[1]) - - # See if we now this flag - if flag not in cached: - pwarning(f"Line {l} in flags cache file '{fsrc}' has unknown flag '{flag}' (ignoring)") - continue - - # Split on the flag to parse further - if flag == "dev": - cached["dev"] = value.lower() == "true" - elif flag == "con": - cached["con"] = value.lower() == "true" - - except IOError as e: - pwarning(f"Could not read flags cache file '{fsrc}': {e} (assuming target is outdated)") - return True - - # Now compare - for flag in cached: - # Make sure we parsed this one - if cached[flag] is None: - pwarning(f"Missing flag '{flag}' in flags cache file '{fsrc}' (assuming target is outdated)") - return True - # Check if outdated - if getattr(args, flag) != cached[flag]: - pdebug(f"Flags in flags file '{fsrc}' do not match current flags; assuming target is outdated") - return True - - # No outdating occurred - return False - -def update_flags(args: argparse.Namespace, name: str): - """ - Updates the flag cache for the given Target to the current args dict. 
- """ - - # Get absolute version of the hash_cache - flags_cache = os.path.abspath(args.cache + "/flags") - - # Set the values - cached = { - "dev": args.dev, - "con": args.con, - } - - # Write it - fsrc = flags_cache + f"/{name}" - os.makedirs(os.path.dirname(fsrc), exist_ok=True) - try: - with open(fsrc, "w") as h: - for (flag, value) in cached.items(): - h.write(f"{flag}={value}\n") - except IOError as e: - pwarning(f"Could not write flags cache file '{fsrc}': {e} (recompilation will occur for this target until fixed)") - -def deduce_toml_src_dirs(toml: str) -> typing.List[str] | None: - """ - Given a Cargo.toml file, attempts to deduce the (local) source crates. - - Returns a list of the folders that are the crates on which the - Cargo.toml depends, including the one where it lives (i.e., its - directory-part). - """ - - res = [ os.path.dirname(toml) ] - - # Scan the lines in the file - try: - with open(toml, "r") as h: - # Read it all - text = h.read() - - # Parse - parser = CargoTomlParser(text) - (res, errs) = parser.parse() - if len(errs) > 0: - for err in errs: - perror(f"{err}") - return None - - # Else, resolve the given paths - for i in range(len(res)): - res[i] = os.path.join(os.path.dirname(toml), res[i]) - # Add the cargo path - res.append(os.path.dirname(toml)) - # Make all paths absolute - for i in range(len(res)): - res[i] = os.path.abspath(res[i]) - - # Done - return res - - except IOError as e: - cancel(f"Could not read given Cargo.toml '{toml}': {e}") - -def get_image_digest(path: str) -> str: - """ - Given a Docker image .tar file, attempts to read the digest and return it. 
- """ - - # Open the tar file - archive = tarfile.open(path) - - # Find the manifest file - digest = None - for file in archive.getmembers(): - # Skip if not the proper file - if not file.isfile() or file.name != "manifest.json": continue - - # Attempt to read it - f = archive.extractfile(file) - if f is None: - cancel(f"Failed to extract archive member '{file}' in '{path}'.") - smanifest = f.read().decode("utf-8") - f.close() - - # Read as json - manifest = json.loads(smanifest) - - # Extract the config blob (minus prefix) - config = manifest[0]["Config"] - if config[:13] != "blobs/sha256/": cancel("Found Config in manifest.json, but blob had incorrect start (corrupted image .tar?)") - config = config[13:] - - # Done - digest = config - - # Throw a failure - if digest is None: - cancel(f"Did not find image digest in {path} (is it a valid Docker image file?)") - - # Done - archive.close() - return digest - - - -##### HELPER CLASSES ##### -class CargoTomlParser: - """ - Parses a given file as if it were a Cargo.toml file. - - This is definitely not a TOML compliant-parser, though, not least of - which because it only extracts stuff under the 'dependencies' toplevel - section. - """ - - - # Baseclasses - class Symbol(abc.ABC): - """ - Baseclass for all the symbols. - """ - - is_term : bool - start : typing.Tuple[int, int] - end : typing.Tuple[int, int] - - - def __init__(self, is_term: bool, start: typing.Tuple[int, int], end: typing.Tuple[int, int]) -> None: - """ - Constructor for the Symbol. - - # Arguments - - `is_term`: Whether this Symbol is a terminal or not (it's a nonterminal). - - `start`: The (inclusive) start position of this symbol in the text. - - `stop`: The (inclusive) stop position of this symbol in the text. - """ - - self.is_term = is_term - self.start = start - self.end = end - - def __str__(self) -> str: - return "Symbol" - - class Terminal(Symbol): - """ - Baseclass for all the parser tokens. 
- """ - - def __init__(self, start: typing.Tuple[int, int], end: typing.Tuple[int, int]): - """ - Constructor for the Terminal. - - # Arguments - - `start`: The (inclusive) start position of this symbol in the text. - - `end`: The (inclusive) stop position of this symbol in the text. - """ - - CargoTomlParser.Symbol.__init__(self, True, start, end) - - def __str__(self) -> str: - return "Terminal" - - class Nonterminal(Symbol): - """ - Baseclass for all the parser nonterminals. - """ - - def __init__(self, start: typing.Tuple[int, int], end: typing.Tuple[int, int]): - """ - Constructor for the Nonterminal. - - # Arguments - - `start`: The (inclusive) start position of this symbol in the text. - - `end`: The (inclusive) stop position of this symbol in the text. - """ - - CargoTomlParser.Symbol.__init__(self, False, start, end) - - def __str__(self) -> str: - return "NonTerminal" - - - # Terminals - class Identifier(Terminal): - """ - Helper class for the CargoTomlParser which represents a string token. - """ - - value : str - - - def __init__(self, value: str, start: typing.Tuple[int, int], end: typing.Tuple[int, int]) -> None: - """ - Constructor for the String - - Arguments - - `value`: The value of the String. - - `start`: The start position (as (row, col), inclusive) of the token. - - `end`: The end position (as (row, col), inclusive) of the token. - """ - - CargoTomlParser.Terminal.__init__(self, start, end) - - self.value = value - - def __str__(self) -> str: - return f"Identifier({self.value})" - - class String(Terminal): - """ - Helper class for the CargoTomlParser which represents a string value. - """ - - value : str - - - def __init__(self, value: str, start: typing.Tuple[int, int], end: typing.Tuple[int, int]) -> None: - """ - Constructor for the String - - Arguments - - `value`: The value of the String. - - `start`: The start position (as (row, col), inclusive) of the token. - - `end`: The end position (as (row, col), inclusive) of the token. 
- """ - - CargoTomlParser.Terminal.__init__(self, start, end) - - self.value = value - - def __str__(self) -> str: - return f"String({self.value})" - - class Boolean(Terminal): - """ - Helper class for the CargoTomlParser which represents a boolean value. - """ - - value : bool - - - def __init__(self, value: bool, start: typing.Tuple[int, int], end: typing.Tuple[int, int]) -> None: - """ - Constructor for the Boolean - - Arguments - - `value`: The value of the Boolean. - - `start`: The start position (as (row, col), inclusive) of the token. - - `end`: The end position (as (row, col), inclusive) of the token. - """ - - CargoTomlParser.Terminal.__init__(self, start, end) - - self.value = value - - def __str__(self) -> str: - return f"Boolean({self.value})" - - class Equals(Terminal): - """ - Helper class for the CargoTomlParser which represents an equals sign. - """ - - - def __init__(self, start: typing.Tuple[int, int], end: typing.Tuple[int, int]) -> None: - """ - Constructor for the Equals - - Arguments - - `start`: The start position (as (row, col), inclusive) of the token. - - `end`: The end position (as (row, col), inclusive) of the token. - """ - - CargoTomlParser.Terminal.__init__(self, start, end) - - def __str__(self) -> str: - return "Equals" - - class Comma(Terminal): - """ - Helper class for the CargoTomlParser which represents a comma. - """ - - - def __init__(self, start: typing.Tuple[int, int], end: typing.Tuple[int, int]) -> None: - """ - Constructor for the Comma - - Arguments - - `start`: The start position (as (row, col), inclusive) of the token. - - `end`: The end position (as (row, col), inclusive) of the token. - """ - - CargoTomlParser.Terminal.__init__(self, start, end) - - def __str__(self) -> str: - return "Comma" - - class LCurly(Terminal): - """ - Helper class for the CargoTomlParser which represents a left curly bracket. 
- """ - - - def __init__(self, start: typing.Tuple[int, int], end: typing.Tuple[int, int]) -> None: - """ - Constructor for the LCurly - - Arguments - - `start`: The start position (as (row, col), inclusive) of the token. - - `end`: The end position (as (row, col), inclusive) of the token. - """ - - CargoTomlParser.Terminal.__init__(self, start, end) - - def __str__(self) -> str: - return "LCurly" - - class RCurly(Terminal): - """ - Helper class for the CargoTomlParser which represents a right curly bracket. - """ - - - def __init__(self, start: typing.Tuple[int, int], end: typing.Tuple[int, int]) -> None: - """ - Constructor for the RCurly - - Arguments - - `start`: The start position (as (row, col), inclusive) of the token. - - `end`: The end position (as (row, col), inclusive) of the token. - """ - - CargoTomlParser.Terminal.__init__(self, start, end) - - def __str__(self) -> str: - return "RCurly" - - class LSquare(Terminal): - """ - Helper class for the CargoTomlParser which represents a left square bracket. - """ - - - def __init__(self, start: typing.Tuple[int, int], end: typing.Tuple[int, int]) -> None: - """ - Constructor for the LSquare - - Arguments - - `start`: The start position (as (row, col), inclusive) of the token. - - `end`: The end position (as (row, col), inclusive) of the token. - """ - - CargoTomlParser.Terminal.__init__(self, start, end) - - def __str__(self) -> str: - return "LSquare" - - class RSquare(Terminal): - """ - Helper class for the CargoTomlParser which represents a right square bracket. - """ - - - def __init__(self, start: typing.Tuple[int, int], end: typing.Tuple[int, int]) -> None: - """ - Constructor for the RSquare - - Arguments - - `start`: The start position (as (row, col), inclusive) of the token. - - `end`: The end position (as (row, col), inclusive) of the token. 
- """ - - CargoTomlParser.Terminal.__init__(self, start, end) - - def __str__(self) -> str: - return "RSquare" - - - # Nonterminals - class Section(Nonterminal): - """ - Represents a section in the TOML file. - """ - - header : CargoTomlParser.SectionHeader - pairs : typing.List[CargoTomlParser.KeyValuePair] - - - def __init__(self, header: CargoTomlParser.SectionHeader, pairs: typing.List[CargoTomlParser.KeyValuePair], start: typing.Tuple[int, int], end: typing.Tuple[int, int]): - """ - Constructor for the SectionHeader nonterminal. - - # Arguments - - `header`: The header of the section. - - `pairs`: The key/value pairs in this section. - - `start`: The (inclusive) start position of this token. - - `end`: The (inclusive) end position of this token. - """ - - CargoTomlParser.Nonterminal.__init__(self, start, end) - - self.header = header - self.pairs = pairs - - def __str__(self) -> str: - return f"Section({self.header}, ...)" - - class SectionHeader(Nonterminal): - """ - Represents a section header. - """ - - name : str - - def __init__(self, name: str, start: typing.Tuple[int, int], end: typing.Tuple[int, int]): - """ - Constructor for the SectionHeader nonterminal. - - # Arguments - - `name`: The name of the section. - - `start`: The (inclusive) start position of this token. - - `end`: The (inclusive) end position of this token. - """ - - CargoTomlParser.Nonterminal.__init__(self, start, end) - - self.name = name - - def __str__(self) -> str: - return f"SectionHeader({self.name})" - - class KeyValuePair(Nonterminal): - """ - Represents a Key/Value pair in the stack. - """ - - key : CargoTomlParser.Identifier - value : CargoTomlParser.Value - - - def __init__(self, key: CargoTomlParser.Identifier, value: CargoTomlParser.Value, start: typing.Tuple[int, int], end: typing.Tuple[int, int]): - """ - Constructor for the SectionHeader nonterminal. - - # Arguments - - `key`: The key of the pair (which is an Identifier). 
- - `value`: The value of the pair (which is a Value). - - `start`: The (inclusive) start position of this token. - - `end`: The (inclusive) end position of this token. - """ - - CargoTomlParser.Nonterminal.__init__(self, start, end) - - self.key = key - self.value = value - - def __str__(self) -> str: - return f"KeyValuePair({self.key}, {self.value})" - - class Value(Nonterminal): - """ - Abstracts away over the specific value. - """ - - value : CargoTomlParser.String | CargoTomlParser.Boolean | CargoTomlParser.List | CargoTomlParser.Dict - - def __init__(self, value: CargoTomlParser.String | CargoTomlParser.Boolean | CargoTomlParser.List | CargoTomlParser.Dict, start: typing.Tuple[int, int], end: typing.Tuple[int, int]): - """ - Constructor for the SectionHeader nonterminal. - - # Arguments - - `value`: The value of the Value. - - `start`: The (inclusive) start position of this token. - - `end`: The (inclusive) end position of this token. - """ - - CargoTomlParser.Nonterminal.__init__(self, start, end) - - self.value = value - - def __str__(self) -> str: - return f"Value({self.value})" - - class Dict(Nonterminal): - """ - Represents a dictionary of key/value pairs. - """ - - pairs : typing.List[CargoTomlParser.KeyValuePair] - - - def __init__(self, pairs: typing.List[CargoTomlParser.KeyValuePair], start: typing.Tuple[int, int], end: typing.Tuple[int, int]): - """ - Constructor for the SectionHeader nonterminal. - - # Arguments - - `pairs`: The list of KeyValuePairs in this dictionary. - - `start`: The (inclusive) start position of this token. - - `end`: The (inclusive) end position of this token. - """ - - CargoTomlParser.Nonterminal.__init__(self, start, end) - - self.pairs = pairs - - def __str__(self) -> str: - res = "Dict({\n" - for p in self.pairs: - res += f" {p},\n" - return res + "})" - - class List(Nonterminal): - """ - Represents a list of values. 
- """ - - values : typing.List[CargoTomlParser.Value] - - - def __init__(self, values: typing.List[CargoTomlParser.Value], start: typing.Tuple[int, int], end: typing.Tuple[int, int]): - """ - Constructor for the SectionHeader nonterminal. - - # Arguments - - `values`: The list of Values in this list. - - `start`: The (inclusive) start position of this token. - - `end`: The (inclusive) end position of this token. - """ - - CargoTomlParser.Nonterminal.__init__(self, start, end) - - self.values = values - - def __str__(self) -> str: - res = "List([" - for i, v in enumerate(self.values): - if i > 0: res += ", " - res += f"{v}" - return res + "])" - - - - _lines : typing.List[str] - _col : int - _line : int - - - def __init__(self, raw: str) -> None: - """ - Constructor for the CargoTomlParser. - - Arguments: - - `raw`: The raw text to parse as a Cargo.toml file. - """ - - # Initialize stuff - self._lines = raw.split('\n') - self._col = 0 - self._line = 0 - - def _token(self) -> Terminal | Exception | None: - """ - Consumes the next token from the internal text. - - If the returned value derived from an Exception, then the text is - invalid TOML. - If the returned value is None, then no more tokens are available. 
- """ - - start = (0, 0) - buffer = "" - mode = "start" - while self._line < len(self._lines): - if self._col >= len(self._lines[self._line]): - # Update the values - self._col = 0 - self._line += 1 - - # Throw errors where it matters - if mode == "identifier": - return CargoTomlParser.Identifier(buffer, start, (self._line - 1, len(self._lines[self._line - 1]) - 1)) - elif mode == "section": - return ValueError(f"{self._line}:{self._col}: Encountered unterminated section header (missing ']')") - elif mode == "string": - return ValueError(f"{self._line}:{self._col}: Encountered unterminated string (missing '\"')") - elif mode == "string_escape": - return ValueError(f"{self._line}:{self._col}: Missing escape character") - elif mode == "comment": - # Go back to start mode - mode = "start" - if self._line >= len(self._lines): - break - if self._col >= len(self._lines[self._line]): - continue - c = self._lines[self._line][self._col] - # print(f"\n >>> [{mode}] CHAR {self._line}:{self._col}: '{c}'") - - # Switch on the mode - if mode == "start": - start = (self._line, self._col) - - # Switch on the character - if (ord(c) >= ord('a') and ord(c) <= ord('z')) or (ord(c) >= ord('A') and ord(c) <= ord('Z')) or c == '_': - # Switch to parsing an identifier token - mode = "identifier" - buffer += c - self._col += 1 - continue - elif c == '\'' or c == '"': - # Switch to parsing as string literal - mode = "string" - self._col += 1 - continue - elif c == '=': - # Just parse as an equals-sign - self._col += 1 - return CargoTomlParser.Equals(start, start) - elif c == ',': - # Just parse as a comma - self._col += 1 - return CargoTomlParser.Comma(start, start) - elif c == '{': - # Return the character as a token - self._col += 1 - return CargoTomlParser.LCurly(start, start) - elif c == '}': - # Return the character as a token - self._col += 1 - return CargoTomlParser.RCurly(start, start) - elif c == '[': - # Simply return the LBracket - self._col += 1 - return 
CargoTomlParser.LSquare(start, start) - elif c == ']': - # Simply return the RBracket - self._col += 1 - return CargoTomlParser.RSquare(start, start) - elif c == ' ' or c == '\t' or c == '\r': - # Skip - self._col += 1 - continue - elif c == '#': - # Comment - mode = "comment" - self._col += 1 - continue - else: - self._col += 1 - return ValueError(f"{start[0]}:{start[1]}: Unexpected character '{c}'") - - elif mode == "identifier": - # Switch on the character - if (ord(c) >= ord('a') and ord(c) <= ord('z')) or \ - (ord(c) >= ord('A') and ord(c) <= ord('Z')) or \ - (ord(c) >= ord('0') and ord(c) <= ord('9')) or \ - c == '-' or c == '_': - # Keep parsing - buffer += c - self._col += 1 - continue - else: - # Done parsing; let start handle this char - - # If keyword, intercept that - if buffer == "true" or buffer == "false": - # It's a boolean instead - return CargoTomlParser.Boolean(buffer == "true", start, (self._line, self._col - 1)) - - # Otherwise, identifier - return CargoTomlParser.Identifier(buffer, start, (self._line, self._col - 1)) - - elif mode == "string": - # Switch on the character - if c == '\\': - # Parse as escaped string - mode = "string_escape" - self._col += 1 - continue - elif c == '"': - # We're done! 
- self._col += 1 - return CargoTomlParser.String(buffer, start, (self._line, self._col - 1)) - else: - # Parse as part of the token - buffer += c - self._col += 1 - continue - - elif mode == "string_escape": - # Switch on the character - if c == '\\' or c == '"' or c == '\'': - buffer += c - mode = "string" - self._col += 1 - continue - elif c == 'n': - buffer += '\n' - mode = "string" - self._col += 1 - continue - elif c == 't': - buffer += '\t' - mode = "string" - self._col += 1 - continue - elif c == 'r': - buffer += '\r' - mode = "string" - self._col += 1 - continue - else: - # Ignore - perror(f"{self._line}:{self._col}: Unknown escape character '{c}' (ignoring)") - mode = "string" - self._col += 1 - continue - - elif mode == "comment": - # Do nothing - self._col += 1 - continue - - else: - raise ValueError(f"Unknown mode '{mode}'; this should never happen!") - return None - - def _reduce(self, stack: typing.List[Symbol]) -> typing.Tuple[typing.List[Symbol], str | None | Exception]: - """ - Attempts to apply one of the reduction rules to the current stack of tokens. - - Upon success, returns some string that identifies the rule applied. - If no rule has been applied, returns None. - Upon failure, returns an Exception. - """ - - # Start at the end, go backwards to identify the rules - mode = "start" - i = len(stack) - 1 - list_values = [] - while i >= 0: - # Get the current symbol - s = stack[i] - - # Match the mode - if mode == "start": - # Switch between token or not - if s.is_term: - # Match the type of it - if type(s) == CargoTomlParser.RSquare: - # Might be a section header or a list! - mode = "rsquare" - i -= 1 - continue - - elif type(s) == CargoTomlParser.RCurly: - # Might be a dict! 
- mode = "dict" - i -= 1 - continue - - elif type(s) == CargoTomlParser.String: - # Immediately cast to a value - return (stack[:i] + [ CargoTomlParser.Value(s, s.start, s.end) ], "value_string") - - elif type(s) == CargoTomlParser.Boolean: - # Immediately cast to a value - return (stack[:i] + [ CargoTomlParser.Value(s, s.start, s.end) ], "value_boolean") - - else: - # No rule (yet) - return (stack, None) - - else: - # Match the type of it - if type(s) == CargoTomlParser.SectionHeader: - # Cast to a Section - return (stack[:i] + [ CargoTomlParser.Section(s, [], s.start, s.end) ], "section_header") - - elif type(s) == CargoTomlParser.KeyValuePair: - # See if it is preceded by a Section - mode = "key_value_pair" - i -= 1 - continue - - elif type(s) == CargoTomlParser.Value: - # Might be a key/value pair - mode = "value" - i -= 1 - continue - - elif type(s) == CargoTomlParser.List: - # Cast to a value - return (stack[:i] + [ CargoTomlParser.Value(s, s.start, s.end) ], "value_list") - - elif type(s) == CargoTomlParser.Dict: - # Cast to a value - return (stack[:i] + [ CargoTomlParser.Value(s, s.start, s.end) ], "value_dict") - - else: - # No rule (yet) - return (stack, None) - - elif mode == "key_value_pair": - # Switch between token or not - if s.is_term: - # Ignore - return (stack, None) - - else: - # Match the type of it - if type(s) == CargoTomlParser.Section: - # Append to the section - s.pairs.append(typing.cast(CargoTomlParser.KeyValuePair, stack[i + 1])) - s.end = stack[i + 1].end - return (stack[:i + 1], "section_append") - - else: - # No rule (yet) - return (stack, None) - - elif mode == "rsquare": - # Switch between token or not - if s.is_term: - # Match the type of it - if type(s) == CargoTomlParser.Identifier: - # Yes, on the road to section header! 
- mode = "rsquare_ident" - i -= 1 - continue - - elif type(s) == CargoTomlParser.LSquare: - # Empty list, we can only assume - new_l = CargoTomlParser.List([], stack[i].start, stack[i + 1].end) - return (stack[:i] + [ new_l ], "empty-list") - - else: - # No rule (yet) - return (stack, None) - - else: - # Match the type of it - if type(s) == CargoTomlParser.Value: - # It must be the start of a list - mode = "list" - continue - - else: - # No rule (yet) - return (stack, None) - - elif mode == "rsquare_ident": - # Switch between token or not - if s.is_term: - # Match the type of it - if type(s) == CargoTomlParser.LSquare: - # Whohoo, replace them in the stack (reduce) - new_sh = CargoTomlParser.SectionHeader(typing.cast(CargoTomlParser.String, stack[i + 1]).value, stack[i + 2].start, stack[i].end) - return (stack[:i] + [ new_sh ], "section-header") - - else: - # No rule (yet) - return (stack, None) - - else: - # No rule (yet) - return (stack, None) - - elif mode == "dict": - # Switch between token or not - if s.is_term: - # Match the type of it - if type(s) == CargoTomlParser.LCurly: - # It's an empty dict - new_d = CargoTomlParser.Dict([], stack[i].start, stack[i + 1].end) - return (stack[:i] + [ new_d ], "empty-dict") - - else: - return (stack[:i], ValueError(f"Invalid dict entry: expected a key/value pair, got {s}")) - - else: - # Match the type of it - if type(s) == CargoTomlParser.KeyValuePair: - # It's a key/value pair; start parsing it as such - list_values.append(s) - mode = "dict_pair" - i -= 1 - continue - - else: - return (stack[:i], ValueError(f"Invalid dict entry: expected a key/value pair, got {s}")) - - elif mode == "dict_pair": - # Switch between token or not - if s.is_term: - # Match the type of it - if type(s) == CargoTomlParser.LCurly: - # End of the list - list_values.reverse() - new_d = CargoTomlParser.Dict(list_values, stack[i].start, stack[len(stack) - 1].end) - return (stack[:i] + [ new_d ], "dict") - - elif type(s) == CargoTomlParser.Comma: - 
# The list continious - mode = "dict" - i -= 1 - continue - - else: - return (stack[:i], ValueError(f"Invalid dict: expected ',' or '{{', got {s}")) - - else: - # We don't accept any nonterminals at this stage - return (stack[:i], ValueError(f"Invalid list: expected ',' or '[', got {s}")) - - elif mode == "list": - # Switch between token or not - if s.is_term: - # Match the type of it - if len(list_values) == 0 and type(s) == CargoTomlParser.LSquare: - # End of the list, but it is empty - new_l = CargoTomlParser.List([], stack[i].start, stack[i + 1].end) - return (stack[:i] + [ new_l ], "empty-list") - - else: - return (stack[:i], ValueError(f"Invalid list entry: expected a Value, got {s}")) - - else: - # Match the type of it - if type(s) == CargoTomlParser.Value: - # Yes, keep parsing - list_values.append(typing.cast(CargoTomlParser.KeyValuePair, s)) - mode = "list_value" - i -= 1 - continue - - else: - return (stack[:i], ValueError(f"Invalid list entry: expected a Value, got {s}")) - - elif mode == "list_value": - # Switch between token or not - if s.is_term: - # Match the type of it - if type(s) == CargoTomlParser.LSquare: - # End of the list - list_values.reverse() - new_l = CargoTomlParser.List(typing.cast(typing.List[CargoTomlParser.Value], list_values), stack[i].start, stack[len(stack) - 1].end) - return (stack[:i] + [ new_l ], "list") - - elif type(s) == CargoTomlParser.Comma: - # The list continious - mode = "list" - i -= 1 - continue - - else: - return (stack[:i], ValueError(f"Invalid list: expected ',' or '[', got {s}")) - - else: - # We don't accept any nonterminals at this stage - return (stack[:i], ValueError(f"Invalid list: expected ',' or '[', got {s}")) - - elif mode == "value": - # Switch between token or not - if s.is_term: - # Match the type of it - if type(s) == CargoTomlParser.Equals: - # Yes, good going! 
- mode = "value_equals" - i -= 1 - continue - - else: - # No rule (yet) - return (stack, None) - - else: - # No rule (yet) - return (stack, None) - - elif mode == "value_equals": - # Switch between token or not - if s.is_term: - # Match the type of it - if type(s) == CargoTomlParser.Identifier: - # It's a key/value pair - new_kvp = CargoTomlParser.KeyValuePair(typing.cast(CargoTomlParser.Identifier, stack[i]), typing.cast(CargoTomlParser.Value, stack[i + 2]), stack[i].start, stack[i + 2].end) - return (stack[:i] + [ new_kvp ], "key-value-pair") - - else: - # No rule (yet) - return (stack, None) - - else: - # No rule (yet) - return (stack, None) - - else: - raise ValueError(f"Unknown mode '{mode}'; this should never happen!") - - # Nothing to be done - return (stack, None) - - - def parse(self) -> typing.Tuple[typing.List[str], typing.List[Exception]]: - """ - Parses the internal Cargo.toml file to extract the list of - dependencies from it. - - Returns a list with the depedency folders of the given Cargo.toml, - excluding that of the Cargo.toml itself. 
- """ - - # Parse the tokens using a shift-reduce parser - errs = [] - stack: typing.List[CargoTomlParser.Symbol] = [] - while True: - # Get a new token - token = self._token() - - # Store errors for printing - if isinstance(token, Exception): - errs.append(token) - continue - if token is None: - break - - # Push it on the stack (shift) - stack.append(typing.cast(CargoTomlParser.Symbol, token)) - # print("Shifted") - - # # Print the stack (debug) - # print('[', end="") - # for (i, s) in enumerate(stack): - # if i > 0: print(" ", end="") - # print(f"{s}", end="") - # print(']\n'); - - # Now, attempt to (reduce) as much as possible - while True: - (stack, rule) = self._reduce(stack) - if isinstance(rule, Exception): - errs.append(rule) - continue - if rule is None: - break - # print(f"Applied rule '{rule}'") - - # # Print the stack (debug) - # print('[', end="") - # for (i, s) in enumerate(stack): - # if i > 0: print(" ", end="") - # print(f"{s}", end="") - # print(']\n'); - - # Now, in the parsed struct, attempt to extract the local crates - paths = [] - for section in stack: - # Make sure everything was parsed to a section - if type(section) != CargoTomlParser.Section: - errs.append(ValueError(f"Encountered stray symbol '{section}'")) - continue - - # Ignore any non-dependency section - if section.header.name != "dependencies" and section.header.name != "build-dependencies": continue - - # Iterate over the pairs - for dependency in section.pairs: - # Skip it the value is not a dict - if type(dependency.value.value) != CargoTomlParser.Dict: continue - - # Search the dict for a 'path' identifier - for pair in dependency.value.value.pairs: - if pair.key.value != "path": continue - - # Store the found path as a dependency folder - paths.append(typing.cast(CargoTomlParser.String, pair.value.value).value) - - # Return the result - return (paths, errs) - - - -class Arch: - """ - Defines a wrapper around architecture strings (to handle multiple - aliases). 
- """ - - _arch : str - _is_given : bool - _is_native : bool - - - def __init__(self) -> None: - # Don't reall do anything; just initialize an empty object - pass - - @staticmethod - def new(raw: str) -> Arch: - """ - Constructs a new Arch that is initialize from the given string. - """ - - # Get an empty object - arch = Arch() - - # Set the given values (casting them to set strings) - arch._arch = Arch.resolve(raw) - - # Set the properties a priori - arch._is_given = True - arch._is_native = arch._arch == Arch.host()._arch - - # Done! - return arch - - @staticmethod - def host() -> Arch: - """ - Returns a new Arch structure that is equal to the one running on the current machine. - - Uses "uname -m" to detect this. - """ - - # Open the process - try: - handle = subprocess.Popen(["uname", "-m"], stdout=subprocess.PIPE, text=True) - stdout, _ = handle.communicate() - except FileNotFoundError as e: - pdebug("Failed to run `uname -m` to detect Architecture; assuming Windows") - # Read the environment variable to find the architecture - stdout = os.environ["PROCESSOR_ARCHITECTURE"] - - # Parse the value, put it in an empty Arch object - arch = Arch() - arch._arch = Arch.resolve(stdout) - - # Overrride the propreties, then return - arch._is_given = False - arch._is_native = True - return arch - - - - def __str__(self) -> str: - """ - Returns the 'canonical' / human readable version of the Architecture. - """ - - return self._arch - - - - @staticmethod - def resolve(text: str) -> str: - """ - Resolves the given architecture string to a valid Arch internal string. 
- """ - - # Get a more forgiving version of the string - arch = text.lower().strip() - - # Cast it to the appropriate type or error - if arch == "x86_64" or arch == "amd64": - return "x86_64" - elif arch == "aarch64" or arch == "arm64": - return "aarch64" - else: - raise ValueError(f"Unknown architecture '{text}'") - - def is_given(self) -> bool: - """ - Returns whether or not the architecture is given manually or simply the host arch. - """ - - return self._is_given - - def is_native(self) -> bool: - """ - Returns whether or not the current architecture is native. - """ - - return self._is_native - - - - def to_rust(self) -> str: - """ - Returns the architecture in a way that is compatible with Rust. - """ - - return self._arch - - def to_docker(self) -> str: - """ - Returns the architecture in a way that is compatible with Docker. - """ - - return self._arch - - def to_juicefs(self) -> str: - """ - Returns the architecture in a way that is compatible with the JuiceFS image. - """ - - if self._arch == "x86_64": return "amd64" - else: return "arm64" - -class Os: - """ - Defines a wrapper around an OS string. - """ - - _os : str - _is_given : bool - _is_native : bool - - - def __init__(self) -> None: - """ - Initializes an 'empty' Os object. - """ - pass - - @staticmethod - def new(raw: str) -> Os: - """ - Constructor for the Os object. - - Arguments: - - `raw`: The raw OS string to parse. - """ - - # Get an empty object - os = Os() - - # Set the given values (casting them to set strings) - os._os = Os.resolve(raw) - - # Set the properties a priori - os._is_given = True - os._is_native = os._os == Os.host()._os - - # Done! - return os - - @staticmethod - def host() -> Os: - """ - Returns a new Os structure that is equal to the one running on the current machine. - - Uses "uname -s" to detect this. 
- """ - - # Open the process that we use to determine the host - try: - handle = subprocess.Popen(["uname", "-s"], stdout=subprocess.PIPE, text=True) - stdout, _ = handle.communicate() - except FileNotFoundError as e: - pdebug("Failed to run `uname -s` to detect OS; assuming Windows") - stdout = "windows" - - # Parse the value, put it in an empty Os object - os = Os() - os._os = Os.resolve(stdout) - - # Overrride the propreties, then return - os._is_given = False - os._is_native = True - return os - - - - def __str__(self) -> str: - """ - Returns the 'canonical' / human readable version of the Os. - """ - - return self._os - - - - @staticmethod - def resolve(text: str) -> str: - """ - Resolves the given OS string to a valid Os internal string. - """ - - # Get a more forgiving version of the string - os = text.lower().strip() - - # Cast it to the appropriate type or error - if os == "linux": - return "linux" - elif os == "darwin" or os == "macos": - return "darwin" - elif os == "windows": - return "windows" - else: - raise ValueError(f"Unknown OS '{text}'") - - def is_given(self) -> bool: - """ - Returns whether or not the OS is given manually or simply the host OS. - """ - - return self._is_given - - def is_native(self) -> bool: - """ - Returns whether or not the current OS is native. - """ - - return self._is_native - - - - def to_rust(self) -> str: - """ - Returns a string representation that makes sense for Rust. - """ - - return self._os - - - -class Match(abc.ABC): - """ - Baseclass for Match objects, which can express some condition on a string. - """ - - @abc.abstractmethod - def __init__(self) -> None: - # Simply init as empty (no parent stuff) - pass - - @abc.abstractmethod - def __str__(self) -> str: - pass - - @abc.abstractmethod - def match(self, _to_match: str, _args: argparse.Namespace) -> bool: - """ - Returns whether the given string is matched by this match. - """ - pass - -class LiteralMatch(Match): - """ - A match that matches a string literal. 
- """ - - _lit : str - - def __init__(self, literal: str) -> None: - """ - Constructor for the LiteralMatch. - - Arguments: - - `literal`: The literal to match. - """ - - # Do the parent thing - Match.__init__(self) - - # Store the literal to match - self._lit = literal - - def __str__(self) -> str: - """ - Returns a string representation of this match. - """ - - return self._lit - - def match(self, to_match: str, _args: argparse.Namespace) -> bool: - """ - Returns whether the given string is matched by this match. - """ - - # Literal matching is just... literally... matching... - return to_match == self._lit - -class StrippedMatch(Match): - """ - Matches the given string with a literal, but only after the new string has been stripped from whitespaces. - """ - - # The literal to match. - _lit : str - - def __init__(self, literal: str) -> None: - """ - Constructor for the StrippedMatch. - - Arguments: - - `literal`: The literal to match. - """ - - # Do the parent thing - Match.__init__(self) - - # Store the literal to match - self._lit = literal - - def __str__(self) -> str: - """ - Returns a string representation of this match. - """ - - return self._lit - - def match(self, to_match: str, _args: argparse.Namespace) -> bool: - """ - Returns whether the given string is matched by this match. - """ - - # Literal matching is just... literally... matching... - return to_match.strip() == self._lit - -class NegatedMatch(Match): - """ - A match that matches using another match, then negates the result. - """ - - _match : Match - - def __init__(self, match: Match) -> None: - """ - Constructor for the NegatedMatch. - - Arguments: - - `match`: The other match to negate. - """ - - # Do the parent thing - Match.__init__(self) - - # Store the literal to match - self._match = match - - def __str__(self) -> str: - """ - Returns a string representation of this match. 
- """ - - return f"Anything but '{self._match}'" - - def match(self, to_match: str, args: argparse.Namespace) -> bool: - """ - Returns whether the given string is matched by this match. - """ - - # Match using the nested one, then negate - return not self._match.match(to_match, args) - - - -class Command(abc.ABC): - """ - Baseclass for Commands, whether virtual or calling some subprocess. - """ - - @abc.abstractmethod - def __init__(self) -> None: - # Simply init as empty (no parent stuff) - pass - - def serialize(self, _args: argparse.Namespace) -> str: - """ - Allows the Command to be formatted. - """ - pass - - @abc.abstractmethod - def run(self, _args: argparse.Namespace) -> int: - """ - Runs the command. Returns the 'error code', which may be some wacky - stuff in the case of abstract commands. In any case, '0' means - success. - """ - pass - -class ShellCommand(Command): - """ - Command that runs some shell script. - """ - - _exec : str - _args : typing.List[str] - _cwd : str | None - _env : dict[str, str | None] - _description : str | None - _ignore_exit_code : bool - - - def __init__(self, exec: str, *args: str, cwd: str | None = None, env: dict[str, str | None] = {}, description: str | None = None, ignore_exit_code = False) -> None: - """ - Constructor for the Command class. - - Arguments: - - `exec`: The executable to run. - - `args`: An (initial) list of arguments to pass to the command. - - `cwd`: The current working directory for the command. Note that '$CWD' still resolves to the script's directory. - - `env`: The environment variables to set in the command. The values given here will overwrite or extend the default environment variables. To unset one, set it to 'None'. - - `description`: If given, replaces the description with this. Use '$CMD' to have part of it replaced with the command string. 
- """ - - # Set the base stuff - Command.__init__(self) - - # Populate ourselves, ez - self._exec = exec - self._args = list(args) - self._cwd = cwd - self._env = env - self._description = description - self.ignore_exit_code = ignore_exit_code - - def serialize(self, args: argparse.Namespace) -> str: - """ - Allows the Command to be formatted. - """ - - # Resolve the CWD - cwd = resolve_args(self._cwd, args) if self._cwd is not None else os.getcwd() - - # Compute the cmd string - scmd = self._exec if not " " in self._exec else f"\"{self._exec}\"" - for arg in self._args: - arg = arg.replace("$CMD_CWD", cwd) - arg = resolve_args(arg, args) - scmd += " " + (arg if not " " in arg else f"\"{arg}\"").replace("\\", "\\\\").replace("\"", "\\\"") - - # Compute the env string - env = os.environ.copy() - senv = "" - for (name, value) in self._env.items(): - # Mark all of these, but only the changes - if value is not None and name in env and env[name] == value: continue - if value is None and name not in env: continue - - # Possibly replace values - if value is not None: value = resolve_args(value, args) - svalue = (value if value is not None else '').replace("\\", "\\\\").replace("\"", "\\\"") - - # Add it to the string - if len(senv) > 0: senv += " " - senv += "{}={}".format(name, svalue if not " " in svalue else f"\"{svalue}\"") - - # If a description, return that instead - if self._description is not None: - # Possible replace with the command, though - description = self._description.replace("$CMD_CWD", cwd) - description = self._description.replace("$CMD", scmd) - description = self._description.replace("$ENV", senv) - return description - - # Otherwise, just return the command - return "{}{}".format(scmd, f" (with {senv})" if len(senv) > 0 else "") - - - - def cwd(self, cwd: str | None) -> None: - """ - Sets or overrides the command's CWD. 
- """ - - self._cwd = cwd - - def add(self, *args: str) -> None: - """ - Appends the given (string) arguments to the list of arguments. - """ - - self._args += list(args) - - def add_env(self, *args: typing.Tuple[str, str | None]) -> None: - """ - Sets the given (string, value) pair as an environment variable for this command. - - Use a value of 'None' to unset a value in the default environment. - """ - - # Add it, overwriting junk if necessary - for (name, value) in args: - self._env[name] = value - - - - def _prepare_run(self, args: argparse.Namespace) -> tuple[list[str], dict[str, str], str]: - """ - Prepares running the internal command by creating the command, environment dictionary and current working directory, respectively. - """ - - # Resolve the CWD - cwd = resolve_args(self._cwd, args) if self._cwd is not None else os.getcwd() - - # Construct the final environment - env = os.environ.copy() - for (name, value) in self._env.items(): - # Either insert or delete the value - if value is not None: - # Possibly replace values - value = resolve_args(value, args) - - # Done - env[name] = value - elif name in env: - del env[name] - - # Resolve the arguments - rargs = [ resolve_args(arg, args) for arg in self._args ] - - # We're done - return ([self._exec] + rargs, env, cwd) - - def run(self, args: argparse.Namespace) -> int: - """ - Runs the command. Returns the 'error code', which may be some wacky - stuff in the case of abstract commands. In any case, '0' means - success. - - Note that this respects the `args.dry_run` variable. 
- """ - - # Prepare what we need to run - (cmd, env, cwd) = self._prepare_run(args) - - # Start the process, but only if not dry-running - if not args.dry_run: - handle = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, env=env, cwd=cwd) - handle.wait() - return handle.returncode if not self.ignore_exit_code else 0 - else: - return 0 - - def run_with_capture(self, args: argparse.Namespace) -> tuple[int, str, str]: - """ - Runs the command, returning not only the error code but also a captured stdout and stderr. - - Note that this respects the `args.dry_run` variable. - """ - - # Prepare what we need to run - (cmd, env, cwd) = self._prepare_run(args) - - # Start the process, but only if not dry-running - if not args.dry_run: - handle = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd) - (stdout, stderr) = handle.communicate() - return (handle.returncode, stdout.decode("utf-8"), stderr.decode("utf-8")) - else: - return (0, "", "") - -class ConditionalShellCommand(Command): - """ - Defines a shell command that is actually two commands, and the second is only executed if the first returns a specific target. - """ - - _cond : ShellCommand - _cons : ShellCommand - _code : int - _stdout : typing.Optional[Match] - _stderr : typing.Optional[Match] - - def __init__(self, condition: ShellCommand, consequence: ShellCommand, target_code: int = 0, target_stdout: typing.Optional[Match] = None, target_stderr: typing.Optional[Match] = None) -> None: - """ - Constructor for the ConditionalShellCommand. - - Arguments - - `condition`: The command to run that determines the condition. - - `consequence`: The command to run if the `condition` returns the desired code/stdout/stderr. - - `target_code`: The code to match on the `condition`'s return state. If the `condition` returns anything else, the `consequence` is _not_ executed. 
- - `target_stdout`: If not None, then the stdout of the command must match the given string, lest the `consequence` will not be executed. The matching strategy is dictacted by the given Match class. - - `target_stderr`: If not None, then the stderr of the command must match the given string, lest the `consequence` will not be executed. The matching strategy is dictacted by the given Match class. - """ - - # Set the parent constructor - Command.__init__(self) - - # Store the other commands - self._cond = condition - self._cons = consequence - self._code = target_code - self._stdout = target_stdout - self._stderr = target_stderr - - def serialize(self, args: argparse.Namespace) -> str: - """ - Allows the Command to be formatted. - """ - - # Write the command we always run - s = f"Running '{self._cons.serialize(args)}' iff '{self._cond.serialize(args)}' returns exit code {self._code}" - # Add the stdout & stderr checks - if self._stdout is not None: - s += " and writes correct stdout" - if self._stderr is not None: - s += " and writes correct stderr" - # Done - return s - - def run(self, args: argparse.Namespace) -> int: - """ - Runs the conditional command, then maybe runs the other command. 
- """ - - # Run the conditional command first, catching stdout and whatnot - (code, stdout, stderr) = self._cond.run_with_capture(args) - - # Match it, with useful debug prints - if code != self._code: - pdebug(f"Not running consequent ({self._cons.serialize(args)}) because condition ({self._cond.serialize(args)}) did not return exit code {self._code} (got {code})") - return 0 - if self._stdout is not None and not self._stdout.match(stdout, args): - pdebug(f"Not running consequent ({self._cons.serialize(args)}) because condition ({self._cond.serialize(args)})'s stdout did not match\n\nExpected:\n{self._stdout}\n\nGot:\n{stdout}\n\n)") - return 0 - if self._stderr is not None and not self._stderr.match(stderr, args): - pdebug(f"Not running consequent ({self._cons.serialize(args)}) because condition ({self._cond.serialize(args)})'s stderr did not match\n\nExpected:\n{self._stderr}\n\nGot:\n{stderr}\n\n)") - return 0 - - # Now we can run the real command as usual - return self._cons.run(args) - -class MakeDirCommand(Command): - """ - A command that runs a platform-independent directory creation. - """ - - _path : str - _exists_ok : bool - - def __init__(self, path: str, exists_ok: bool = True) -> None: - """ - Constructor for the MakeDirCommand class. - - Arguments: - - `path`: The path to the directory to create. The usual replacements apply. - - `exists_ok`: If True, then we do not raise an error if the directory already exists. - """ - - # Set the base stuff - Command.__init__(self) - - # Store our own arguments - self._path = path - self._exists_ok = exists_ok - - def serialize(self, args: argparse.Namespace) -> str: - """ - Allows the Command to be formatted. - """ - - return f"Creating directory '{self._path}'" - - def run(self, _args: argparse.Namespace) -> int: - """ - Creates a directory with error catching. 
- """ - - # Only run if we're not dry running - if not args.dry_run: - try: - os.makedirs(self._path, exist_ok=self._exists_ok) - return 0 - except IOError as e: - perror(f"Failed to create directory '{self._path}': {e}") - return e.errno - -class CopyCommand(Command): - """ - A command that runs a platform-independent file copy. - """ - - _source : str - _target : str - - def __init__(self, source: str, target: str) -> None: - """ - Constructor for the CopyCommand. - - Arguments: - - `source`: The source file to copy from. - - `target`: The target file to copy to. - """ - - # Set the base stuff - Command.__init__(self) - - # Store the paths - self._source = source - self._target = target - - def serialize(self, _args: argparse.Namespace) -> str: - """ - Allows the Command to be formatted. - """ - - return f"Copying '{self._source}' to '{self._target}'" - - def run(self, args: argparse.Namespace) -> int: - """ - Copies the given file with error catching. - """ - - # Only run if we're not dry running - if not args.dry_run: - try: - shutil.copyfile(self._source, self._target) - return 0 - except IOError as e: - perror(f"Failed to copy '{self._source}' to '{self._target}': {e}") - return e.errno - -class PseudoCommand(Command): - """ - A command that actually just runs some Python code when executed. - """ - - _desc : str - _call : typing.Callable[[], int] - - - def __init__(self, description: str, callback: typing.Callable[[], int]) -> None: - """ - Constructor for the PseudoCommand class. - - Arguments: - - `description`: The string to print when running this command. - - `callback`: The code to run when the command is executed. - """ - - # Set the base stuff - Command.__init__(self) - - # Populate ourselves, ez - self._desc = description - setattr(self, "_call", callback) - - def serialize(self, _args: argparse.Namespace) -> str: - """ - Allows the Command to be formatted. 
- """ - - return self._desc - - - - def run(self, _args: argparse.Namespace) -> int: - """ - Runs the command. Returns the 'error code', which may be some wacky - stuff in the case of abstract commands. In any case, '0' means - success. - """ - - # Simply run the callback - return getattr(self, "_call")() - - - - - -##### TARGETS ##### -class Target(abc.ABC): - """ - Virtual baseclass for all targets. - """ - - name : str - description : str - - _srcs : typing.List[str] - _srcs_deps : dict[str, typing.List[str] | None] - _dsts : typing.List[str] - _deps : typing.List[str] - - - @abc.abstractmethod - def __init__(self, name: str, srcs: typing.List[str], srcs_deps: dict[str, typing.List[str] | None], dsts: typing.List[str], deps: typing.List[str], description: str) -> None: - """ - Baseclass constructor for the Target. - - # Arguments - - `name`: The name of the Target. - - `srcs`: A list of source files which the Target uses. Their state is cached, and any change to these sources will prompt a rebuild of this Target. If the list is empty, then it is assumed this information is unknown, and the Target must always be rebuild. - - `srcs_deps`: A list of source files that are produced by a dependency. The dictionary maps dependency names to a list of source files for that dependency. If the list is 'None' instead, then we rely on all files built by the dep. Note that dep-specific behaviour may always override and tell its parents to rebuild. - - `dsts`: A list of destination files which the Target generates. The Target may be rebuild, but any trigger of dependencies down the line is blocked if none of these files changes. If the list is empty, then it is assumed this information is unknown, and future Targets must always be rebuild. - - `deps`: A list of dependencies for the Target. If any of these strong dependencies needs to be recompiled _any_ incurred changes, then this Target will be rebuild as well. 
- - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. - """ - - self.name = name - self.description = description - - self._srcs = srcs - self._srcs_deps = srcs_deps - self._dsts = dsts - self._deps = deps - - - - def srcs(self, args: argparse.Namespace) -> typing.List[str]: - """ - Returns the list of source files upon which this Target relies. - - Child classes may override this method to conditionally return sources. - """ - - return [ resolve_args(s, args) for s in self._srcs ] - - def srcs_deps(self, args: argparse.Namespace) -> dict[str, typing.List[str] | None]: - """ - Returns a dict that maps dependency-generated source files upon which this Target relies. - - Child classes may override this method to conditionally return sources. - """ - - return { dep: ((resolve_args(s, args) for s in srcs) if srcs is not None else srcs) for (dep, srcs) in self._srcs_deps.items() } - - def dsts(self, args: argparse.Namespace) -> typing.List[str]: - """ - Returns the list of source files upon which this Target relies. - - Child classes may override this method to conditionally return sources. - """ - - return [ resolve_args(d, args) for d in self._dsts ] - - def deps(self, _args: argparse.Namespace) -> typing.List[str]: - """ - Returns the dependencies of this Target. - - Child classes may override this method to conditionally return sources. - """ - - return self._deps - - - - def is_supported(self, _args: argparse.Namespace) -> str | None: - """ - Returns whether or not the tools and such are there to build this Target. - - If the target is supported, returns None. Otherwise, returns a - string description why this Target was not supported. 
- """ - - return None - - - - def is_outdated(self, args: argparse.Namespace) -> bool: - """ - Checks if the Target needs to be rebuild according to the common - changes (due to arguments or sources; dependencies are left to a - centralized implementation for performance). - - Child classes may overload '_is_outdated()', which determines - additional reasons for if a Target is outdated. - """ - - # The easiest way to check if a target is outdated is by examining the command-line arguments - if args.force: - pdebug(f"Target '{self.name}' is marked as outdated because '--force' was specified") - return True - - # Examine if any of the sources need to be updated - for src in self.srcs(args): - # Resolve it - src = resolve_args(src, args) - # Check if it needs to be recompiled - if cache_outdated(args, src, True): - pdebug(f"Target '{self.name}' is marked as outdated because source file '{src}' has never been compiled or has changed since last compilation") - return True - - # If any of the destination files is missing, that's an indication too - for dst in self.dsts(args): - # Resolve it - dst = resolve_args(dst, args) - # Check if it needs to be recompiled - if not os.path.exists(dst): - pdebug(f"Target '{self.name}' is marked as outdated because result file '{dst}' doesn't exist") - return True - - # Then also check if any of the relevant flags were different - if flags_changed(args, self.name): - pdebug(f"Target '{self.name}' is marked as outdated because it has never been compiled before or its previous compilation was with different flags") - return True - - # Otherwise, it's left to the child-specific implementation - return self._is_outdated(args) - - def _is_outdated(self, _args: argparse.Namespace) -> bool: - """ - Checks any child-specific reason for if a Target is outdated. - - If a child does not implement, then it always returns that nothing - else is needed to check for outdatedness. 
- """ - - return False - - - - def had_effect(self, args: argparse.Namespace, files: typing.List[str] | None = None) -> bool: - """ - Returns whether any of the destination files have changed since last compilation. - - If the given list of files is not None, then we only consider the given files, where an empty list means no files. - """ - - # Get the destination files and use the files list to reduce it - dsts = self.dsts(args) - if files is not None: - new_dsts = [] - for f in files: - f = resolve_args(f, args) - if f not in dsts: raise ValueError(f"Target '{self.name}' does not produce file '{f}'\nInstead: {dsts}") - new_dsts.append(f) - dsts = new_dsts - - # Examine if any of the remaining destination's cache has become outdated - for dst in dsts: - # Resolve it - dst = resolve_args(dst, args) - # Check if it was changed - if cache_outdated(args, dst, True): - pdebug(f"Rebuild of target '{self.name}' is marked as having an effect because the hash of resulting file '{dst}' has changed") - return True - - # Otherwise, check the child-dependent implementation - return self._had_effect(args) - - def _had_effect(self, _args: argparse.Namespace) -> bool: - """ - Checks any child-specific reason for if a Target had effect after building. - - If a child does not implement, then it always returns that nothing - else is needed to check for outdatedness. - """ - - return False - - - - def deps_outdated(self, args: argparse.Namespace) -> bool: - """ - Determines whether the files on which we depend from a dependency point of view is outdated or not. 
- """ - - # Simply call had_effect for all deps from which we expect source files - for (dep_name, srcs) in self.srcs_deps(args).items(): - # Resolve the dependency - if dep_name not in targets: - raise ValueError(f"Unknown dependency '{dep_name}'") - dep = targets[dep_name] - - # If the dependency changed anything of the relevant files, then we consider the deps outdated and thus this needs a rebuild - if dep.had_effect(args, srcs): return True - - # Otherwise, check child-dependent implementation - return self._deps_outdated(args) - - def _deps_outdated(self, _args: argparse.Namespace) -> bool: - """ - Allows children to determine whether the deps on which we depend from a dependency point of view is outdated or not. - """ - - # By default, we assume there is nothing besides the files given - return False - - - - def build(self, args: argparse.Namespace): - """ - Builds the target, and this Target alone. - - Updates caches and such if the Target was successfull. - """ - - # Compute some colour strings - debug_start = "\033[1m" if supports_color() else "" - error_start = "\033[31;1m" if supports_color() else "" - end = "\033[0m" if supports_color() else "" - - # Get the commands to run to compile this target and execute them one-by-one - cmds = self._cmds(args) - for cmd in cmds: - print(f" > {debug_start}{cmd.serialize(args)}{end}") - - # Run it - res = cmd.run(args) - if res != 0: - print(f"\n{debug_start}Job '{error_start}{cmd.serialize(args)}{end}{debug_start}' failed. 
See output above.{end}\n", file=sys.stderr) - exit(1) - - # Now update the sources - srcs = self.srcs(args) - for srcs_deps in self.srcs_deps(args).values(): srcs += srcs_deps - for src in srcs: - # Resolve it - src = resolve_args(src, args) - # Update it - update_cache(args, src, True) - - # And the flags - update_flags(args, self.name) - - @abc.abstractmethod - def _cmds(self, _args: argparse.Namespace) -> typing.List[Command]: - """ - Returns the commands to run to build the target given the given - architecture and release mode. - - Will raise errors if it somehow fails to do so. - """ - pass - -class AbstractTarget(Target): - """ - Defines the baseclass for abstract Targets, which do not build anything - but instead only trigger dependencies or conditionally evaluate to - other targets. - """ - - def __init__(self, name: str, deps: typing.List[str], description: str = "") -> None: - """ - Constructor for the AbstractTarget. - - # Arguments - - `name`: The name of the Target. - - `deps`: A list of dependencies for the Target. If any of these strong dependencies needs to be recompiled _any_ incurred changes, then this Target will be rebuild as well. - - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. - """ - - # Simply call the parent constructor - super().__init__(name, [], {}, [], deps, description) - - @abc.abstractmethod - def redirect(self, _args: argparse.Namespace) -> Target: - """ - Redirects this AbstractTarget to a real target that will actually be build. - """ - pass - - - - def srcs(self, args: argparse.Namespace) -> typing.List[str]: - """ - Returns the list of source files upon which this Target relies. - - Child classes may override this method to conditionally return sources. 
- """ - - # First, resolve this Target to its internal one - target = self.redirect(args) - # Run the function on that target instead - return (target.srcs(args) if self != target else super().srcs(args)) - - def srcs_deps(self, _args: argparse.Namespace) -> dict[str, typing.List[str] | None]: - """ - Returns a dict that maps dependency-generated source files upon which this Target relies. - - Child classes may override this method to conditionally return sources. - """ - - # First, resolve this Target to its internal one - target = self.redirect(args) - # Run the function on that target instead - return (target.srcs_deps(args) if self != target else super().srcs_deps(args)) - - def dsts(self, _args: argparse.Namespace) -> typing.List[str]: - """ - Returns the list of source files upon which this Target relies. - - Child classes may override this method to conditionally return sources. - """ - - # First, resolve this Target to its internal one - target = self.redirect(args) - # Run the function on that target instead - return (target.dsts(args) if self != target else super().dsts(args)) - - def deps(self, _args: argparse.Namespace) -> typing.List[str]: - """ - Returns the dependencies of this Target. - - Child classes may override this method to conditionally return sources. - """ - - # First, resolve this Target to its internal one - target = self.redirect(args) - # Run the function on that target instead, but also add our dependencies (after it) - return (target.deps(args) if self != target else []) + super().deps(args) - - - - def is_outdated(self, args: argparse.Namespace) -> bool: - """ - Checks if the Target needs to be rebuild according to the common - changes (due to arguments or sources; dependencies are left to a - centralized implementation for performance). - - Child classes may overload '_is_outdated()', which determines - additional reasons for if a Target is outdated. 
- """ - - # Redirect the Target - target = self.redirect(args) - # Delegate it to it - return (target.is_outdated(args) if self != target else super().is_outdated(args)) - - - - def deps_outdated(self, args: argparse.Namespace) -> bool: - """ - Determines whether the files on which we depend from a dependency point of view is outdated or not. - """ - - # Redirect the Target - target = self.redirect(args) - # Delegate it to it - return (target.deps_outdated(args) if self != target else super().deps_outdated(args)) - - - - def had_effect(self, args: argparse.Namespace, files: typing.List[str] | None = None) -> bool: - """ - Returns whether any of the destination files have changed since last compilation. - """ - - # Redirect the Target - target = self.redirect(args) - # Delegate it to it - return (target.had_effect(args, files) if self != target else super().had_effect(args)) - - - - def build(self, args: argparse.Namespace): - """ - Builds the target, and this Target alone. - - Updates caches and such if the Target was successfull. - """ - - # Redirect the Target - target = self.redirect(args) - # Delegate it to it - return (target.build(args) if self != target else super().build(args)) - - - -class VoidTarget(AbstractTarget): - """ - A target that does nothing, but can be used to call dependencies. - """ - - - def __init__(self, name: str, deps: typing.List[str] = [], description: str = "") -> None: - """ - Constructor for the AbstractTarget class. - - Arguments: - - `name`: The name of the target. Only used within the script to reference it later. - - `deps`: A list of dependencies for the Target. If any of these strong dependencies needs to be recompiled _any_ incurred changes, then this Target will be rebuild as well. - - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. 
- """ - - # Simply call the parent constructor - super().__init__(name, deps, description) - - def redirect(self, _args: argparse.Namespace) -> Target: - """ - Redirects this AbstractTarget to a real target that will actually be build. - """ - - # No redirection needs to happen - return self - - - - def _cmds(self, _args: argparse.Namespace) -> typing.List[Command]: - """ - Returns the commands to run to build the target given the given - architecture and release mode. - - Will raise errors if it somehow fails to do so. - """ - - # Nothing to do - return [] - -class EitherTarget(AbstractTarget): - """ - Defines a build target that can switch between two different targets based on some runtime property. - """ - - _targets : dict[typing.Any, Target] - _opt_name : str - - - def __init__(self, name: str, opt_name: str, targets: dict[typing.Any, Target], deps: typing.List[str] = [], description: str = "") -> None: - """ - Constructor for the EitherTarget class. - - Arguments: - - `name`: The name of the target. Only used within the script to reference it later. - - `opt_name`: The name of the argument in the arguments dict to switch on. - - `targets`: The Value/Target mapping based on the given argument. - - `deps`: A list of dependencies for the Target. If any of these strong dependencies needs to be recompiled _any_ incurred changes, then this Target will be rebuild as well. - - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. - """ - - # Set the toplevel stuff - super().__init__(name, deps, description) - - # Set the options - self._targets = targets - self._opt_name = opt_name - - def redirect(self, args: argparse.Namespace) -> Target: - """ - Redirects this AbstractTarget to a real target that will actually be build. 
- """ - - # Check which one based on the given set of arguments - val = getattr(args, self._opt_name) - if val not in self._targets: - raise ValueError(f"Value '{val}' is not a possible target for EitherTarget '{self.name}'") - - # No redirection needs to happen - return self._targets[val] - - def _cmds(self, args: argparse.Namespace) -> typing.List[Command]: - """ - Returns the commands to run to build the target given the given - architecture and release mode. - - Will raise errors if it somehow fails to do so. - """ - - # Get the inner target - target = self.redirect(args) - - # Use that target's `cmds()` - return target._cmds(args) - - - -class ShellTarget(Target): - """ - A very simple Target that executed one or more Commands. - """ - - _commands : typing.List[Command] - - - def __init__(self, name: str, commands: typing.List[Command], srcs: typing.List[str] = [], srcs_deps: dict[str, typing.List[str] | None] = {}, dsts: typing.List[str] = [], deps: typing.List[str] = [], description: str = "") -> None: - """ - Baseclass constructor for the ShellTarget. - - # Arguments - - `name`: The name of the Target. - - `commands`: A list of Commands that will be executed when this Target runs. - - `srcs`: A list of source files which the Target uses. Their state is cached, and any change to these sources will prompt a rebuild of this Target. If the list is empty, then it is assumed this information is unknown, and the Target must always be rebuild. - - `srcs_deps`: A list of source files that are produced by a dependency. The dictionary maps dependency names to a list of source files for that dependency. If the list is 'None' instead, then we rely on all files built by the dep. Note that dep-specific behaviour may always override and tell its parents to rebuild. - - `dsts`: A list of destination files which the Target generates. The Target may be rebuild, but any trigger of dependencies down the line is blocked if none of these files changes. 
If the list is empty, then it is assumed this information is unknown, and future Targets must always be rebuild. - - `deps`: A list of dependencies for the Target. If any of these strong dependencies needs to be recompiled _any_ incurred changes, then this Target will be rebuild as well. - - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. - """ - - # Call the parent constructor for most of it - super().__init__(name, srcs, srcs_deps, dsts, deps, description) - - # Store the commands too - self._commands = commands - - - - def _is_outdated(self, _args: argparse.Namespace) -> bool: - """ - The ShellTarget is always outdated, since we have no guarantees about what it does - """ - pdebug(f"Target '{self.name}' is marked as outdated because it executes arbitrary commands and we don't know when to execute them") - return True - - def _cmds(self, _args: argparse.Namespace) -> typing.List[Command]: - """ - Returns the commands to run to build the target given the given - architecture and release mode. - - Will raise errors if it somehow fails to do so. - """ - - # Simply return the list - return self._commands - -class CrateTarget(Target): - """ - Defines a build target that relies on Cargo for build caching. 
- """ - - _pkgs : typing.List[str] - _target : str | None - _give_target_on_unspecified : bool - _force_dev : bool - _env : dict[str, str | None] - _support_checker : typing.Callable[[Target, argparse.Namespace], str | None] - - - def __init__(self, name: str, packages: str | typing.List[str], target: str | None = None, give_target_on_unspecified: bool = False, force_dev: bool = False, env: dict[str, str | None] = {}, support_checker: typing.Callable[[Target, argparse.Namespace], str | None] = lambda _this, _args: None, srcs: typing.List[str] = [], srcs_deps: dict[str, typing.List[str] | None] = {}, dsts: typing.List[str] = [], deps: typing.List[str] = [], description: str = "") -> None: - """ - Constructor for the CrateTarget class. - - Arguments: - - `name`: The name of the target. Only used within this script to reference it later. - - `packages`: The list of cargo packages to build for this target. Leave empty to build the default. - - `target`: An optional target to specify if needed. Should contain '$ARCH' which will be replaced with the desired architecture. - - `give_target_on_unspecified`: If True, does not specify '--target' in Cargo if the user did not explicitly specified so. - - `force_dev`: If given, always builds the development binary (i.e., never adds '--release' to the Cargo command). - - `env`: If given, overrides/adds environment variables for the build command. If set to 'None', then it unsets that environment variable instead. - - `srcs`: A list of source files which the Target uses. Their state is cached, and any change to these sources will prompt a rebuild of this Target. If the list is empty, then it is assumed this information is unknown, and the Target must always be rebuild. - - `srcs_deps`: A list of source files that are produced by a dependency. The dictionary maps dependency names to a list of source files for that dependency. If the list is 'None' instead, then we rely on all files built by the dep. 
Note that dep-specific behaviour may always override and tell its parents to rebuild. - - `dsts`: A list of destination files which the Target generates. The Target may be rebuild, but any trigger of dependencies down the line is blocked if none of these files changes. If the list is empty, then it is assumed this information is unknown, and future Targets must always be rebuild. - - `deps`: A list of dependencies for the Target. If any of these strong dependencies needs to be recompiled _any_ incurred changes, then this Target will be rebuild as well. - - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. - """ - - # Resolve the packages to a list (always) - lpackages = [ packages ] if type(packages) == str else packages - - # Set the toplevel stuff - super().__init__(name, srcs, srcs_deps, dsts, deps, description) - - # Simply set the others - self._pkgs = lpackages - self._target = target - self._give_target_on_unspecified = give_target_on_unspecified - self._force_dev = force_dev - self._env = env - setattr(self, "_support_checker", support_checker) - - - - def is_supported(self, args: argparse.Namespace) -> str | None: - # Check if Cargo and Rust are installed - for (name, exe) in [ ("Cargo", "cargo"), ("Rust compiler", "rustc"), ("Package config", "pkgconf") ]: - handle = subprocess.Popen([ exe, "--version" ], text=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - (stdout, stderr) = handle.communicate() - if handle.returncode != 0: - return f"{name} ({exe}) cannot be run: {stderr}" - - # Now check for any target-specific options - return self._support_checker(self, args) - - - - def _is_outdated(self, _args: argparse.Namespace) -> bool: - """ - The CrateTarget is always outdated, since we leave it to Cargo - """ - pdebug(f"Target '{self.name}' is marked as outdated because it relies on Cargo") - return True - - def _cmds(self, args: argparse.Namespace) -> 
typing.List[Command]: - """ - Returns the commands to run to build the target given the given - architecture and release mode. - - Will raise errors if it somehow fails to do so. - """ - - # Start collecting the arguments for cargo - cmd = ShellCommand("cargo", "build", env=self._env) - if self._target is not None and (args.arch.is_given() or self._give_target_on_unspecified): - cmd.add("--target", resolve_args(self._target, args)) - if not self._force_dev and not args.dev: - cmd.add("--release") - for pkg in self._pkgs: - cmd.add("--package", pkg) - - # Done - return [ cmd ] - -class BuilderTarget(Target): - """ - Target that creates a docker buildx builder - """ - - def __init__(self, name: str, deps: typing.List[str] = [], description: str = ""): - """ - Constructor for the ImageTarget. - - Arguments: - - `name`: The name of the target. Only used within this script to reference it later. - - `deps`: A list of dependencies for the Target. If any of these strong dependencies needs to be recompiled _any_ incurred changes, then this Target will be rebuild as well. - - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. - """ - - # Set the super fields - super().__init__(name, [], {}, [], deps, description) - - def _cmds(self, args: argparse.Namespace) -> typing.List[Command]: - """ - Returns the commands to run to build the builder - """ - create_builder = ShellCommand("docker", "buildx", "create", "--driver", "docker-container", "--name", "brane-builder", ignore_exit_code = True) - - return [ create_builder ] - -class ImageTarget(Target): - """ - Target that builds an image according to a Dockerfile. 
- """ - - _dockerfile : str - _context : str - _target : str | None - _build_args : dict[str, str] - - - def __init__(self, name: str, dockerfile: str, destination: str, context: str = ".", target: str | None = None, build_args: dict[str, str] = {}, srcs: typing.List[str] = [], srcs_deps: dict[str, typing.List[str] | None] = {}, deps: typing.List[str] = [], description: str = ""): - """ - Constructor for the ImageTarget. - - Arguments: - - `name`: The name of the target. Only used within this script to reference it later. - - `dockerfile`: The location of the Dockerfile to build the image for. - - `destination`: The path of the resulting .tar image file. May contain special strings such as '$ARCH' or '$OS'. - - `context`: The folder used to resolve relative directories in the Dockerfile. - - `target`: The Docker target to build in the Dockerfile. Will build the default target if omitted. - - `build_args`: A list of build arguments to set when building the Dockerfile. - - `srcs`: A list of source files which the Target uses. Their state is cached, and any change to these sources will prompt a rebuild of this Target. If the list is empty, then it is assumed this information is unknown, and the Target must always be rebuild. - - `srcs_deps`: A list of source files that are produced by a dependency. The dictionary maps dependency names to a list of source files for that dependency. If the list is 'None' instead, then we rely on all files built by the dep. Note that dep-specific behaviour may always override and tell its parents to rebuild. - - `deps`: A list of dependencies for the Target. If any of these strong dependencies needs to be recompiled _any_ incurred changes, then this Target will be rebuild as well. - - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. 
- """ - - # Set the super fields - super().__init__(name, [ dockerfile ] + srcs, srcs_deps, [ destination ], deps, description) - - # Set the local fields (destination is the only destination file) - self._dockerfile = dockerfile - self._context = context - self._target = target - self._build_args = build_args - - - - def _cmds(self, args: argparse.Namespace) -> typing.List[Command]: - """ - Returns the commands to run to build the target given the given - architecture and release mode. - - Will raise errors if it somehow fails to do so. - """ - - # Resolve the destination path - destination = resolve_args(self._dsts[0], args) - - # Add a command for the output folder - mkdir = MakeDirCommand(os.path.dirname(destination)) - - # Construct the build command - build = ShellCommand("docker", "buildx", "build", "--output", f"type=docker,dest={destination}", "-f", self._dockerfile, "--builder", "brane-builder") - if args.arch.is_given(): build.add("--platform", args.arch.to_docker()) - if self._target is not None: build.add("--target", self._target) - for (name, value) in self._build_args.items(): - # Resolve the value - value = resolve_args(value, args) - # Add it - build.add("--build-arg", f"{name}={value}") - build.add(self._context) - - # Return the commands to run - return [ mkdir, build ] - -class InContainerTarget(Target): - """ - Target that builds something in a container (e.g., OpenSSL). 
- """ - - _image : str - _attach_stdin : bool - _attach_stdout : bool - _attach_stderr : bool - _keep_alive : bool - _volumes : typing.List[typing.Tuple[str, str]] - _context : str - _command : typing.List[str] - - - def __init__(self, name: str, image: str, attach_stdin: bool = True, attach_stdout: bool = True, attach_stderr: bool = True, keep_alive: bool = False, volumes: typing.List[typing.Tuple[str, str]] = [], context: str = ".", command: typing.List[str] = [], srcs: typing.List[str] = [], srcs_deps: dict[str, typing.List[str] | None] = {}, dsts: typing.List[str] = [], deps: typing.List[str] = [], description: str = "") -> None: - """ - Constructor for the ImageTarget. - - Arguments: - - `name`: The name of the target. Only used within this script to reference it later. - - `image`: The tag of the image to run. - - `attach_stdin`: Whether or not to attach stdin to the container's stdin. - - `attach_stdout`: Whether or not to attach stdout to the container's stdout. - - `attach_stderr`: Whether or not to attach stderr to the container's stderr. - - `keep_alive`: If given, attempts to use the container as a running server instead (favouring repeated builds). - - `volumes`: A list of volumes to attach to the container (using '-v', so note that the source path (the first argument) must be absolute. To help, you may use '$CWD'.). - - `context`: The build context for the docker command. - - `command`: A command to execute in the container (i.e., what will be put after its ENTRYPOINT if relevant). - - `srcs`: A list of source files which the Target uses. Their state is cached, and any change to these sources will prompt a rebuild of this Target. If the list is empty, then it is assumed this information is unknown, and the Target must always be rebuild. - - `srcs_deps`: A list of source files that are produced by a dependency. The dictionary maps dependency names to a list of source files for that dependency. 
If the list is 'None' instead, then we rely on all files built by the dep. Note that dep-specific behaviour may always override and tell its parents to rebuild. - - `dsts`: A list of destination files which the Target generates. The Target may be rebuild, but any trigger of dependencies down the line is blocked if none of these files changes. If the list is empty, then it is assumed this information is unknown, and future Targets must always be rebuild. - - `deps`: A list of dependencies for the Target. If any of these strong dependencies needs to be recompiled _any_ incurred changes, then this Target will be rebuild as well. - - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. - """ - - # Run the parent constructor - super().__init__(name, srcs, srcs_deps, dsts, deps, description) - - # Add the source and targets - self._image = image - self._attach_stdin = attach_stdin - self._attach_stdout = attach_stdout - self._attach_stderr = attach_stderr - self._keep_alive = keep_alive - self._volumes = volumes - self._context = context - self._command = command - - - - def _cmds(self, args: argparse.Namespace) -> typing.List[Command]: - """ - Returns the commands to run to build the target given the given - architecture and release mode. - - Will raise errors if it somehow fails to do so. 
- """ - - # The user story is different per OS - if platform.system() != "Windows": - # Get the current user ID - handle = subprocess.Popen(["id", "-u"], text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - (stdout, stderr) = handle.communicate() - if handle.returncode != 0: cancel(f"Failed to get current user ID using 'id -u':\n{stderr}") - uid = stdout.strip() - - # Get the current group ID - handle = subprocess.Popen(["id", "-g"], text=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - (stdout, stderr) = handle.communicate() - if handle.returncode != 0: cancel(f"Failed to get current group ID using 'id -u':\n{stderr}") - gid = stdout.strip() - - # Put it in the global variable - user = (uid, gid) - else: - # We don't need to do any of this on Windows - user = None - - - - # Prepare the command - if self._keep_alive: - # Prepare the command that actually runs the container - start = ShellCommand("docker", "run", "--name", f"{self._image}", "-d", "--rm", "--entrypoint", "sleep") - for (src, dst) in self._volumes: - # Resolve the src and dst - src = resolve_args(src, args) - dst = resolve_args(dst, args) - # Add - start.add("-v", f"{src}:{dst}") - start.add(self._image, "infinity") - - # Wrap it in a conditional command to only run it if the container is not already running (to preserve state) - start = ConditionalShellCommand( - # The condition checks if it's already running - ShellCommand("docker", "ps", "-f", f"name={self._image}", "--format", "{{.Names}}"), - # The consequent starts the container - start, - # We match on the name of the container, which is only given if it already runs - target_code = 0, target_stdout = NegatedMatch(StrippedMatch(self._image)), target_stderr = None, - ) - - # # Build the start command (OS-dependent) - # if platform.system() != "Windows": - # c = f"[[ $(docker ps -f \"name={self._image}\" --format '{{{{.Names}}}}') == {self._image} ]] || docker run --name {self._image} -d --rm --entrypoint sleep" - # else: - # c = 
f"for /f \"delims=\" %i in ('docker ps -f \"name={self._image}\" --format \"{{{{.Names}}}}\"') do if \"%i\" == {self._image} docker run --name {self._image} -d --rm --entrypoint sleep" - - # # Attach any volumes to it + the container command itself - # for (src, dst) in self._volumes: - # # Resolve the src and dst - # src = resolve_args(src, args) - # dst = resolve_args(dst, args) - # # Add - # c += f" -v {src}:{dst}" - # c += f" {self._image} infinity" - - # # Start the container in the background if it didn't already - # if platform.system() != "Windows": - # start = ShellCommand("bash", "-c", c) - # else: - # start = ShellCommand("for ") - - # Now prepare to run the actual command within the container - run = ShellCommand("docker", "exec", "-it", self._image, "/build.sh") - for c in self._command: - # Do standard replacements in the command - c = resolve_args(c, args) - run.add(c) - cmds = [ start, run ] - - # If any volumes, add the commands that will restore the permissions - for (src, _) in self._volumes: - # Possibly replace the src - src = resolve_args(src, args) - # Add the command (OS-dependent) - if type(user) == tuple: - cmds.append(ShellCommand("sudo", "chown", "-R", f"{user[0]}:{user[1]}", src, description=f"Restoring user permissions to '{src}' ($CMD)...")) - - # Return the commands - return typing.cast(typing.List[Command], cmds) - - else: - # Do a fire-and-then-remove run of the container - cmd = ShellCommand("docker", "run", "--name", self._image) - if self._attach_stdin: cmd.add("--attach", "STDIN") - if self._attach_stdout: cmd.add("--attach", "STDOUT") - if self._attach_stderr: cmd.add("--attach", "STDERR") - for (src, dst) in self._volumes: - # Resolve the src and dst - src = resolve_args(src, args) - dst = resolve_args(dst, args) - # Add - cmd.add("-v", f"{src}:{dst}") - # Add the image - cmd.add(self._image) - # Add any commands - for c in self._command: - # Do standard replacements in the command - c = resolve_args(c, args) - cmd.add(c) - 
cmds = [ cmd ] - - # If any volumes, add the command that will restore the permissions - for (src, _) in self._volumes: - # Possibly replace the src - src = resolve_args(src, args) - # Add the command (OS-dependent) - if type(user) == tuple: - cmds.append(ShellCommand("sudo", "chown", "-R", f"{user[0]}:{user[1]}", src, description=f"Restoring user permissions to '{src}' ($CMD)...")) - - # Done, return it - return typing.cast(typing.List[Command], cmds) - - - -class InstallTarget(Target): - """ - Target that installs something (i.e., copies it to a target system folder). - """ - - - def __init__(self, name: str, source: str, target: str, dep: str, description: str = "") -> None: - """ - Constructor for the ImageTarget. - - Arguments: - - `name`: The name of the target. Only used within this script to reference it later. - - `source`: The source file to copy to. May contain special parameters such as '$ARCH'. - - `target`: The target file to copy from. May contain special parameters such as '$ARCH'. - - `dep`: The dependenciy that will produce the source file. - - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. - """ - - # Run the parent constructor - super().__init__(name, [], { dep: [ source ] }, [ target ], [ dep ], description) - - - - def _cmds(self, args: argparse.Namespace) -> typing.List[Command]: - """ - Returns the commands to run to build the target given the given - architecture and release mode. - - Will raise errors if it somehow fails to do so. 
- """ - - # Resolve the source and target paths - source = resolve_args(typing.cast(typing.List[str], self._srcs_deps[self._deps[0]])[0], args) - target = resolve_args(self._dsts[0], args) - - # Assert the target directory exists - mkdir = MakeDirCommand(os.path.dirname(target)) - # Prepare the copy - copy = CopyCommand(source, target) - - # Done, return it - return [ mkdir, copy ] - -class InstallImageTarget(Target): - """ - Target that installs something (i.e., copies it to a target system folder). - """ - - _tag : str - - - def __init__(self, name: str, source: str, tag: str, dep: str, description: str = "") -> None: - """ - Constructor for the ImageTarget. - - Arguments: - - `name`: The name of the target. Only used within this script to reference it later. - - `source`: The source location of the file to install. May contain special parameters such as '$ARCH'. - - `tag`: The tag that will be assigned to the new image. - - `dep`: The dependenciy that will produce the source image .tar file. - - `description`: If a non-empty string, then it's a description of the target s.t. it shows up in the list of all Targets. - """ - - # Run the parent constructor - super().__init__(name, [], { dep: [ source ] }, [], [ dep ], description) - - # Add the target tag (the source is stored in the general _srcs field) - self._tag = tag - - - - def _cmds(self, args: argparse.Namespace) -> typing.List[Command]: - """ - Returns the commands to run to build the target given the given - architecture and release mode. - - Will raise errors if it somehow fails to do so. 
- """ - - # Resolve the source path - source = resolve_args(typing.cast(typing.List[str], self._srcs_deps[self._deps[0]])[0], args) - - # Load the image digest - digest = get_image_digest(source) - - # Load the image.tar into the Docker engine and tag it - cmd = ShellCommand("docker", "load", "--input", source) - tag = ShellCommand("docker", "tag", digest, self._tag) - - # Return them all - return [ cmd, tag ] - - - - - -##### GLOBALS ##### -# Whether to print debug messages or not -debug: bool = False - -# A list of deduced sources -instance_srcs = { - f"{svc}" : deduce_toml_src_dirs(f"./brane-{svc}/Cargo.toml") - for svc in CENTRAL_SERVICES + WORKER_SERVICES - # Ignore services which we do not build from our source code - if svc != "chk" -} -for svc in instance_srcs: - if instance_srcs[svc] is None: cancel(f"Could not auto-deduce '{svc}-image' dependencies") - -# A list of all targets in the make file. -targets = { - "test-units" : ShellTarget("test-units", - [ ShellCommand("cargo", "test", "--all", "--all-targets", "--all-features") ], - description="Runs tests on the project by running the unit tests.", - ), - "test-clippy" : ShellTarget("test-clippy", - [ ShellCommand("cargo", "clippy", "--all", "--all-targets", "--all-features", "--", "--allow", "clippy::manual_range_contains") ], - description="Runs tests on the project by running the clippy linter.", - ), - "test-security" : ShellTarget("test-security", - [ ShellCommand("cargo", "audit") ], - description="Runs tests on the project by running the clippy linter.", - ), - "test" : VoidTarget("test", - deps=[ "test-units", "test-clippy", "test-security" ], - description="Runs tests on the project by running both the unit tests and the clippy linter.", - ), - "create-builder": BuilderTarget("build-target", - description="Creates the docker buildx builder needed to build a lot of the images", - ), - "build-image" : ImageTarget("build-image", - "./contrib/images/Dockerfile.build", "./target/debug/build.tar", - 
description="Builds the image that can be used to build Brane targets in-container.", - ), - "ssl-build-image" : ImageTarget("ssl-build-image", - "./contrib/images/Dockerfile.ssl", "./target/debug/ssl-build.tar", - description="Builds the image in which we can build OpenSSL." - ), - "openssl" : InContainerTarget("openssl", - "brane-ssl-build", volumes=[("$CWD", "/build")], command=["--arch", "$ARCH"], - dsts=OPENSSL_FILES, - deps=["install-ssl-build-image"], - description="Builds OpenSSL in a container to compile against when building the instance in development mode." - ), - - - - "cli" : EitherTarget("cli", - "con", { - True : InContainerTarget("cli-con", - "brane-build", volumes=[ ("$CWD", "/build") ], command=["brane-cli", "--arch", "$ARCH"], - keep_alive=True, - dsts=["./target/containers/x86_64-unknown-linux-musl/release/brane"], - deps=["install-build-image"], - ), - False : CrateTarget("cli-compiled", - "brane-cli", target="$ARCH-unknown-linux-musl", give_target_on_unspecified=False - ), - }, - description = "Builds the Brane Command-Line Interface (Brane CLI). You may use '--containerized' to build it in a container." - ), - "ctl" : EitherTarget("ctl", - "con", { - True : InContainerTarget("ctl-con", - "brane-build", volumes=[ ("$CWD", "/build") ], command=["brane-ctl", "--arch", "$ARCH"], - keep_alive=True, - dsts=["./target/containers/x86_64-unknown-linux-musl/release/branectl"], - deps=["install-build-image"], - ), - False : CrateTarget("ctl-compiled", - "brane-ctl", target="$ARCH-unknown-linux-musl", give_target_on_unspecified=False, - ) - }, - description = "Builds the Brane Command-Line Tool (Brane CTL). You may use '--containerized' to build it in a container." 
- ), - "cc" : EitherTarget("cc", - "con", { - True : InContainerTarget("cc-con", - "brane-build", volumes=[ ("$CWD", "/build") ], command=["brane-cc", "--arch", "$ARCH"], - keep_alive=True, - dsts=["./target/containers/x86_64-unknown-linux-musl/release/branec"], - deps=["install-build-image"], - ), - False : CrateTarget("cc-compiled", - "brane-cc", target="$ARCH-unknown-linux-musl", give_target_on_unspecified=False, - ), - }, - description = "Builds the Brane Command-Line Compiler (Brane CC). You may use '--containerized' to build it in a container." - ), - "branelet" : EitherTarget("branelet", - "con", { - True : InContainerTarget("branelet-con", - "brane-build", volumes=[ ("$CWD", "/build") ], command=["brane-let", "--arch", "$ARCH"], - keep_alive=True, - dsts=["./target/containers/x86_64-unknown-linux-musl/release/branelet"], - deps=["install-build-image"], - ), - False : CrateTarget("branelet-compiled", - "brane-let", target="$ARCH-unknown-linux-musl", give_target_on_unspecified=True, - ), - }, - description = "Build the Brane in-package executable, for use with the `build --init` command in the CLI. You may use '--containerized' to build it in a container." - ), - "instance" : VoidTarget("instance", - deps=[ f"{svc}-image" for svc in CENTRAL_SERVICES ] + [ f"{svc}-image" for svc in AUX_CENTRAL_SERVICES if svc != "xenon" ], - description="Builds the container images that comprise a central node in a Brane instance." - ), - "worker-instance" : VoidTarget("worker-instance", - deps=[ f"{svc}-image" for svc in WORKER_SERVICES ] + [ f"{svc}-image" for svc in AUX_WORKER_SERVICES ], - description="Builds the container images that comprise a worker node in a Brane instance." 
- ), - "libbrane_cli" : EitherTarget("libbrane_cli", - "con", { - True : InContainerTarget("libbrane_cli-con", - "brane-build", volumes=[ ("$CWD", "/build") ], command=["brane-cli-c", "--arch", "$ARCH"], - keep_alive=True, - dsts=["./target/containers/x86_64-unknown-linux-musl/release/libbrane_cli.so"], - deps=["install-build-image"], - ), - False : CrateTarget("libbrane_cli-compiled", - "brane-cli-c", target="$ARCH-unknown-linux-musl", give_target_on_unspecified=False, - ), - }, - description = "Builds the Brane CLI dynamic C-library (as an `.so`-file). You may use '--containerized' to build it in a container." - ), - - - - "install-build-image" : InstallImageTarget("install-build-image", - "./target/debug/build.tar", "brane-build", - dep="build-image", - description="Installs the build image by loading it into the local Docker engine" - ), - "install-ssl-build-image" : InstallImageTarget("install-ssl-build-image", - "./target/debug/ssl-build.tar", "brane-ssl-build", - dep="ssl-build-image", - description="Installs the OpenSSL build image by loading it into the local Docker engine" - ), - "install-cli" : InstallTarget("install-cli", - "./target/$RELEASE/brane", "/usr/local/bin/brane", - dep="cli", - description="Installs the CLI executable to the '/usr/local/bin' directory." - ), - "install-ctl" : InstallTarget("install-ctl", - "./target/$RELEASE/branectl", "/usr/local/bin/branectl", - dep="ctl", - description="Installs the CTL executable to the '/usr/local/bin' directory." - ), - "install-cc" : InstallTarget("install-cc", - "./target/$RELEASE/branec", "/usr/local/bin/branec", - dep="cc", - description="Installs the compiler executable to the '/usr/local/bin' directory." 
- ), - "install-instance" : VoidTarget("install-instance", - deps=[ f"install-{svc}-image" for svc in CENTRAL_SERVICES ] + [ f"install-{svc}-image" for svc in AUX_CENTRAL_SERVICES ], - description="Installs the central node of a Brane instance by loading the compiled images into the local Docker engine." - ), - "install-worker-instance" : VoidTarget("install-worker-instance", - deps=[ f"install-{svc}-image" for svc in WORKER_SERVICES ] + [ f"install-{svc}-image" for svc in AUX_WORKER_SERVICES ], - description="Installs a worker node of a Brane instance by loading the compiled images into the local Docker engine." - ), -} - -# Generate some really repetitive entries -for svc in CENTRAL_SERVICES + WORKER_SERVICES: - # Define the build arguments for the release image - rls_args = {} - dev_args = { "ARCH": "$ARCH" } - - # Generate the service binary targets for those that support it - if svc != "chk": - targets[f"{svc}-binary-dev"] = CrateTarget(f"{svc}-binary-dev", - f"brane-{svc}", target="$RUST_ARCH-unknown-linux-musl", give_target_on_unspecified=True, force_dev=True, env={ - "OPENSSL_DIR": "$CWD/" + OPENSSL_DIR, "OPENSSL_LIB_DIR": "$CWD/" + OPENSSL_DIR + "/lib", "RUSTFLAGS": "-C link-arg=-lgcc" - }, - srcs_deps={ "openssl": OPENSSL_FILES }, - dsts=[ f"./target/$RUST_ARCH-unknown-linux-musl/debug/brane-{svc}" ], - deps=[ "openssl" ], - description=f"Builds the brane-{svc} binary in development mode. Useful if you want to run it locally or build to a development image." - ) - # Generate the matching install target - targets[f"install-{svc}-binary-dev"] = InstallTarget(f"install-{svc}-binary-dev", - f"./target/$RUST_ARCH-unknown-linux-musl/debug/brane-{svc}", f"./.container-bins/$ARCH/brane-{svc}", - dep=f"{svc}-binary-dev", - description=f"Installs the brane-{svc} debug binary to a separate location in the repo where Docker may access it." 
- ) - else: - # Add the additional build argument to select the reasoner - rls_args["REASONER"] = "$REASONER" - dev_args["REASONER"] = "$REASONER" - - # Generate the service image build target - targets[f"{svc}-image"] = EitherTarget(f"{svc}-image", - "dev", { - False : ImageTarget(f"{svc}-image-release", - "./Dockerfile.rls", f"./target/release/brane-{svc}.tar", - target=f"brane-{svc}", - deps = ["create-builder"], - srcs=typing.cast(typing.List[str], instance_srcs[svc]) if svc != "chk" else [], - ), - True : ImageTarget(f"{svc}-image-debug", - "./Dockerfile.dev", f"./target/debug/brane-{svc}.tar", target=f"brane-{svc}", build_args=dev_args, - srcs_deps={ f"install-{svc}-binary-dev": [f"./.container-bins/$ARCH/brane-{svc}"] }, - deps=["create-builder", f"install-{svc}-binary-dev"], - ), - }, - description=f"Builds the container image for the brane-{svc} service to a .tar file. Depending on whether '--dev' is given, it either builds a full release image or a development-optimised debug image (that copies the executable from the './.container-bins' folder instead of building it in-container). The development-optimised image prevents having to rebuild every time, but also creates much larger images." - ) - # Generate the install targets for the image - targets[f"install-{svc}-image"] = InstallImageTarget(f"install-{svc}-image", - f"./target/$RELEASE/brane-{svc}.tar", f"brane-{svc}", - dep=f"{svc}-image", - description=f"Installs the brane-{svc} image by loading it into the local Docker engine." - ) - -for svc in AUX_CENTRAL_SERVICES + AUX_WORKER_SERVICES: - # We might do different things - if svc == "xenon": - # Generate the service image build target - targets[f"{svc}-image"] = ImageTarget(f"{svc}-image", - f"./contrib/images/Dockerfile.xenon", f"./target/$RELEASE/aux-{svc}.tar", build_args={ "JUICEFS_ARCH": "$JUICEFS_ARCH" }, - description=f"Builds the container image for the aux-{svc} auxillary service to a .tar file." 
- ) - - # Generate the install targets for the image - targets[f"install-{svc}-image"] = InstallImageTarget(f"install-{svc}-image", - f"./target/$RELEASE/aux-{svc}.tar", f"aux-{svc}", - dep=f"{svc}-image", - description=f"Installs the aux-{svc} image by loading it into the local Docker engine." - ) - - else: - raise ValueError(f"Unknown auxillary service '{svc}'") - - - - - -##### MAIN FUNCTIONS ##### -def show_targets(args: argparse.Namespace) -> typing.NoReturn: - """ - Shows a list of all Targets (that have a description) and then quits. - """ - - # Prepare colour strings - bold = "\033[1m" if supports_color() else "" - green = "\033[32;1m" if supports_color() else "" - red = "\033[31;1m" if supports_color() else "" - grey = "\033[90m" if supports_color() else "" - end = "\033[0m" if supports_color() else "" - - # Sort them - supported : typing.List[Target] = [] - unsupported : typing.List[typing.Tuple[Target, str]] = [] - for target_name in targets: - # Get the resolved target - target = targets[target_name] - - # Only show them if they have a description - if len(target.description) == 0: continue - - # Next, sort if they are supported by the current environment or not - reason = target.is_supported(args) - if reason is None: supported.append(target) - else: unsupported.append((target, reason)) - - # Print them neatly - if len(supported) > 0: - print("\nTargets supported by environment:") - for target in supported: - print(" - {}{}{}{}".format(green, target.name, end, f"{grey} ({type(target).__name__}){end}" if args.debug else "")) - print(f"{wrap_description(target.description, 3, 100)}") - print() - if len(unsupported) > 0: - print("\nTargets unsupported by environment:") - for (target, reason) in unsupported: - print(" - {}{}{}{}".format(red, target.name, end, f"{grey} ({type(target).__name__}){end}" if args.debug else "")) - print(f"{wrap_description(target.description, 3, 100)}") - if args.debug: - print(f" {grey}Reason:{end}") - 
print(f"{grey}{wrap_description(reason, 3, 100)}{end}") - print() - if len(supported) == 0 and len(unsupported) == 0: - print("\nNo targets found.\n") - - # Done - exit(0) - -def inspect(args: argparse.Namespace) -> typing.NoReturn: - """ - Shows detailled information about a given target. - """ - - # Prepare colour strings - bold = "\033[1m" if supports_color() else "" - green = "\033[32;1m" if supports_color() else "" - red = "\033[31;1m" if supports_color() else "" - grey = "\033[90m" if supports_color() else "" - end = "\033[0m" if supports_color() else "" - - # Make sure there is exactly one target - if len(args.target) == 0: - print(f"Missing target to inspect", file=sys.stderr) - exit(1) - elif len(args.target) > 1: - print(f"Too many targets to inspect; give only one", file=sys.stderr) - exit(1) - - # Resolve the target - if args.target[0] not in targets: - print(f"Unknown target '{args.target[0]}'") - exit(1) - target = targets[args.target[0]] - - # Collect targets properties - srcs = target.srcs(args) - dsts = target.dsts(args) - - # Print properties - print() - print(f"{bold}Target '{end}{green}{target.name}{end}{bold}':{end}") - print(f" {grey}-{end} Type {grey}:{end} {bold}{type(target).__name__}{end}") - print(f" {grey}-{end} Source files {grey}:{end} {grey}" + (wrap_description(", ".join([ f"{end}{bold}'{resolve_args(s, args)}'{end}{grey}" for s in srcs ]), 20, 100, skip_first_indent=True) if len(srcs) > 0 else "") + f"{end}") - print(f" {grey}-{end} Result files {grey}:{end} {grey}" + (wrap_description(", ".join([ f"{end}{bold}'{resolve_args(d, args)}'{end}{grey}" for d in dsts ]), 20, 100, skip_first_indent=True) if len(dsts) > 0 else "") + f"{end}") - print(f" {grey}-{end} Description {grey}:{end} {wrap_description(target.description, 20, 100, skip_first_indent=True)}") - - # Print if supported - reason = target.is_supported(args) - print(f" {grey}-{end} Supported {grey}?{end} {f'{green}yes{end}' if reason is None else f'{red}no{end}'}{end}") - if 
reason is not None: - print(f" {grey}└>{end} Reason{grey}:{end} {wrap_description(reason, 14, 100, skip_first_indent=True)}") - - # Print the dependency tree - print(f" {grey}-{end} Dependency tree{grey}:{end}") - to_traverse: typing.List[typing.Tuple[typing.List[str], typing.Tuple[str, typing.List[typing.Any]]]] = [ ([], build_dependency_tree(target.name, args)) ] - while len(to_traverse) > 0: - # Pop the last node - (indents, (name, node)) = to_traverse.pop() - node.reverse() - - # Print the name with the correct depth - print(f" {grey}{''.join(indents[:-1] + ([ '└> ' ] if len(indents) > 0 else []))}{end}{green if name == target.name else ''}{name}{end if name == target.name else ''}") - - # Add the next nodes - to_traverse += [(indents + [ "| " if i > 0 else " " ], n) for (i, n) in enumerate(node)] - print() - - # Done - exit(0) - -def should_rebuild(args: argparse.Namespace) -> typing.NoReturn: - """ - Using the returncode, indicates whether the given Target should be rebuild or not. - """ - - # Make sure there is exactly one target - if len(args.target) == 0: - print(f"Missing target to analyse rebuild status", file=sys.stderr) - exit(1) - elif len(args.target) > 1: - print(f"Too many targets to analyse rebuild status; give only one", file=sys.stderr) - exit(1) - - # Get the target - if args.target[0] not in targets: - print(f"Unknown target '{args.target[0]}'") - exit(1) - target = targets[args.target[0]] - - # Simply call the thing and check if anything needs to be done - steps = deduce_build_steps(target.name, args) - if len(steps) > 0: - exit(0) - else: - exit(1) - - - -def build_dependency_tree(target_name: str, args: argparse.Namespace, parent_name: str = "", acyclic: set[str] = set()) -> typing.Tuple[str, typing.List[typing.Any]]: - """ - Builds the dependency tree of the given target. 
- - The tree is structered as follows: - - Every element represents a node, as a (name, branches) tuple - - An empty branches list means a leaf - """ - - # Resolve the target and get its dependencies in all cases - if target_name not in targets: - raise ValueError(f"Unknown dependency '{target_name}' of '{parent_name}'") - target = targets[target_name] - deps = target.deps(args) - - # Add to the list of things we've already seen - acyclic.add(target_name) - - # Base case: no dependencies - if len(deps) == 0: - return (target_name, []) - else: - # Make sure there is no cyclic dep - for dep in deps: - if dep in acyclic: raise ValueError(f"Cyclic dependency detected: {dep} depends (transitively) on itself") - - # Get the dependencies of the dependencies as elements in the list - return (target_name, [ build_dependency_tree(dep, args, parent_name=target_name, acyclic=acyclic.copy()) for dep in deps ]) - - - -def deduce_build_steps(target_name: str, args: argparse.Namespace) -> typing.List[set[typing.Tuple[Target, bool]]]: - """ - Builds a list of things to build and the order in which to build them - based on the target's dependency. This respects whether a Target should - be rebuilt and whether it had any effect (leaving targets out if - nothing is to be done). - - The order in which they are build is equal to that given in the list of - dependencies per target. In this case, every entry may be seen as a - 'timestep', where every dependency adds one time offset (since it needs - to be build before its parent). - - The resulting list has one entry per 'timestep'. In other words, the - order of the nested list matters (and must be build front to back), but - the order within the nested lists may be arbitrary. - - Aside from the list, there is also an extra buffer that may be used to - deduce whether - - Finally, note that if the Target itself doesn't have to be rebuild, an - empty list is returned. 
- """ - - def recursive_rewrite(name: str, node: typing.List[typing.Any], wip: typing.List[set[str]], parent_name: str = "", depth: int = 0): - """ - Nested function that performs the recursive rewrite of the - dependency tree. - """ - - # Go deeper first to add the children first - for (dname, dnode) in node: - recursive_rewrite(dname, dnode, wip, depth=depth + 1) - - # Next, add this package in the list with the appropriate depth - while depth >= len(wip): wip.append(set()) - wip[depth].add(name) - - - - # Step 1: build a tree of all dependencies involved - (target_name, dep_tree) = build_dependency_tree(target_name, args) - - - - # Step 2: rewrite the tree into the names only, separated in sets that may be done in parallel - build_steps : typing.List[set[str]] = [] - recursive_rewrite(target_name, dep_tree, build_steps) - build_steps.reverse() - - - - # Step 3: remove duplicate dependencies, leaving the chain in the oldest timesteps - building = set() - for step in build_steps: - to_remove = [] - for dep in step: - if dep in building: - to_remove.append(dep) - else: - building.add(dep) - for dep in to_remove: - step.remove(dep) - - - - # Step 4: resolve to Targets and discard those that are not needed to be build - result: typing.List[set[typing.Tuple[Target, bool]]] = [] - for step in build_steps: - new_step = set() - for dep_name in step: - # Attempt to get the given dependency - if dep_name not in targets: - raise ValueError(f"Unknown dependency '{dep_name}'") - rdep = targets[dep_name] - - # Add it to the new steps, together with if it needs to be rebuild or not - # (independent of dependencies) - new_step.add((rdep, rdep.is_outdated(args))) - result.append(new_step) - - - - # Step 4: Done, return - return result - - - -def build_target(target_name, args) -> int: - """ - Builds a target, returning 0 if everything was succesfull. - - This function acts as the 'main' function of the script. 
- """ - - if target_name not in targets: - perror(f"Could not find target: {target_name}") - return 1 - - # Do a warning - if args.dry_run: - pwarning("Simulating build only; no commands are actually run (due to '--dry-run')") - - # Get the to-be-build targets - todo = deduce_build_steps(target_name, args) - pdebug("To build: " + (", ".join([", ".join([f"'{target.name}'{'?' if not outdated else ''}" for (target, outdated) in step]) for step in todo]) if len(todo) > 0 else "")) - for step in todo: - # Build all of these (order doesn't matter, in case we go multi-thread one day) - for (target, outdated) in step: - # If the target is not outdated, check if it needs to be rebuild according to its dependencies - if not outdated and not target.deps_outdated(args): continue - if not outdated: pdebug(f"Target '{target.name}' is marked as outdated because one of its dependencies was rebuild triggering relevant changes") - - # Otherwise, something wanted us to build it so do it - target.build(args) - - # Success! - return 0 - - - -# Actual entrypoint -if __name__ == "__main__": - # Start defining the CLI arguments - parser = argparse.ArgumentParser() - - # Define general things - parser.add_argument("--debug", action="store_true", help="If given, whether to print debug messages (including reasons for recompilation or not)") - - # Define 'alternative commands' (i.e., they do a job and then quit) - parser.add_argument("-t", "--targets", action="store_true", help="If given, shows a list of all supported targets, then quits.") - parser.add_argument("-i", "--inspect", action="store_true", help="If given, shows detailled information about the given Target and then quits.") - parser.add_argument("-r", "--should-rebuild", action="store_true", help="If given, returns whether the given Target should be rebuild or not by returning '0' as exit code if it should, and '1' if it shouldn't. 
Use with '--debug' to get information about what makes the Target outdated.") - - # Define things that influence the compilation mode - parser.add_argument("target", nargs="*", help="The target to build. Use '--targets' to see a complete list.") - parser.add_argument("--dev", "--development", action="store_true", help="If given, builds the binaries and images in development mode. This adds debug symbols to binaries, enables extra debug prints and (in the case of the instance) enables an optimized, out-of-image building procedure. Will result in _much_ larger images.") - parser.add_argument("--con", "--containerized", action="store_true", help=f"If given, will compile (some of) the binaries in a container instead of cross-compiling them.") - parser.add_argument("-f", "--force", action="store_true", help=f"If given, forces recompilation of all assets (regardless of whether they have been build before or not). Note that this does not clear any Cargo or Docker cache, so they might still consider your source to be cached (run `{sys.argv[0] if len(sys.argv) >= 1 else 'make.py'} clean` to clear those caches).") - parser.add_argument("-d", "--dry-run", action="store_true", help=f"If given, skips the effects of compiling the assets, only simulating what would be done (implies '--debug').") - - # Define settings - parser.add_argument("-o", "--os", help=f"Determines the OS for which to compile. Only relevant for the Brane-CLI. By default, will be the host's OS (host OS: '{Os.host()}')") - parser.add_argument("-a", "--arch", help=f"The target architecture for which to compile. 
By default, will be the host's architecture (host architecture: '{Arch.host()}')") - parser.add_argument("-c", "--cache", default="./target/make_cache", help="The location of the cache location for file hashes and such.") - parser.add_argument("-R", "--reasoner", default="eflint", help="Selects the reasoning backend for the `brane-chk` image if that's being build.") - - # Resolve arguments - args = parser.parse_args() - - # Set the debug flag - if args.debug: - debug = True - - # Resolve the OS - try: - args.os = Os.new(args.os) if args.os is not None else Os.host() - except ValueError: - cancel(f"Unknown OS '{args.os}'") - # Resolve the architecture - try: - args.arch = Arch.new(args.arch) if args.arch is not None else Arch.host() - except ValueError: - cancel(f"Unknown architecture '{args.arch}'") - - # Handle any 'alternative commands' - if args.targets: show_targets(args) - if args.inspect: inspect(args) - if args.should_rebuild: should_rebuild(args) - - # Make sure there is at least one target - if len(args.target) == 0: - print("No target specified; nothing to do.") - exit(0) - - # Before we begin, move the current working directory to that of the file itself - os.chdir(os.path.dirname(os.path.realpath(__file__))) - - # Then make sure we generate the cache directory with a CACHEDIR.tag - if not os.path.exists(args.cache): - # Go through the tree and create missing directories one-by-one - first = True - stack = [] - for d in os.path.split(args.cache): - # Check if it exists - path = os.path.join(*(stack + [d])) - stack.append(d) - if os.path.exists(path): - continue - - # If it doesn't, then generate the directory - os.mkdir(path) - - # Generate the CACHEDIR.TAG if it's the first one - if first: - tag_path = os.path.join(path, "CACHEDIR.TAG") - try: - with open(tag_path, "w") as h: - h.write(f"Signature: 8a477f597d28d172789f06886806bc55\n") - h.write(f"# This file is a cache directory tag created by BRANE's `make.py`.\n") - h.write(f"# For information about 
cache directory tags, see:\n") - h.write(f"# https://www.brynosaurus.com/cachedir/\n") - except IOError as e: - pwarning(f"Failed to generate CACHEDIR.TAG at '{tag_path}': {e}") - exit(e.errno) - first = False - - # Call for the given targets - for target in args.target: - res = build_target(target, args) - if res != 0: exit(res) - exit(0) diff --git a/policy_token.json b/policy_token.json deleted file mode 100644 index 8beb774d..00000000 --- a/policy_token.json +++ /dev/null @@ -1 +0,0 @@ -eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiIsImtpZCI6IkEifQ.eyJleHAiOjQ4Mjg2NjM2MzAsInVzZXJuYW1lIjoidGltIiwic3lzdGVtIjoid29ya0xpbnV4In0.JJRCQ_t9AOJ5BigyaiYcKMHUJL7GoZWyTWF-ya3WQGc \ No newline at end of file diff --git a/test.bs b/test.bs deleted file mode 100644 index 5641d9e2..00000000 --- a/test.bs +++ /dev/null @@ -1,4 +0,0 @@ -import hello_world; - -#[on("test")] -println(hello_world()); From 96ee19d5d24ddf598fd1bb1006581f8aac3aaa13 Mon Sep 17 00:00:00 2001 From: Daniel Voogsgerd Date: Sun, 6 Apr 2025 22:26:18 +0200 Subject: [PATCH 06/12] feat(cd): Add nightly release --- .github/workflows/nightly_release.yml | 125 ++++++++++++++++++++++++++ 1 file changed, 125 insertions(+) create mode 100644 .github/workflows/nightly_release.yml diff --git a/.github/workflows/nightly_release.yml b/.github/workflows/nightly_release.yml new file mode 100644 index 00000000..1080542a --- /dev/null +++ b/.github/workflows/nightly_release.yml @@ -0,0 +1,125 @@ +name: Create the nightly release +on: + schedule: + # Take into account lut99's sleep schedule + - cron: 0 4 * * * + + # Manual mechanism to bump the nightly in case of a issue with the one from last night + workflow_dispatch: + inputs: + ref: + default: main + required: false + type: string + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +env: + CARGO_INCREMENTAL: 0 + CARGO_TERM_COLOR: always + GH_TOKEN: ${{ github.token }} + +jobs: + tag: + name: "Add nightly tag" + runs-on: 
ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref || 'main' }} + fetch-tags: true + - name: Check if there is anything to be done + run: | + [[ $(git rev-parse HEAD) != $(git rev-parse nightly) ]] + - name: Tag the latest commit + run: | + git config user.name "Brane" + git config user.email "brane@nonexistentemail.com" + git push --delete origin nightly || true + git tag -a nightly -m "Nightly release" + git push --tags + + build: + name: "Build & package" + needs: tag + strategy: + fail-fast: false + matrix: + include: + - runner: ubuntu-latest + os: linux + arch: x86_64 + - runner: macos-latest + os: macos + arch: aarch64 + - runner: windows-latest + os: windows + arch: x86_64 + - runner: ubuntu-24.04-arm + os: linux + arch: aarch64 + - runner: macos-13 + os: macos + arch: x86_64 + + runs-on: ${{ matrix.runner }} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: nightly + fetch-tags: true + - name: Set up Docker Buildx + if: ${{ matrix.os == 'linux' }} + uses: docker/setup-buildx-action@v3 + - name: Install Go + if: ${{ matrix.os == 'macos' }} + uses: actions/setup-go@v5 + with: + go-version: '^1.23.4' + - name: Update package version in Cargo.toml + run: | + cargo run --release --no-default-features --package xtask set-version -p nightly -m '$git_hash$git_dirty' + - name: Build + run: | + cargo run --release --no-default-features --package xtask build all + - name: Package + run: | + cargo run --release --no-default-features --package xtask package github + - name: Upload + uses: actions/upload-artifact@v4 + with: + name: build-${{ matrix.os }}-${{ matrix.arch }}-nightly + path: | + target/package/release/* + if-no-files-found: error + retention-days: 1 + + release: + name: "Release artifacts to GitHub" + needs: build + runs-on: ubuntu-latest + steps: + - name: Download artifacts + uses: actions/download-artifact@v4 + # without further specification, downloads all artifacts from the run + 
with: + path: release + merge-multiple: true + - name: Delete previous nightly release + run: gh -R $GITHUB_REPOSITORY release delete nightly + - name: Release + uses: softprops/action-gh-release@v2 + with: + files: | + release/* + fail_on_unmatched_files: true + tag_name: nightly + name: Nightly + prerelease: true + body: This is the nightly (daily) release of Brane. This build can occasionally break. Do not use in production. + draft: false + make_latest: false From 1a1ce5e9666cdad35b1eac061d886f374198bb4a Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Sun, 6 Apr 2025 08:51:53 +0000 Subject: [PATCH 07/12] Add renovate.json --- renovate.json | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 renovate.json diff --git a/renovate.json b/renovate.json new file mode 100644 index 00000000..5db72dd6 --- /dev/null +++ b/renovate.json @@ -0,0 +1,6 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "config:recommended" + ] +} From 5f62bb9b25957b8bd972b45ea2b1e3b588ac2b52 Mon Sep 17 00:00:00 2001 From: Daniel Voogsgerd Date: Sun, 6 Apr 2025 11:04:37 +0200 Subject: [PATCH 08/12] chore(renovate): Customize initial configuration --- renovate.json | 6 ------ renovate.json5 | 27 +++++++++++++++++++++++++++ 2 files changed, 27 insertions(+), 6 deletions(-) delete mode 100644 renovate.json create mode 100644 renovate.json5 diff --git a/renovate.json b/renovate.json deleted file mode 100644 index 5db72dd6..00000000 --- a/renovate.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "$schema": "https://docs.renovatebot.com/renovate-schema.json", - "extends": [ - "config:recommended" - ] -} diff --git a/renovate.json5 b/renovate.json5 new file mode 100644 index 00000000..9132e4a1 --- /dev/null +++ b/renovate.json5 @@ -0,0 +1,27 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "config:recommended", + ":semanticCommitTypeAll(chore)", + 
"schedule:earlyMondays", + ], + "labels": ["C-Dependencies"], + "prConcurrentLimit": 5, + "prHourlyLimit": 5, + "vulnerabilityAlerts": { + "addLabels": ["P-Critical", "C-Security"], + "assignees": ["@DanielVoogsgerd"] + }, + "packageRules": [ + { + "groupName": "all non-major dependencies (exclude 0.x.y)", + "groupSlug": "all-minor-patch-semver", + "matchPackageNames": ["*"], + "matchManagers": ["cargo"], + "matchUpdateTypes": ["minor", "patch"], + // Ignore the packages in development, see semver 2.0.0 + "matchCurrentValue": "!/^v?0\\./", + prPriority: 10 + } + ] +} From c5c31ff14dea33c699bba8e4dbc5e50f9b35cb7d Mon Sep 17 00:00:00 2001 From: Daniel Voogsgerd Date: Sun, 6 Apr 2025 23:24:52 +0200 Subject: [PATCH 09/12] Attempt to only group backwards compatible cargo updates --- renovate.json5 | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/renovate.json5 b/renovate.json5 index 9132e4a1..3757decd 100644 --- a/renovate.json5 +++ b/renovate.json5 @@ -12,16 +12,27 @@ "addLabels": ["P-Critical", "C-Security"], "assignees": ["@DanielVoogsgerd"] }, + // "lockFileMaintenance": { "enabled": true }, "packageRules": [ { + "matchUpdateTypes": ["minor"], + "matchManagers": ["cargo"], + "matchCurrentValue": "!/^v?0\\./", "groupName": "all non-major dependencies (exclude 0.x.y)", "groupSlug": "all-minor-patch-semver", - "matchPackageNames": ["*"], + "prPriority": 9, + }, + { + "matchUpdateTypes": ["patch"], "matchManagers": ["cargo"], - "matchUpdateTypes": ["minor", "patch"], - // Ignore the packages in development, see semver 2.0.0 - "matchCurrentValue": "!/^v?0\\./", - prPriority: 10 + "matchCurrentValue": "!/^v?0\\.0\\./", + "groupName": "all non-major dependencies (exclude 0.x.y)", + "groupSlug": "all-minor-patch-semver", + "prPriority": 9, + }, + { + "matchManagers": ["github-actions"], + "addLabels": ["A-CI", "A-CD"], } ] } From f5dd7937b4356d7e765e4a8edc1ab4a5b12fd6b8 Mon Sep 17 00:00:00 2001 From: Daniel Voogsgerd Date: Mon, 7 Apr 
2025 11:21:20 +0200 Subject: [PATCH 10/12] chore(xtask): Set MSRV --- xtask/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index bda0e1ef..be3cdef3 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -1,5 +1,6 @@ [package] name = "xtask" +rust-version = "1.82.0" version.workspace = true edition.workspace = true authors.workspace = true From 86099a27461c80e3038c6c7caf28ce8bfb37861f Mon Sep 17 00:00:00 2001 From: Daniel Voogsgerd Date: Mon, 7 Apr 2025 11:21:31 +0200 Subject: [PATCH 11/12] Set renovate timezone --- renovate.json5 | 1 + 1 file changed, 1 insertion(+) diff --git a/renovate.json5 b/renovate.json5 index 3757decd..728babda 100644 --- a/renovate.json5 +++ b/renovate.json5 @@ -5,6 +5,7 @@ ":semanticCommitTypeAll(chore)", "schedule:earlyMondays", ], + "timezone": "Europe/Amsterdam", "labels": ["C-Dependencies"], "prConcurrentLimit": 5, "prHourlyLimit": 5, From 7da5c53c04ae52b36e88cdacd07cde5abbc0716b Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 1 Sep 2025 01:23:08 +0000 Subject: [PATCH 12/12] chore(deps): update rust crate console to 0.16.0 --- brane-ast/Cargo.toml | 2 +- brane-cli-c/Cargo.toml | 2 +- brane-cli/Cargo.toml | 2 +- brane-ctl/Cargo.toml | 2 +- brane-exe/Cargo.toml | 2 +- brane-shr/Cargo.toml | 2 +- brane-tsk/Cargo.toml | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/brane-ast/Cargo.toml b/brane-ast/Cargo.toml index 50479e52..5ae14a0c 100644 --- a/brane-ast/Cargo.toml +++ b/brane-ast/Cargo.toml @@ -8,7 +8,7 @@ repository.workspace = true license.workspace = true [dependencies] -console = "0.15.5" +console = "0.16.0" enum-debug.workspace = true lazy_static = "1.4.0" log = "0.4.22" diff --git a/brane-cli-c/Cargo.toml b/brane-cli-c/Cargo.toml index 9d2c8683..83348e5b 100644 --- a/brane-cli-c/Cargo.toml +++ b/brane-cli-c/Cargo.toml @@ -16,7 +16,7 @@ doc = false [dependencies] -console = "0.15.5" +console = "0.16.0" 
humanlog.workspace = true libc = "0.2.154" log = "0.4.22" diff --git a/brane-cli/Cargo.toml b/brane-cli/Cargo.toml index ed9f2da1..a0d76796 100644 --- a/brane-cli/Cargo.toml +++ b/brane-cli/Cargo.toml @@ -19,7 +19,7 @@ base64 = "0.22.0" bollard = "0.18.0" chrono = "0.4.35" clap = { version = "4.5.6", features = ["derive","env"] } -console = "0.15.5" +console = "0.16.0" dialoguer = "0.11.0" dirs = "6.0.0" dotenvy = "0.15.0" diff --git a/brane-ctl/Cargo.toml b/brane-ctl/Cargo.toml index 66056fe3..002b5c40 100644 --- a/brane-ctl/Cargo.toml +++ b/brane-ctl/Cargo.toml @@ -16,7 +16,7 @@ path = "src/main.rs" base64ct = "1.6.0" bollard = "0.18.0" clap = { version = "4.5.6", features = ["derive","env"] } -console = "0.15.5" +console = "0.16.0" dialoguer = "0.11.0" diesel = { version = "2.2.3", features = ["sqlite"] } diesel_migrations = "2.2.0" diff --git a/brane-exe/Cargo.toml b/brane-exe/Cargo.toml index f7b6ea0e..9fc269d9 100644 --- a/brane-exe/Cargo.toml +++ b/brane-exe/Cargo.toml @@ -11,7 +11,7 @@ license.workspace = true async-recursion = "1.0.0" async-trait = "0.1.67" base64 = "0.22.0" -console = "0.15.5" +console = "0.16.0" enum-debug.workspace = true futures = "0.3.24" lazy_static = "1.4.0" diff --git a/brane-shr/Cargo.toml b/brane-shr/Cargo.toml index 1c6f280e..b71b80b5 100644 --- a/brane-shr/Cargo.toml +++ b/brane-shr/Cargo.toml @@ -9,7 +9,7 @@ license.workspace = true [dependencies] async-compression = { version = "0.4.0", features = ["tokio","gzip"] } -console = "0.15.5" +console = "0.16.0" dialoguer = { version = "0.11.0", features = ["completion", "history"] } enum-debug.workspace = true fs2 = "0.4.0" diff --git a/brane-tsk/Cargo.toml b/brane-tsk/Cargo.toml index be262e62..39d7e7ac 100644 --- a/brane-tsk/Cargo.toml +++ b/brane-tsk/Cargo.toml @@ -14,7 +14,7 @@ base64 = "0.22.0" base64ct = { version = "1.6.0", features = ["alloc"] } bollard = "0.18.0" chrono = "0.4.35" -console = "0.15.5" +console = "0.16.0" dialoguer = "0.11.0" enum-debug.workspace = true 
futures-util = "0.3.30"