diff --git a/Cargo.lock b/Cargo.lock index 99c0bcaa5d..6eadca521a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1267,6 +1267,7 @@ dependencies = [ "bitflags 2.7.0", "chrono", "clap 2.33.1", + "clap 4.5.9", "configopt", "ctrlc", "dirs", diff --git a/components/common/src/cli/clap_validators.rs b/components/common/src/cli/clap_validators.rs index fa4e1eed30..fc2c0da008 100644 --- a/components/common/src/cli/clap_validators.rs +++ b/components/common/src/cli/clap_validators.rs @@ -70,3 +70,248 @@ impl clap_v4::builder::TypedValueParser for HabPackageInstallSourceValueParser { } } } + +/// Struct implementing validator for Habitat Origin +/// +/// Validates with `habitat_core::origin::Origin::validate` function. +#[derive(Clone)] +pub struct HabOriginValueParser; + +impl clap_v4::builder::TypedValueParser for HabOriginValueParser { + type Value = String; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result { + let val = value.to_str().unwrap().to_string(); + + let result = habitat_core::origin::Origin::validate(val); + if result.is_err() { + let mut err = + clap_v4::Error::new(clap_v4::error::ErrorKind::ValueValidation).with_cmd(cmd); + if let Some(arg) = arg { + err.insert(clap_v4::error::ContextKind::InvalidArg, + clap_v4::error::ContextValue::String(arg.to_string())); + } + err.insert(clap_v4::error::ContextKind::InvalidValue, + clap_v4::error::ContextValue::String(format!("`{}`: {}", + value.to_string_lossy(), + result.err().unwrap(),))); + Err(err) + } else { + Ok(value.to_str().unwrap().to_string()) + } + } +} + +/// Struct implimenting validator that validates the value is a valid path +#[derive(Clone)] +pub struct FileExistsValueParser; + +impl clap_v4::builder::TypedValueParser for FileExistsValueParser { + type Value = String; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result { + parse_ref_internal(cmd, arg, value, false, 
false, "is not a valid file") + } +} + +// TODO: This will be used by `hab config` (this implements the functionality of +// `file_exists_or_stdin` validator in Clap v2. +/// Struct implementing validator that validates the valie is a valid 'file' or 'stdin' +#[derive(Clone)] +pub struct FileExistsOrStdinValueParser; + +impl clap_v4::builder::TypedValueParser for FileExistsOrStdinValueParser { + type Value = String; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result { + parse_ref_internal(cmd, arg, value, false, true, "is not a valid file or stdin") + } +} + +/// Struct implemeitng validator that valudates the value is a valid directory +/// +/// Internally uses `ValidPathValueParser` +#[derive(Clone)] +pub struct DirExistsValueParser; + +impl clap_v4::builder::TypedValueParser for DirExistsValueParser { + type Value = std::path::PathBuf; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result { + parse_ref_internal(cmd, arg, value, true, false, "is not a valid directory").map(|x| { + x.into() + }) + } +} + +// Checks whether a give path is a file or a dir or stdin, used internally by the validators +// +// eg. FileExistsValueParser will call this command with both `check_dir` and `check_stdin` set to +// false. DirExistsValueParser will call this command with `check_dir` set to `true` and +// `check_stdin` set to `false` etc. 
+fn check_valid_file_dir_stdin(path: &std::path::Path, check_dir: bool, check_stdin: bool) -> bool { + let mut is_valid = path.is_file(); + + if !is_valid && check_dir { + is_valid = path.is_dir(); + } + + is_valid = if check_stdin { + if is_valid { + is_valid + } else if let Some(v) = path.to_str() { + v == "-" + } else { + false + } + } else { + is_valid + }; + + is_valid +} + +fn parse_ref_internal(cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr, + check_dir: bool, + check_stdin: bool, + err_str: &str) + -> Result { + let val = value.to_str().unwrap().to_string(); + + let result = std::path::Path::new(&val); + if !check_valid_file_dir_stdin(result, check_dir, check_stdin) { + let mut err = clap_v4::Error::new(clap_v4::error::ErrorKind::ValueValidation).with_cmd(cmd); + if let Some(arg) = arg { + err.insert(clap_v4::error::ContextKind::InvalidArg, + clap_v4::error::ContextValue::String(arg.to_string())); + } + err.insert(clap_v4::error::ContextKind::InvalidValue, + clap_v4::error::ContextValue::String(format!("`{}`: {}", + value.to_string_lossy(), + err_str,))); + Err(err) + } else { + Ok(value.to_str().unwrap().to_string()) + } +} +/// Validate a given file is a 'toml' file or contains valid package idents only. +/// +/// Packages to be installed can be read from a 'toml' file or a file containing package idents +/// only. The actual validation of whether the contents of the 'toml' file are correct is performed +/// by the actual command that calls this validation. This validation will succeed if a file is a +/// 'toml' file (possibly in wrong format) or the file contains packaged identifiers, one per line. 
+#[derive(Clone)] +pub struct TomlOrPkgIdentFileValueParser; + +use crate::cli::{file_into_idents, + is_toml_file}; + +impl clap_v4::builder::TypedValueParser for TomlOrPkgIdentFileValueParser { + type Value = String; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result { + let val = value.to_str().unwrap().to_string(); + + if is_toml_file(&val) { + return Ok(val); + } + + let result = file_into_idents(&val); + if result.is_err() { + let mut err = + clap_v4::Error::new(clap_v4::error::ErrorKind::ValueValidation).with_cmd(cmd); + if let Some(arg) = arg { + err.insert(clap_v4::error::ContextKind::InvalidArg, + clap_v4::error::ContextValue::String(arg.to_string())); + } + err.insert(clap_v4::error::ContextKind::InvalidValue, + clap_v4::error::ContextValue::String(format!("`{}`: {}", + value.to_string_lossy(), + result.err().unwrap(),))); + Err(err) + } else { + Ok(val) + } + } +} + +/// Validates whether given input is a valid Package Identifier +/// +/// This validator returns success if the given input is a valid simple Package Identifier or a +/// fully qualified PackageIdentifier +/// +/// Use `value_parser = HabPkgIdentValueParser::simple()` for simple Package Identifier. +/// Use `value_parser = HabPkgIdentValueParser::full()` for fully qualified Package Identifier. 
+#[derive(Clone)] +pub struct HabPkgIdentValueParser { + fully_qualified: bool, +} + +impl HabPkgIdentValueParser { + /// For Simple Package Identifier of the form 'origin/name' + pub fn simple() -> Self { Self { fully_qualified: false, } } + + /// For Full Package Identifier of the form 'origin/name/version/release' + pub fn full() -> Self { Self { fully_qualified: true, } } +} + +use habitat_core::package::ident::{FullyQualifiedPackageIdent, + PackageIdent}; + +impl clap_v4::builder::TypedValueParser for HabPkgIdentValueParser { + type Value = PackageIdent; + + fn parse_ref(&self, + cmd: &clap_v4::Command, + arg: Option<&clap_v4::Arg>, + value: &std::ffi::OsStr) + -> Result { + let val = value.to_str().unwrap().to_string(); + + let result = if self.fully_qualified { + FullyQualifiedPackageIdent::from_str(&val).err() + } else { + PackageIdent::from_str(&val).err() + }; + + if result.is_some() { + let mut err = + clap_v4::Error::new(clap_v4::error::ErrorKind::ValueValidation).with_cmd(cmd); + if let Some(arg) = arg { + err.insert(clap_v4::error::ContextKind::InvalidArg, + clap_v4::error::ContextValue::String(arg.to_string())); + } + err.insert(clap_v4::error::ContextKind::InvalidValue, + clap_v4::error::ContextValue::String(format!("`{}`: {}", + value.to_string_lossy(), + result.unwrap(),))); + Err(err) + } else { + Ok(val.into()) + } + } +} + +// TODO: Add Unit tests for all validators diff --git a/components/core/src/package/ident.rs b/components/core/src/package/ident.rs index 2af3f1d91b..f05ebf1952 100644 --- a/components/core/src/package/ident.rs +++ b/components/core/src/package/ident.rs @@ -224,6 +224,12 @@ impl FromStr for PackageIdent { } } +impl From for PackageIdent { + fn from(ident: String) -> Self { + Self::from_str(ident.as_str()).expect("Invalid Package Identifier") + } +} + impl PartialOrd for PackageIdent { /// Packages can be compared according to the following: /// diff --git a/components/hab/Cargo.toml b/components/hab/Cargo.toml index 
c7048cf0a8..e7633ba440 100644 --- a/components/hab/Cargo.toml +++ b/components/hab/Cargo.toml @@ -15,8 +15,6 @@ doc = false base64 = "*" bitflags = "*" chrono = {version = "*", features = ["serde"]} -clap = { git = "https://github.com/habitat-sh/clap.git", branch = "v2-master", features = [ "suggestions", "color", "unstable" ] } -configopt = { git = "https://github.com/habitat-sh/configopt.git" } ctrlc = "*" dirs = "*" env_logger = "*" @@ -42,7 +40,6 @@ same-file = "*" serde = { version = "*", features = ["derive"] } serde_json = { version = "*", features = [ "preserve_order" ] } serde_yaml = "*" -structopt = { git = "https://github.com/habitat-sh/structopt.git" } tabwriter = "*" tar = "*" termcolor = "*" @@ -55,6 +52,13 @@ walkdir = "*" rustls-webpki = { version = "*", features = ["alloc"] } tempfile = "*" + +clap = { git = "https://github.com/habitat-sh/clap.git", branch = "v2-master", features = [ "suggestions", "color", "unstable" ] , optional = true} +configopt = { git = "https://github.com/habitat-sh/configopt.git" , optional = true} +structopt = { git = "https://github.com/habitat-sh/structopt.git" , optional = true} + +clap_v4 = { version = "4", package = "clap", features = ["env", "derive", "string", "wrap_help"], optional = true } + [dependencies.uuid] version = "*" features = ["v4"] @@ -65,6 +69,8 @@ winapi = { version = "^0.3", features = ["winuser", "windef"] } winreg = "*" [features] -default = ["supported_targets"] +v2 = [ "clap", "configopt", "structopt" ] +v4 = [ "supported_targets", "clap_v4" ] +default = ["supported_targets", "v2"] functional = [] supported_targets = ["habitat_core/supported_targets"] diff --git a/components/hab/src/cli.rs b/components/hab/src/cli.rs index 0a631672ec..55a9ebeaa2 100644 --- a/components/hab/src/cli.rs +++ b/components/hab/src/cli.rs @@ -11,10 +11,7 @@ use habitat_common::{cli::{file_into_idents, use habitat_core::{origin::Origin as CoreOrigin, package::{Identifiable, PackageIdent}}; -use serde::{Deserialize, - 
Serialize}; -use std::{fmt, - path::Path, +use std::{path::Path, result, str::FromStr}; use structopt::StructOpt; @@ -23,45 +20,9 @@ use structopt::StructOpt; /// ran to completion with a successful result. The Launcher should not attempt to restart /// the Supervisor and should exit immediately with a successful exit code. pub const OK_NO_RETRY_EXCODE: i32 = 84; -pub const AFTER_HELP: &str = - "\nALIASES:\n apply Alias for: 'config apply'\n install Alias for: 'pkg \ - install'\n run Alias for: 'sup run'\n setup Alias for: 'cli setup'\n \ - start Alias for: 'svc start'\n stop Alias for: 'svc stop'\n term \ - Alias for: 'sup term'\n"; pub fn get(_feature_flags: FeatureFlag) -> App<'static, 'static> { Hab::clap() } -//////////////////////////////////////////////////////////////////////// - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Deserialize, Serialize)] -pub enum KeyType { - Public, - Secret, -} - -impl FromStr for KeyType { - type Err = crate::error::Error; - - fn from_str(value: &str) -> result::Result { - match value { - "public" => Ok(Self::Public), - "secret" => Ok(Self::Secret), - _ => Err(Self::Err::KeyTypeParseError(value.to_string())), - } - } -} - -impl fmt::Display for KeyType { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - KeyType::Public => write!(f, "public"), - KeyType::Secret => write!(f, "secret"), - } - } -} - -//////////////////////////////////////////////////////////////////////// - pub fn parse_optional_arg(name: &str, m: &ArgMatches) -> Option where ::Err: std::fmt::Debug { @@ -147,20 +108,20 @@ mod tests { "sup", "run", "--application", - "--environment=env"]); + "--environment=env",]); assert!(r.is_ok()); let r = get(no_feature_flags()).get_matches_from_safe(vec!["hab", "svc", "load", "--application=app", "--environment", - "pkg/ident"]); + "pkg/ident",]); assert!(r.is_ok()); let r = get(no_feature_flags()).get_matches_from_safe(vec!["hab", "svc", "load", "--application", - "pkg/ident"]); + "pkg/ident",]); 
assert!(r.is_ok()); } diff --git a/components/hab/src/cli/hab.rs b/components/hab/src/cli/hab.rs index 1287f2fb30..1f77922b00 100644 --- a/components/hab/src/cli/hab.rs +++ b/components/hab/src/cli/hab.rs @@ -10,10 +10,12 @@ pub mod ring; pub mod studio; pub mod sup; pub mod svc; -#[cfg(test)] -mod tests; pub mod user; pub mod util; + +#[cfg(test)] +mod tests; + #[cfg(any(target_os = "macos", any(all(target_os = "linux", any(target_arch = "x86_64", target_arch = "aarch64")), @@ -57,7 +59,7 @@ use self::{bldr::*, UserKeyGenerate}, util::{CacheKeyPath, ConfigOptCacheKeyPath}}; -use crate::{cli::AFTER_HELP, +use crate::{AFTER_HELP, VERSION}; use configopt::ConfigOpt; use structopt::{clap::AppSettings, diff --git a/components/hab/src/cli/hab/origin.rs b/components/hab/src/cli/hab/origin.rs index 94e41a2338..c32e401125 100644 --- a/components/hab/src/cli/hab/origin.rs +++ b/components/hab/src/cli/hab/origin.rs @@ -6,8 +6,8 @@ use super::util::{AuthToken, ConfigOptBldrOrigin, ConfigOptBldrUrl, ConfigOptCacheKeyPath}; -use crate::cli::{valid_origin, - KeyType}; +use crate::{cli::valid_origin, + key_type::KeyType}; use configopt::ConfigOpt; use habitat_core::origin::OriginMemberRole; use serde::Serialize; diff --git a/components/hab/src/cli/hab/pkg.rs b/components/hab/src/cli/hab/pkg.rs index e8b84e0962..18260342ca 100644 --- a/components/hab/src/cli/hab/pkg.rs +++ b/components/hab/src/cli/hab/pkg.rs @@ -63,7 +63,7 @@ pub struct PkgPath { /// Displays the binds for a service #[derive(ConfigOpt, StructOpt)] -#[structopt(name = "binds", no_version)] +#[structopt(name = "binds", no_version, settings = &[AppSettings::ArgRequiredElseHelp])] pub struct PkgBinds { #[structopt(flatten)] pkg_ident: PkgIdent, diff --git a/components/hab/src/cli/hab/tests.rs b/components/hab/src/cli/hab/tests.rs index 0b2338f5d5..4e40fca7d4 100644 --- a/components/hab/src/cli/hab/tests.rs +++ b/components/hab/src/cli/hab/tests.rs @@ -439,12 +439,14 @@ fn test_hab_svc_load_flag_ordering() { 
assert!(!load.force); assert_eq!(load.pkg_ident.pkg_ident(), pkg_ident); - let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "load", "--force", "core/redis"]).unwrap(); + let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "load", "--force", "core/redis"]) + .unwrap(); let load = extract_hab_svc_load(hab); assert!(load.force); assert_eq!(load.pkg_ident.pkg_ident(), pkg_ident); - let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "load", "core/redis", "--force"]).unwrap(); + let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "load", "core/redis", "--force"]) + .unwrap(); let load = extract_hab_svc_load(hab); assert!(load.force); assert_eq!(load.pkg_ident.pkg_ident(), pkg_ident); @@ -452,7 +454,8 @@ fn test_hab_svc_load_flag_ordering() { #[test] fn test_hab_svc_update_empty_binds() { - let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "update", "core/redis", "--bind"]).unwrap(); + let hab = Hab::try_from_iter_with_configopt(&["hab", "svc", "update", "core/redis", "--bind"]) + .unwrap(); let update = extract_hab_svc_update(hab); assert_eq!(update.bind, Some(vec![])); diff --git a/components/hab/src/cli_v4.rs b/components/hab/src/cli_v4.rs new file mode 100644 index 0000000000..654b6efb81 --- /dev/null +++ b/components/hab/src/cli_v4.rs @@ -0,0 +1,141 @@ +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::{ui::UI, + FeatureFlag}; + +use crate::{error::Result as HabResult, + AFTER_HELP, + VERSION}; + +mod pkg; +use pkg::PkgCommand; + +mod utils; +use utils::CacheKeyPath; + +#[derive(Debug, Clone, Parser)] +#[command(name = "hab", + version = VERSION, + about = "Patents: https://chef.io/patents\n\"A Habitat is the natural environment for your services\" - Alan Turing", + author = "\nThe Habitat Maintainers ", + after_help = AFTER_HELP, + arg_required_else_help = true, + propagate_version = true, + help_template = "{name} {version} {author-section} {about-section} \ + \n{usage-heading} 
{usage}\n\n{all-args}\n{after-help}\n", + )] +enum Hab { + /// Commands relating to Habitat Builder + Bldr(BldrCommand), + + /// Commands relating to Habitat runtime config + Cli(CliCommand), + + Config(ConfigCommand), + + File(FileCommand), + + License(LicenseCommand), + + Origin(OriginCommand), + + /// Commands relating to Habitat packages + #[clap(subcommand)] + Pkg(PkgCommand), + + Plan(PlanCommand), + + Ring(RingCommand), + + Studio(StudioCommand), + + Sup(SupCommand), + + SupportBundle, + + Svc(SvcCommand), + + User(UserCommand), + + // Aliases Below + Apply(ServiceConfigCommand), + + Install(PkgInstallCommand), + + Run(SupRunCommand), + + Setup(CacheKeyPath), + + Start(SvcStartCommand), + + Stop(SvcStopCommand), + + Term, +} + +impl Hab { + async fn do_cli_command(&self, ui: &mut UI, feature_flags: FeatureFlag) -> HabResult<()> { + match self { + Self::Pkg(pkg_command) => pkg_command.do_command(ui, feature_flags).await, + _ => todo!(), + } + } +} + +#[derive(Clone, Debug, Parser)] +pub(crate) struct BldrCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct CliCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct ConfigCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct FileCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct LicenseCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct OriginCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct PlanCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct RingCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct StudioCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct SupCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct SvcCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct UserCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct ServiceConfigCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct PkgInstallCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct SupRunCommand; + 
+#[derive(Clone, Debug, Parser)] +pub(crate) struct SvcStartCommand; + +#[derive(Clone, Debug, Parser)] +pub(crate) struct SvcStopCommand; + +pub async fn cli_driver(ui: &mut UI, feature_flags: FeatureFlag) -> HabResult<()> { + let cli = Hab::parse(); + cli.do_cli_command(ui, feature_flags).await +} diff --git a/components/hab/src/cli_v4/pkg.rs b/components/hab/src/cli_v4/pkg.rs new file mode 100644 index 0000000000..dc3eadf282 --- /dev/null +++ b/components/hab/src/cli_v4/pkg.rs @@ -0,0 +1,190 @@ +// Implementation of `hab pkg` command + +use clap_v4 as clap; + +use clap::Subcommand; + +use habitat_common::{ui::UI, + FeatureFlag}; + +use crate::error::Result as HabResult; + +mod binds; +mod binlink; +mod build; +mod bulk_upload; + +mod channels; +mod config; + +mod delete; +mod demote; +mod dependencies; +mod download; + +mod env; +mod exec; + +#[cfg(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64")))] +mod export; + +mod hash; +mod header; + +mod info; +mod install; + +mod list; + +mod uninstall; +mod upload; + +mod path; +mod promote; +mod provides; + +mod search; +mod sign; + +mod verify; + +#[derive(Clone, Debug, Subcommand)] +#[command(author = "\nThe Habitat Maintainers ", + arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(super) enum PkgCommand { + /// Displays the binds for a service + Binds(binds::PkgBindsOptions), + + /// Creates a binlink for a package binary in a common 'PATH' location + Binlink(binlink::PkgBinlinkOptions), + + /// Builds a plan using Habitat Studio + Build(build::PkgBuildOptions), + + /// Bulk uploads Habitat artifacts from to a depot from a local directory + Bulkupload(bulk_upload::PkgBulkUploadOptions), + + /// Find out what channels a package belongs to + Channels(channels::PkgChannelsOptions), + + /// Displays the default configuration 
options for a service + Config(config::PkgConfigOptions), + + /// Removes a package from Builder + Delete(delete::PkgDeleteOptions), + + /// Demote a package from a specified channel + Demote(demote::PkgDemoteOptions), + + /// Returns Habitat Artifact dependencies, by default the direct dependencies + /// of the package + Dependencies(dependencies::PkgDependenciesOptions), + + /// Download Habitat artifacts (including dependencies and keys) from Builder + Download(download::PkgDownloadOptions), + + /// Prints the runtime environment of a specific installed package + Env(env::PkgEnvOptions), + + /// Execute a command using the 'PATH' context of an installed package + Exec(exec::PkgExecOptions), + + #[cfg(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64")))] + #[clap(subcommand)] + /// Exports the package to the specified format + Export(export::PkgExportCommand), + + /// Generates a blake2b hashsum from a target at any given filepath + Hash(hash::PkgHashOptions), + + /// Returns the Habitat Artifact header + Header(header::PkgHeaderOptions), + + /// Returns the Habitat Artifact information + Info(info::PkgInfoOptions), + + /// Installs a Habitat package from Builder or locally from a Habitat Artifact + Install(install::PkgInstallOptions), + + /// List all versions of installed packages + List(list::PkgListOptions), + + /// Prints the path to a specific installed release of a package + Path(path::PkgPathOptions), + + /// Promote a package to a specified channel + Promote(promote::PkgPromoteOptions), + + /// Search installed Habitat packages for a given file + Provides(provides::PkgProvidesOptions), + + /// Search for a package in Builder + Search(search::PkgSearchOptions), + + /// Signs an archive with an origin key, generating a Habitat Artifact + Sign(sign::PkgSignOptions), + + /// Safely uninstall a package and dependencies from a local filesystem + 
Uninstall(uninstall::PkgUninstallOptions), + + /// Uploads a local Habitat Artifact to Builder + Upload(upload::PkgUploadOptions), + + /// Verifies a Habitat Artifact with an origin key + Verify(verify::PkgVerifyOptions), +} + +impl PkgCommand { + pub(crate) async fn do_command(&self, + ui: &mut UI, + feature_flags: FeatureFlag) + -> HabResult<()> { + match self { + Self::Binds(opts) => opts.do_binds(), + Self::Binlink(opts) => opts.do_binlink(ui), + Self::Build(opts) => opts.do_build(ui, feature_flags).await, + Self::Bulkupload(opts) => opts.do_bulkupload(ui).await, + + Self::Channels(opts) => opts.do_channels(ui).await, + Self::Config(opts) => opts.do_config(), + + Self::Delete(opts) => opts.do_delete(ui).await, + Self::Demote(opts) => opts.do_demote(ui).await, + Self::Dependencies(opts) => opts.do_dependencies(), + Self::Download(opts) => opts.do_download(ui).await, + + Self::Env(opts) => opts.do_env(), + Self::Exec(opts) => opts.do_exec(), + #[cfg(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64")))] + Self::Export(cmd) => cmd.do_export(ui).await, + + Self::Hash(opts) => opts.do_hash(), + Self::Header(opts) => opts.do_header(ui), + + Self::Info(opts) => opts.do_info(ui), + Self::Install(opts) => opts.do_install(ui, feature_flags).await, + + Self::List(opts) => opts.do_list(), + + Self::Path(opts) => opts.do_path(), + Self::Promote(opts) => opts.do_promote(ui).await, + Self::Provides(opts) => opts.do_provides(), + + Self::Search(opts) => opts.do_search().await, + Self::Sign(opts) => opts.do_sign(ui), + + Self::Uninstall(opts) => opts.do_uninstall(ui).await, + Self::Upload(opts) => opts.do_upload(ui).await, + + Self::Verify(opts) => opts.do_verify(ui), + } + } +} diff --git a/components/hab/src/cli_v4/pkg/binds.rs b/components/hab/src/cli_v4/pkg/binds.rs new file mode 100644 index 0000000000..89d22823a8 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/binds.rs @@ -0,0 +1,29 
@@ +// Implemenatation of `hab pkg binds` + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use habitat_common::{cli::clap_validators::HabPkgIdentValueParser, + command::package::binds}; + +use crate::error::Result as HabResult; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgBindsOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, +} + +impl PkgBindsOptions { + pub(super) fn do_binds(&self) -> HabResult<()> { + binds::start(&self.pkg_ident, &*FS_ROOT_PATH).map_err(Into::into) + } +} diff --git a/components/hab/src/cli_v4/pkg/binlink.rs b/components/hab/src/cli_v4/pkg/binlink.rs new file mode 100644 index 0000000000..4dc9e768d3 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/binlink.rs @@ -0,0 +1,64 @@ +// Implemenatation of `hab pkg binlink` + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use habitat_common::{cli::{clap_validators::HabPkgIdentValueParser, + BINLINK_DIR_ENVVAR, + DEFAULT_BINLINK_DIR}, + ui::UI}; + +use crate::{command::pkg::binlink, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgBinlinkOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, + + /// The command to binlink (ex: bash) + #[arg(name = "BINARY")] + binary: Option, + + /// Set the destination 
directory + #[arg(name = "DEST_DIR", + short = 'd', + long = "dest", + env = BINLINK_DIR_ENVVAR, + default_value = DEFAULT_BINLINK_DIR)] + dest_dir: PathBuf, + + /// Overwrite existing binlinks + #[arg(name = "FORCE", short = 'f', long = "force", action = ArgAction::SetTrue)] + force: bool, +} + +impl PkgBinlinkOptions { + pub(super) fn do_binlink(&self, ui: &mut UI) -> HabResult<()> { + if let Some(binary) = &self.binary { + binlink::start(ui, + &self.pkg_ident, + &binary, + &self.dest_dir, + &FS_ROOT_PATH, + self.force) + } else { + binlink::binlink_all_in_pkg(ui, + &self.pkg_ident, + &self.dest_dir, + &FS_ROOT_PATH, + self.force) + } + } +} diff --git a/components/hab/src/cli_v4/pkg/build.rs b/components/hab/src/cli_v4/pkg/build.rs new file mode 100644 index 0000000000..7e6a43c566 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/build.rs @@ -0,0 +1,133 @@ +// Implemenatation of `hab pkg build` +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_common::ui::UI; + +use habitat_common::FeatureFlag; + +use habitat_core::{crypto, + crypto::keys::KeyCache, + origin::Origin}; + +use crate::{command::pkg::build, + error::Result as HabResult}; + +#[cfg(target_os = "linux")] +use crate::error::Error as HabError; + +use crate::cli_v4::utils::CacheKeyPath; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgBuildOptions { + // TODO: Should multiple Origins be supported? The semantics looks like that but the original + // v2 code does not look like supporting. + /// Installs secret origin keys (ex: "unicorn", "acme,other,acme-ops") + #[arg(name = "HAB_ORIGIN_KEYS", short = 'k', long = "keys", action = ArgAction::Append)] + hab_origin_keys: Vec, + + // TODO: Make it a more semantic `PathBuf` Currently not done due to limitation of + // `command::pkg::build`. 
Revisit it after removing `clap-v2` + /// Sets the Studio root (default: /hab/studios/) + #[arg(name = "HAB_STUDIO_ROOT", short = 'r', long = "root")] + hab_studio_root: Option, + + // TODO: Same as above + /// Sets the source path [default: $PWD] + #[arg(name = "SRC_PATH", short = 's', long = "src")] + src_path: Option, + + // TODO : Same as above + /// A directory containing a plan file or a `habitat/` directory which contains the plan + /// file + #[arg(name = "PLAN_CONTEXT")] + plan_context: String, + + #[command(flatten)] + cache_key_path: CacheKeyPath, + + #[cfg(target_os = "linux")] + /// Build a native package on the host system without a studio + #[arg(name = "NATIVE_PACKAGE", short = 'N', long = "native-package", conflicts_with_all = &["REUSE", "DOCKER"])] + native_package: bool, + + #[cfg(any(target_os = "linux", target_os = "windows"))] + /// Reuses a previous Studio for the build (default: clean up before building) + // Only a truly native/local Studio can be reused--the Docker implementation will always be + // ephemeral + #[arg(name = "REUSE", short = 'R', long = "reuse", action = ArgAction::SetTrue)] + reuse: bool, + + #[cfg(any(target_os = "linux", target_os = "windows"))] + /// Uses a Dockerized Studio for the build + #[arg(name = "DOCKER", short = 'D', long = "docker", action = ArgAction::SetTrue)] + docker: bool, + + /// Channel used to retrieve plan dependencies for Chef supported origins + #[arg(name = "REFRESH_CHANNEL", + short = 'f', + long = "refresh-channel", + env = "HAB_REFRESH_CHANNEL", + default_value = "stable")] + refresh_channel: Option, +} + +impl PkgBuildOptions { + // Required because of lot of `cfg`... 
+ #[allow(unused_variables)] + pub(super) async fn do_build(&self, ui: &mut UI, feature_flags: FeatureFlag) -> HabResult<()> { + if !self.hab_origin_keys.is_empty() { + crypto::init()?; + let key_cache = KeyCache::new::((&self.cache_key_path).into()); + for origin in self.hab_origin_keys.iter() { + // Validate that a secret signing key is present on disk + // for each origin. + key_cache.latest_secret_origin_signing_key(origin)?; + } + } + + let native_package = false; + + let native_package = self.should_build_native_package(feature_flags)?; + + let (reuse_flag, docker_flag) = (false, false); + + #[cfg(any(target_os = "linux", target_os = "windows"))] + let (reuse_flag, docker_flag) = (self.reuse, self.docker); + + build::start(ui, + self.plan_context.as_ref(), + self.hab_studio_root.as_deref(), + self.src_path.as_deref(), + &self.hab_origin_keys, + native_package, + reuse_flag, + docker_flag, + self.refresh_channel.as_deref()).await + } + + #[cfg(target_os = "linux")] + fn should_build_native_package(&self, feature_flags: FeatureFlag) -> HabResult { + if self.native_package { + if !feature_flags.contains(FeatureFlag::NATIVE_PACKAGE_SUPPORT) { + return Err(HabError::ArgumentError(String::from("`--native-package` is only \ + available when \ + `HAB_FEAT_NATIVE_PACKAGE_SUPPORT` \ + is set"))); + } + Ok(true) + } else { + Ok(false) + } + } + + #[cfg(not(target_os = "linux"))] + fn should_build_native_package(&self, _: FeatureFlag) -> HabResult { Ok(false) } +} diff --git a/components/hab/src/cli_v4/pkg/bulk_upload.rs b/components/hab/src/cli_v4/pkg/bulk_upload.rs new file mode 100644 index 0000000000..aa6367f6ff --- /dev/null +++ b/components/hab/src/cli_v4/pkg/bulk_upload.rs @@ -0,0 +1,85 @@ +// Implemenatation of `hab pkg bulkupload` + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_common::{cli::clap_validators::DirExistsValueParser, + ui::UI}; + +use habitat_core::{crypto::keys::KeyCache, + ChannelIdent}; + +use 
habitat_api_client::BuildOnUpload; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::bulkupload, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgBulkUploadOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + #[command(flatten)] + auth_token: AuthToken, + + /// Optional additional release channel to upload package to. Packages are always uploaded + /// to `unstable`, regardless of the value of this option + #[arg(name = "CHANNEL", short = 'c', long = "channel")] + channel: Option, + + /// Skip checking availability of package and force uploads, potentially overwriting a + /// stored copy of a package + #[arg(name = "FORCE", long = "force", action = ArgAction::SetTrue)] + force: bool, + + // TODO: This option is to be removed? + /// Enable auto-build for all packages in this upload. Only applicable to SaaS Builder + #[arg(name = "AUTO_BUILD", long = "auto-build", action = ArgAction::SetTrue)] + auto_build: bool, + + /// Skip the confirmation prompt and automatically create origins that do not exist in the + /// target Builder + #[arg(name = "AUTO_CREATE_ORIGINS", long = "auto-create-origins", action = ArgAction::SetTrue)] + auto_create_channels: bool, + + // TODO: Add Path Exists validator. 
+ /// Directory Path from which artifacts will be uploaded + #[arg(name = "UPLOAD_DIRECTORY", value_parser = DirExistsValueParser)] + upload_directory: PathBuf, +} + +impl PkgBulkUploadOptions { + pub(super) async fn do_bulkupload(&self, ui: &mut UI) -> HabResult<()> { + let artifact_path = self.upload_directory.join("artifacts"); + let key_path = self.upload_directory.join("keys"); + let key_cache = KeyCache::new(key_path); + key_cache.setup()?; + + let auto_build = if self.auto_build { + BuildOnUpload::PackageDefault + } else { + BuildOnUpload::Disable + }; + + let auth_token = self.auth_token.from_cli_or_config()?; + + bulkupload::start(ui, + &self.bldr_url.to_string(), + &self.channel, + &auth_token, + &artifact_path, + self.force, + auto_build, + self.auto_create_channels, + &key_cache).await + } +} diff --git a/components/hab/src/cli_v4/pkg/channels.rs b/components/hab/src/cli_v4/pkg/channels.rs new file mode 100644 index 0000000000..93182a84f0 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/channels.rs @@ -0,0 +1,57 @@ +// Implementation of `hab pkg channels` command + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::{cli::{clap_validators::HabPkgIdentValueParser, + PACKAGE_TARGET_ENVVAR}, + ui::UI}; + +use habitat_core::package::{target, + PackageIdent, + PackageTarget}; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::channels, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgChannelsOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + /// A fully qualified package identifier (ex: core/busybox-static/1.42.2/20170513215502) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::full())] + pkg_ident: PackageIdent, + + /// A package target (ex: x86_64-windows) (default: system appropriate target) + #[arg(name 
= "PKG_TARGET", env = PACKAGE_TARGET_ENVVAR)] + pkg_target: Option, + + #[command(flatten)] + auth_token: AuthToken, +} + +impl PkgChannelsOptions { + pub(super) async fn do_channels(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.try_from_cli_or_config(); + + let target = self.pkg_target.unwrap_or_else(|| { + match PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, + } + }); + + channels::start(ui, + &self.bldr_url.to_string(), + (&self.pkg_ident, target), + auth_token.as_deref()).await + } +} diff --git a/components/hab/src/cli_v4/pkg/config.rs b/components/hab/src/cli_v4/pkg/config.rs new file mode 100644 index 0000000000..a90d08d8ce --- /dev/null +++ b/components/hab/src/cli_v4/pkg/config.rs @@ -0,0 +1,29 @@ +// Implemenatation of `hab pkg config` + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use habitat_common::{cli::clap_validators::HabPkgIdentValueParser, + command::package::config}; + +use crate::error::Result as HabResult; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgConfigOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, +} + +impl PkgConfigOptions { + pub(super) fn do_config(&self) -> HabResult<()> { + config::start(&self.pkg_ident, &*FS_ROOT_PATH).map_err(Into::into) + } +} diff --git a/components/hab/src/cli_v4/pkg/delete.rs b/components/hab/src/cli_v4/pkg/delete.rs new file mode 100644 index 0000000000..0734f0cef0 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/delete.rs @@ -0,0 +1,57 @@ +// Implementation of `hab pkg delete` command + +use clap_v4 as clap; + +use 
clap::Parser; + +use habitat_common::{cli::{clap_validators::HabPkgIdentValueParser, + PACKAGE_TARGET_ENVVAR}, + ui::UI}; + +use habitat_core::package::{target, + PackageIdent, + PackageTarget}; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::delete, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgDeleteOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + /// A fully qualified package identifier (ex: core/busybox-static/1.42.2/20170513215502) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::full())] + pkg_ident: PackageIdent, + + /// A package target (ex: x86_64-windows) (default: system appropriate target) + #[arg(name = "PKG_TARGET", env = PACKAGE_TARGET_ENVVAR)] + pkg_target: Option, + + #[command(flatten)] + auth_token: AuthToken, +} + +impl PkgDeleteOptions { + pub(super) async fn do_delete(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.from_cli_or_config()?; + + let target = self.pkg_target.unwrap_or_else(|| { + match PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, + } + }); + + delete::start(ui, + &self.bldr_url.to_string(), + (&self.pkg_ident, target), + &auth_token).await + } +} diff --git a/components/hab/src/cli_v4/pkg/demote.rs b/components/hab/src/cli_v4/pkg/demote.rs new file mode 100644 index 0000000000..b7f2fece67 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/demote.rs @@ -0,0 +1,63 @@ +// Implementation of `hab pkg demote` command + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::{cli::{clap_validators::HabPkgIdentValueParser, + PACKAGE_TARGET_ENVVAR}, + ui::UI}; + +use habitat_core::{package::{target, + PackageIdent, + PackageTarget}, + ChannelIdent}; + +use 
crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::demote, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgDemoteOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + /// A fully qualified package identifier (ex: core/busybox-static/1.42.2/20170513215502) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::full())] + pkg_ident: PackageIdent, + + /// Demote from the specified release channel + #[arg(name = "CHANNEL")] + channel: ChannelIdent, + + /// A package target (ex: x86_64-windows) (default: system appropriate target) + #[arg(name = "PKG_TARGET", env = PACKAGE_TARGET_ENVVAR)] + pkg_target: Option, + + #[command(flatten)] + auth_token: AuthToken, +} + +impl PkgDemoteOptions { + pub(super) async fn do_demote(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.from_cli_or_config()?; + + let target = self.pkg_target.unwrap_or_else(|| { + match PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, + } + }); + + demote::start(ui, + &self.bldr_url.to_string(), + (&self.pkg_ident, target), + &self.channel, + auth_token.as_str()).await + } +} diff --git a/components/hab/src/cli_v4/pkg/dependencies.rs b/components/hab/src/cli_v4/pkg/dependencies.rs new file mode 100644 index 0000000000..2a58ab8b53 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/dependencies.rs @@ -0,0 +1,52 @@ +// Implemenatation of `hab pkg dependencies` + +use clap_v4 as clap; + +use clap::{ArgAction, + Parser}; + +use habitat_common::cli::clap_validators::HabPkgIdentValueParser; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use crate::{command::pkg::{dependencies, + DependencyRelation, + Scope}, + error::Result as HabResult}; + +#[derive(Debug, Clone, 
Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgDependenciesOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, + + /// Show transitive dependencies + #[arg(name = "TRANSITIVE", short = 't', long = "transitive", action= ArgAction::SetTrue)] + transitive: bool, + + /// Show packages which are dependent on this one + #[arg(name = "REVERSE", short = 'r', long = "reverse", action = ArgAction::SetTrue)] + reverse: bool, +} + +impl PkgDependenciesOptions { + pub(super) fn do_dependencies(&self) -> HabResult<()> { + let scope = if self.transitive { + Scope::PackageAndDependencies + } else { + Scope::Package + }; + + let relation = if self.reverse { + DependencyRelation::Supports + } else { + DependencyRelation::Requires + }; + + dependencies::start(&self.pkg_ident, scope, relation, &*FS_ROOT_PATH).map_err(Into::into) + } +} diff --git a/components/hab/src/cli_v4/pkg/download.rs b/components/hab/src/cli_v4/pkg/download.rs new file mode 100644 index 0000000000..7fc2fc1da2 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/download.rs @@ -0,0 +1,180 @@ +// Implementation of `hab pkg download` command + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_common::{cli::{clap_validators::{HabPkgIdentValueParser, + TomlOrPkgIdentFileValueParser}, + file_into_idents, + is_toml_file, + PACKAGE_TARGET_ENVVAR}, + ui::UI, + Error as HabitatCommonError}; + +use habitat_core::{package::{target, + PackageIdent, + PackageTarget}, + ChannelIdent}; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::{download, + download::{PackageSet, + PackageSetFile}}, + error::Result as HabResult, + PRODUCT, + VERSION}; + +#[derive(Debug, Clone, Parser)]
+#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgDownloadOptions { + #[command(flatten)] + auth_token: AuthToken, + + #[command(flatten)] + bldr_url: BldrUrl, + + /// Download from the specified release channel. Overridden if channel is specified in toml + /// file + #[arg(name = "CHANNEL", + short = 'c', + long = "channel", + default_value = "stable")] + channel: ChannelIdent, + + /// The path to store downloaded artifacts + #[arg(name = "DOWNLOAD_DIRECTORY", long = "download-directory")] + download_directory: Option, + + /// File with newline separated package identifiers, or TOML file (ending with .toml extension) + #[arg(name = "PKG_IDENT_FILE", long = "file", num_args = 1..=10, value_parser = TomlOrPkgIdentFileValueParser)] + pkg_ident_file: Vec, + + /// One or more Package Identifiers to download (eg. core/redis) + #[arg(name = "PKG_IDENT", num_args = 1.., value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: Vec, + + /// A package target (ex: x86_64-windows) (default: system appropriate target) + #[arg(name = "PKG_TARGET", env = PACKAGE_TARGET_ENVVAR, short = 't', long = "target")] + pkg_target: Option, + + /// Verify package integrity after download (Warning: this can be slow) + #[arg(name = "VERIFY", long = "verify", action = ArgAction::SetTrue)] + verify: bool, + + /// Ignore packages specified that are not present on the target Builder + #[arg(name = "IGNORE_MISSING_SEEDS", long = "ignore-missing-seeds", action = ArgAction::SetTrue)] + ignore_missing_seed: bool, +} + +impl PkgDownloadOptions { + pub(super) async fn do_download(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.try_from_cli_or_config(); + + let target = self.pkg_target.unwrap_or_else(|| { + match PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, 
+ } + }); + + let mut package_sets = vec![]; + + if !self.pkg_ident.is_empty() { + package_sets.push(PackageSet { target, + channel: self.channel.clone(), + idents: self.pkg_ident.clone() }); + } + let mut package_sets_from_file = self.idents_from_file_matches(target)?; + package_sets.append(&mut package_sets_from_file); + package_sets.retain(|set| !set.idents.is_empty()); + + download::start(ui, + &self.bldr_url.to_string(), + PRODUCT, + VERSION, + &package_sets, + self.download_directory.as_ref(), + auth_token.as_ref().map(|x| x.as_str()), + self.verify, + self.ignore_missing_seed).await + } + + fn idents_from_file_matches(&self, target: PackageTarget) -> HabResult> { + let mut sources: Vec = Vec::new(); + + if !self.pkg_ident_file.is_empty() { + for f in &self.pkg_ident_file { + if is_toml_file(f) { + let file_data = std::fs::read_to_string(f)?; + let toml_data: PackageSetFile = + toml::from_str(&file_data).map_err(HabitatCommonError::TomlParser)?; + sources.append(&mut toml_data.to_package_sets()?); + } else { + let idents_from_file = file_into_idents(f)?; + let package_set = PackageSet { idents: idents_from_file, + channel: self.channel.clone(), + target }; + sources.push(package_set) + } + } + } + Ok(sources) + } +} + +#[cfg(test)] +mod tests { + use super::{PackageTarget, + Parser, + PkgDownloadOptions}; + use std::{collections::HashMap, + path::Path}; + + #[test] + fn test_package_sets_from_file_e2e_tests_toml() { + let mut toml_files_map = HashMap::::new(); + toml_files_map.insert("bad_header.toml".to_string(), false); + toml_files_map.insert("bad_ident.toml".to_string(), false); + toml_files_map.insert("bad_target.toml".to_string(), false); + toml_files_map.insert("no_header.toml".to_string(), false); + toml_files_map.insert("no_target.toml".to_string(), true); + toml_files_map.insert("happy_path.toml".to_string(), true); + + let tomls_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let tomls_dir = 
Path::new(&tomls_dir).join("../../test/end-to-end/fixtures/pkg_download/"); + assert!(tomls_dir.is_dir()); + + let no_header_toml_string = "no_header.toml".to_string(); + let _ = toml_files_map.get(&no_header_toml_string); + for toml in tomls_dir.read_dir().unwrap() { + if let Ok(toml) = toml { + let key = toml.file_name().into_string().unwrap(); + let path = toml.path().into_os_string().into_string(); + eprintln!("{}: {:#?}", key, path); + if let Ok(path) = path { + let args = ["download", "--file", &path]; + let result = PkgDownloadOptions::try_parse_from(args); + assert!(result.is_ok(), "{:#?}", result.err().unwrap()); + + let pkg_download = result.unwrap(); + let result = + pkg_download.idents_from_file_matches(PackageTarget::active_target()); + let should_be_ok = toml_files_map.get(&key).unwrap(); + assert_eq!(result.is_ok(), + *should_be_ok, + "{}: {:#?}", + key, + result.err().unwrap()); + } + } + } + } +} diff --git a/components/hab/src/cli_v4/pkg/env.rs b/components/hab/src/cli_v4/pkg/env.rs new file mode 100644 index 0000000000..32f58ba245 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/env.rs @@ -0,0 +1,30 @@ +// Implementation of `hab pkg env` command + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::cli::clap_validators::HabPkgIdentValueParser; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use crate::command::pkg::env; + +use crate::error::Result as HabResult; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgEnvOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, +} + +impl PkgEnvOptions { + pub(super) fn do_env(&self) -> HabResult<()> { + env::start(&self.pkg_ident, &*FS_ROOT_PATH).map_err(Into::into) + } +} diff 
--git a/components/hab/src/cli_v4/pkg/exec.rs b/components/hab/src/cli_v4/pkg/exec.rs new file mode 100644 index 0000000000..d8026d9759 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/exec.rs @@ -0,0 +1,43 @@ +// Implementation of `hab pkg exec` command + +use clap_v4 as clap; + +use std::{ffi::OsString, + path::PathBuf}; + +use clap::Parser; + +use habitat_common::cli::clap_validators::HabPkgIdentValueParser; + +use habitat_core::package::PackageIdent; + +use crate::{command::pkg::exec, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgExecOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, + + /// The command to execute (ex: ls) + #[arg(name = "CMD")] + cmd: PathBuf, + + /// Arguments to be passed to the command + #[arg(name = "ARGS")] + args: Vec, +} + +impl PkgExecOptions { + pub(super) fn do_exec(&self) -> HabResult<()> { + // Required to convert to OsStr + // TODO: This should be internal implementation detail later on and move to actual command + // implementation when `v2` is removed + let args = self.args.iter().map(Into::into).collect::>(); + exec::start(&self.pkg_ident, &self.cmd, &args) + } +} diff --git a/components/hab/src/cli_v4/pkg/export.rs b/components/hab/src/cli_v4/pkg/export.rs new file mode 100644 index 0000000000..2f99e042ab --- /dev/null +++ b/components/hab/src/cli_v4/pkg/export.rs @@ -0,0 +1,72 @@ +// Implementation of `hab pkg export` command + +use std::ffi::OsString; + +use clap_v4 as clap; + +use clap::{Args, + Subcommand}; + +use habitat_common::ui::{UIWriter, + UI}; + +use crate::{command::pkg::export, + error::Result as HabResult}; + +#[derive(Debug, Clone, Args)] +pub(crate) struct 
PkgExportCommandOptions { + /// Arguments to be passed to the command + #[arg(name = "ARGS")] + args: Vec, +} + +#[derive(Debug, Clone, Subcommand)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) enum PkgExportCommand { + /// Container Exporter + #[cfg(any(target_os = "linux", target_os = "windows"))] + Container(PkgExportCommandOptions), + + #[cfg(any(target_os = "linux", target_os = "windows"))] + #[command(hide = true)] + Docker(PkgExportCommandOptions), + + /// Tar Exporter + #[cfg(any(target_os = "linux", target_os = "windows"))] + Tar(PkgExportCommandOptions), +} + +impl PkgExportCommand { + pub(super) async fn do_export(&self, ui: &mut UI) -> HabResult<()> { + match self { + #[cfg(any(target_os = "linux", target_os = "windows"))] + PkgExportCommand::Container(opts) => { + export::container::start(ui, + &opts.args + .iter() + .map(|s| OsString::from(s)) + .collect::>()).await + } + #[cfg(any(target_os = "linux", target_os = "windows"))] + PkgExportCommand::Docker(opts) => { + ui.warn("'hab pkg export docker' is now a deprecated alias for 'hab pkg export \ + container'. 
Please update your automation and processes accordingly.")?; + export::container::start(ui, + &opts.args + .iter() + .map(|s| OsString::from(s)) + .collect::>()).await + } + #[cfg(any(target_os = "linux", target_os = "windows"))] + PkgExportCommand::Tar(opts) => { + export::tar::start(ui, + &opts.args + .iter() + .map(|s| OsString::from(s)) + .collect::>()).await + } + } + } +} diff --git a/components/hab/src/cli_v4/pkg/hash.rs b/components/hab/src/cli_v4/pkg/hash.rs new file mode 100644 index 0000000000..c243919402 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/hash.rs @@ -0,0 +1,44 @@ +// Implementation of `hab pkg hash` command + +use clap_v4 as clap; + +use std::io::BufRead; + +use clap::Parser; + +use habitat_common::cli::clap_validators::FileExistsValueParser; + +use crate::command::pkg::hash; + +use crate::error::Result as HabResult; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgHashOptions { + /// Filepath to the Habitat Package file + #[arg(name = "SOURCE", value_parser = FileExistsValueParser)] + source: Option, /* TODO: Convert it to more semantic `PathBuf`, when we get rid of + * `clap-v2` functionality, revisit `command::pkg::hash` */ +} + +impl PkgHashOptions { + pub(super) fn do_hash(&self) -> HabResult<()> { + match &self.source { + Some(source) => { + // hash single file + hash::start(source.as_str()) + } + None => { + // read files from stdin + let stdin = std::io::stdin(); + for line in stdin.lock().lines() { + let file = line?; + hash::start(file.trim_end())?; + } + Ok(()) + } + } + } +} diff --git a/components/hab/src/cli_v4/pkg/header.rs b/components/hab/src/cli_v4/pkg/header.rs new file mode 100644 index 0000000000..4e18f5d4d5 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/header.rs @@ -0,0 +1,32 @@ +// Implementation of `hab pkg header` command +use clap_v4 as clap; + 
+use std::path::PathBuf; + +use clap::Parser; + +use habitat_core::crypto; + +use habitat_common::{cli::clap_validators::FileExistsValueParser, + ui::UI}; + +use crate::{command::pkg::header, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgHeaderOptions { + /// A path to a Habitat Artifact (ex: /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(name = "SOURCE", value_parser = FileExistsValueParser)] + source: String, +} + +impl PkgHeaderOptions { + pub(super) fn do_header(&self, ui: &mut UI) -> HabResult<()> { + crypto::init()?; + + header::start(ui, &PathBuf::from(&self.source)) + } +} diff --git a/components/hab/src/cli_v4/pkg/info.rs b/components/hab/src/cli_v4/pkg/info.rs new file mode 100644 index 0000000000..80467ad8bb --- /dev/null +++ b/components/hab/src/cli_v4/pkg/info.rs @@ -0,0 +1,41 @@ +// Implementation of `hab pkg info` command +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_core::crypto; + +use habitat_common::{cli::clap_validators::FileExistsValueParser, + ui::UI}; + +use crate::{command::pkg::info, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgInfoOptions { + /// Output will be rendered in json. 
(Includes extended metadata) + #[arg(name = "TO_JSON", + short = 'j', + long = "json", + action = ArgAction::SetTrue)] + json: bool, + + // TODO: Move to semantic PathBuf after CLAP-v2 support is removed kept due to Clap V2 quirk + /// A path to a Habitat Artifact (ex: /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(name = "SOURCE", value_parser = FileExistsValueParser)] + source: String, +} + +impl PkgInfoOptions { + pub(super) fn do_info(&self, ui: &mut UI) -> HabResult<()> { + crypto::init()?; + + info::start(ui, &Into::::into(self.source.clone()), self.json) + } +} diff --git a/components/hab/src/cli_v4/pkg/install.rs b/components/hab/src/cli_v4/pkg/install.rs new file mode 100644 index 0000000000..d9ae373c4c --- /dev/null +++ b/components/hab/src/cli_v4/pkg/install.rs @@ -0,0 +1,153 @@ +// Implementation of `hab pkg install` command + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{builder::NonEmptyStringValueParser, + parser::ValueSource, + ArgAction, + CommandFactory, + Parser}; + +use habitat_core::{env::Config, + fs::{cache_artifact_path, + FS_ROOT_PATH}, + ChannelIdent}; + +use habitat_common::{cli::{BINLINK_DIR_ENVVAR, + DEFAULT_BINLINK_DIR}, + command::package::install::{self, + InstallHookMode, + InstallMode, + InstallSource, + LocalPackageUsage}, + ui::UI, + FeatureFlag, + FEATURE_FLAGS}; + +use crate::{command::pkg::binlink, + error::Result as HabResult, + PRODUCT, + VERSION}; + +use crate::cli_v4::utils::{AuthToken, + BldrUrl}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + rename_all = "screaming_snake", + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgInstallOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + /// Install from the specified release channel + #[arg(short = 'c', + long = "channel", + default_value = "stable", + env = ChannelIdent::ENVVAR)] + channel: ChannelIdent, + + /// One 
or more Habitat package identifiers (ex: acme/redis) and/or filepaths to a Habitat + /// Artifact (ex: /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(required = true)] + pkg_ident_or_artifact: Vec, + + /// Binlink all binaries from installed package(s) into BINLINK_DIR + #[arg(short = 'b', long = "binlink")] + binlink: bool, + + /// Binlink all binaries from installed package(s) into BINLINK_DIR + #[arg(long = "binlink-dir", + default_value = DEFAULT_BINLINK_DIR, + env = BINLINK_DIR_ENVVAR, value_parser = NonEmptyStringValueParser::new())] + binlink_dir: String, + + /// Overwrite existing binlinks + #[arg(short = 'f', long = "force", action = ArgAction::SetTrue)] + force: bool, + + #[command(flatten)] + auth_token: AuthToken, + + /// Do not run any install hooks + #[arg(long = "ignore-install-hook", action = ArgAction::SetTrue)] + ignore_install_hook: bool, + + /// Install packages in offline mode + #[arg(long = "offline", + action = ArgAction::SetTrue, + hide = !FEATURE_FLAGS.contains(FeatureFlag::OFFLINE_INSTALL))] + offline: bool, + + /// Do not use locally-installed packages when a corresponding package cannot be installed + /// from Builder + #[arg(long = "ignore-local", + action = ArgAction::SetTrue, + )] + ignore_local: bool, +} + +impl PkgInstallOptions { + pub(crate) async fn do_install(&self, + ui: &mut UI, + feature_flags: FeatureFlag) + -> HabResult<()> { + let pkg_install_args: Vec<_> = std::env::args_os().skip(2).collect(); + + let auth_token = self.auth_token.try_from_cli_or_config(); + + let install_mode = if feature_flags.contains(FeatureFlag::OFFLINE_INSTALL) && self.offline { + InstallMode::Offline + } else { + InstallMode::default() + }; + + let local_package_usage = if self.ignore_local { + LocalPackageUsage::Ignore + } else { + LocalPackageUsage::default() + }; + + // BUGFIX: condition was inverted (`if !self.ignore_install_hook`), which skipped + // install hooks on every default install and ran them only when the user asked + // to ignore them. Hooks must be skipped only when `--ignore-install-hook` is set. + let install_hook_mode = if self.ignore_install_hook { + InstallHookMode::Ignore + } else { + InstallHookMode::default() + }; + + let matches =
Self::command().get_matches_from(pkg_install_args); + let do_binlink = match matches.value_source("binlink_dir") { + Some(ValueSource::CommandLine) => true, + _ => self.binlink, + }; + + for install_source in &self.pkg_ident_or_artifact { + // let install_source = InstallSource::from_str(install_source)?; + let pkg_install = install::start(ui, + &self.bldr_url.to_string(), + &self.channel, + &install_source, + PRODUCT, + VERSION, + &FS_ROOT_PATH, + &cache_artifact_path(Some(FS_ROOT_PATH.as_path())), + auth_token.as_deref(), + &install_mode, + &local_package_usage, + install_hook_mode).await?; + + if do_binlink { + let binlink_dir = PathBuf::from(&self.binlink_dir); + binlink::binlink_all_in_pkg(ui, + pkg_install.ident(), + &binlink_dir, + &FS_ROOT_PATH, + self.force)?; + } + } + + Ok(()) + } +} diff --git a/components/hab/src/cli_v4/pkg/list.rs b/components/hab/src/cli_v4/pkg/list.rs new file mode 100644 index 0000000000..9f8faac002 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/list.rs @@ -0,0 +1,51 @@ +// Implemenatation of `hab pkg list` + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_core::package::PackageIdent; + +use habitat_common::cli::clap_validators::HabOriginValueParser; + +use crate::{command::pkg::{list, + list::ListingType}, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +#[group(required = true, multiple = false)] +pub(crate) struct PkgListOptions { + /// List all installed packages + #[arg(name = "ALL", short = 'a', long = "all")] + all: bool, + + // TODO : Validations + /// An origin to list + #[arg(name = "ORIGIN", short = 'o', long = "origin", value_parser = HabOriginValueParser)] + origin: Option, + + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT")] + pkg_ident: Option, +} + +impl PkgListOptions { + 
pub(super) fn do_list(&self) -> HabResult<()> { list::start(&self.into()) } +} + +impl From<&PkgListOptions> for ListingType { + fn from(opts: &PkgListOptions) -> Self { + if opts.all { + ListingType::AllPackages + } else if let Some(origin) = &opts.origin { + ListingType::Origin(origin.clone()) + } else if let Some(ident) = &opts.pkg_ident { + ListingType::Ident(ident.clone()) + } else { + unreachable!(); + } + } +} diff --git a/components/hab/src/cli_v4/pkg/path.rs b/components/hab/src/cli_v4/pkg/path.rs new file mode 100644 index 0000000000..66b5b17f35 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/path.rs @@ -0,0 +1,27 @@ +// Implementation of `hab pkg path` + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::cli::clap_validators::HabPkgIdentValueParser; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use crate::{command::pkg::path, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgPathOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, +} + +impl PkgPathOptions { + pub(super) fn do_path(&self) -> HabResult<()> { path::start(&self.pkg_ident, &FS_ROOT_PATH) } +} diff --git a/components/hab/src/cli_v4/pkg/promote.rs b/components/hab/src/cli_v4/pkg/promote.rs new file mode 100644 index 0000000000..d58e3590d7 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/promote.rs @@ -0,0 +1,63 @@ +// Implementation of `hab pkg promote` command + +use clap_v4 as clap; + +use clap::Parser; + +use habitat_common::{cli::{clap_validators::HabPkgIdentValueParser, + PACKAGE_TARGET_ENVVAR}, + ui::UI}; + +use habitat_core::{package::{target, + PackageIdent, + PackageTarget}, + ChannelIdent}; + +use 
crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::promote, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgPromoteOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + /// A fully qualified package identifier (ex: core/busybox-static/1.42.2/20170513215502) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::full())] + pkg_ident: PackageIdent, + + /// Promote to the specified release channel + #[arg(name = "CHANNEL")] + channel: ChannelIdent, + + /// A package target (ex: x86_64-windows) (default: system appropriate target) + #[arg(name = "PKG_TARGET", env = PACKAGE_TARGET_ENVVAR)] + pkg_target: Option, + + #[command(flatten)] + auth_token: AuthToken, +} + +impl PkgPromoteOptions { + pub(super) async fn do_promote(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.from_cli_or_config()?; + + let target = self.pkg_target.unwrap_or_else(|| { + match PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, + } + }); + + promote::start(ui, + &self.bldr_url.to_string(), + (&self.pkg_ident, target), + &self.channel, + auth_token.as_str()).await + } +} diff --git a/components/hab/src/cli_v4/pkg/provides.rs b/components/hab/src/cli_v4/pkg/provides.rs new file mode 100644 index 0000000000..ecb04cd270 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/provides.rs @@ -0,0 +1,38 @@ +// Implementation of `hab pkg provides` command + +use clap_v4 as clap; + +use clap::{ArgAction, + Parser}; + +use habitat_core::fs::FS_ROOT_PATH; + +use crate::{command::pkg::provides, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} 
\ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgProvidesOptions { + /// File name to find + #[arg(name = "FILE")] + file: String, + + /// Show fully qualified package names (ex: core/busybox-static/1.24.2/20160708162350) + #[arg(name = "FULL_RELEASES", short = 'r', action = ArgAction::SetTrue)] + full_releases: bool, + + /// Show full path to file + #[arg(name = "FULL_PATHS", short = 'p', action = ArgAction::SetTrue)] + full_paths: bool, +} + +impl PkgProvidesOptions { + pub(super) fn do_provides(&self) -> HabResult<()> { + provides::start(&self.file, + &FS_ROOT_PATH, + self.full_releases, + self.full_paths) + } +} diff --git a/components/hab/src/cli_v4/pkg/search.rs b/components/hab/src/cli_v4/pkg/search.rs new file mode 100644 index 0000000000..b3e6045679 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/search.rs @@ -0,0 +1,41 @@ +// Implementation of `hab pkg search` command + +use clap_v4 as clap; + +use clap::Parser; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl}, + command::pkg::search, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgSearchOptions { + /// Search term + #[arg(name = "SEARCH_TERM")] + search_term: String, + + #[command(flatten)] + bldr_url: BldrUrl, + + #[command(flatten)] + auth_token: AuthToken, + + /// Limit how many packages to retrieve + #[arg(name = "LIMIT", short = 'l', long = "limit", default_value_t = 50)] + limit: usize, +} + +impl PkgSearchOptions { + pub(crate) async fn do_search(&self) -> HabResult<()> { + let auth_token = self.auth_token.try_from_cli_or_config(); + + search::start(&self.search_term, + &self.bldr_url.to_string(), + self.limit, + auth_token.as_deref()).await + } +} diff --git a/components/hab/src/cli_v4/pkg/sign.rs b/components/hab/src/cli_v4/pkg/sign.rs new file mode 100644 index 0000000000..6365303eed --- 
/dev/null +++ b/components/hab/src/cli_v4/pkg/sign.rs @@ -0,0 +1,65 @@ +// Implementation of `hab pkg sign` command + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::Parser; + +use habitat_core::{crypto, + crypto::keys::KeyCache, + origin::Origin}; + +use habitat_common::{cli::clap_validators::{FileExistsValueParser, + HabOriginValueParser}, + cli_config::CliConfig, + ui::UI}; + +use crate::{cli_v4::utils::CacheKeyPath, + command::pkg::sign, + error::{Error as HabError, + Result as HabResult}}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgSignOptions { + /// Origin key used to create signature + #[arg(name = "ORIGIN", long = "origin", env=crate::ORIGIN_ENVVAR, value_parser = HabOriginValueParser)] + origin: Option, + + // TODO: Move to semantic PathBuf after CLAP-v2 support is removed kept due to Clap V2 quirk + /// A path to a source archive file (ex: /home/acme-redis-3.0.7-21120102031201.tar.xz) + #[arg(name = "SOURCE", value_parser = FileExistsValueParser)] + source: String, + + /// The destination path to the signed Habitat Artifact (ex: + /// /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(name = "DEST")] + dest: PathBuf, + + #[command(flatten)] + cache_key_path: CacheKeyPath, +} + +impl PkgSignOptions { + pub(crate) fn do_sign(&self, ui: &mut UI) -> HabResult<()> { + let origin = match &self.origin { + Some(origin) => origin.clone(), + None => { + CliConfig::load()?.origin.ok_or_else(|| { + HabError::CryptoCLI("No origin specified".to_string()) + })? 
+ } + }; + + crypto::init()?; + let key_cache = KeyCache::new::((&self.cache_key_path).into()); + let key = key_cache.latest_secret_origin_signing_key(&origin)?; + sign::start(ui, + &key, + &Into::::into(self.source.clone()), + &self.dest) + } +} diff --git a/components/hab/src/cli_v4/pkg/uninstall.rs b/components/hab/src/cli_v4/pkg/uninstall.rs new file mode 100644 index 0000000000..1aef323a22 --- /dev/null +++ b/components/hab/src/cli_v4/pkg/uninstall.rs @@ -0,0 +1,81 @@ +// Implementation of `hab pkg uninstall` command +use clap_v4 as clap; + +use clap::{ArgAction, + Parser}; + +use habitat_core::{fs::FS_ROOT_PATH, + package::PackageIdent}; + +use habitat_common::{cli::clap_validators::HabPkgIdentValueParser, + ui::UI}; + +use crate::{command::pkg::{uninstall, + uninstall::UninstallHookMode, + ExecutionStrategy, + Scope}, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgUninstallOptions { + /// A package identifier (ex: core/redis, core/busybox-static/1.42.2) + #[arg(name = "PKG_IDENT", value_parser = HabPkgIdentValueParser::simple())] + pkg_ident: PackageIdent, + + /// Just show what would be uninstalled, don't actually do it + #[arg(name = "DRYRUN", short = 'd', long = "dryrun", action = ArgAction::SetTrue)] + dryrun: bool, + + /// Only keep this number of latest packages uninstalling all others. + #[arg(name = "KEEP_LATEST", long = "keep-latest")] + keep_latest: Option, + + /// Identifier of one or more packages that should not be uninstalled. 
(ex: core/redis, + /// core/busybox-static/1.42.2/21120102031201) + #[arg(name = "EXCLUDE", long = "exclude")] + exclude: Vec, + + /// Don't uninstall dependencies + #[arg(name = "NO_DEPS", long = "no-deps")] + no_deps: bool, + + /// Do not run any uninstall hooks + #[arg(name = "IGNORE_UNINSTALL_HOOK", long = "ignore-uninstall-hook")] + ignore_uninstall_hook: bool, +} + +impl PkgUninstallOptions { + pub(crate) async fn do_uninstall(&self, ui: &mut UI) -> HabResult<()> { + let exec_strategy = if self.dryrun { + ExecutionStrategy::DryRun + } else { + ExecutionStrategy::Run + }; + + let uninstall_mode = self.keep_latest.into(); + + let scope = if self.no_deps { + Scope::Package + } else { + Scope::PackageAndDependencies + }; + + let uninstall_hook_mode = if self.ignore_uninstall_hook { + UninstallHookMode::Ignore + } else { + UninstallHookMode::default() + }; + + uninstall::start(ui, + &self.pkg_ident, + &FS_ROOT_PATH, + exec_strategy, + uninstall_mode, + scope, + &self.exclude, + uninstall_hook_mode).await + } +} diff --git a/components/hab/src/cli_v4/pkg/upload.rs b/components/hab/src/cli_v4/pkg/upload.rs new file mode 100644 index 0000000000..f22b134d2a --- /dev/null +++ b/components/hab/src/cli_v4/pkg/upload.rs @@ -0,0 +1,83 @@ +// Implementation of `hab pkg upload` command + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::{ArgAction, + Parser}; + +use habitat_common::{cli::clap_validators::FileExistsValueParser, + ui::UI}; + +use habitat_core::{crypto::keys::KeyCache, + ChannelIdent}; + +use habitat_api_client::BuildOnUpload; + +use crate::{cli_v4::utils::{AuthToken, + BldrUrl, + CacheKeyPath}, + command::pkg::upload, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgUploadOptions { + #[command(flatten)] + bldr_url: BldrUrl, + + #[command(flatten)] + 
auth_token: AuthToken, + + /// Optional additional release channel to upload package to. Packages are always uploaded + /// to `unstable`, regardless of the value of this option + #[arg(name = "CHANNEL", short = 'c', long = "channel")] + channel: Option, + + /// Skips checking availability of package and force uploads, potentially overwriting a + /// stored copy of a package. (default: false) + #[arg(name = "FORCE", long = "force", action = ArgAction::SetTrue)] + force: bool, + + /// Disable auto-build for all packages in this upload + #[arg(name = "NO_BUILD", long = "no-build", action = ArgAction::SetTrue)] + no_build: bool, + + // TODO: Move to semantic PathBuf after CLAP-v2 support is removed kept due to Clap V2 quirk + /// One or more filepaths to a Habitat Artifact (ex: + /// /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(name = "HART_FILE", required = true, value_parser = FileExistsValueParser)] + hart_file: Vec, + + #[command(flatten)] + cache_key_path: CacheKeyPath, +} + +impl PkgUploadOptions { + pub(crate) async fn do_upload(&self, ui: &mut UI) -> HabResult<()> { + let auth_token = self.auth_token.from_cli_or_config()?; + + let auto_build = if self.no_build { + BuildOnUpload::Disable + } else { + BuildOnUpload::PackageDefault + }; + + let key_cache = KeyCache::new::((&self.cache_key_path).into()); + + for hart_file in &self.hart_file { + upload::start(ui, + &self.bldr_url.to_string(), + &self.channel, + &auth_token, + &Into::::into(hart_file.clone()), + self.force, + auto_build, + &key_cache).await?; + } + Ok(()) + } +} diff --git a/components/hab/src/cli_v4/pkg/verify.rs b/components/hab/src/cli_v4/pkg/verify.rs new file mode 100644 index 0000000000..9a8d24816e --- /dev/null +++ b/components/hab/src/cli_v4/pkg/verify.rs @@ -0,0 +1,40 @@ +// Implementation of `hab pkg verify` command + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::Parser; + +use habitat_core::{crypto, + crypto::keys::KeyCache}; + +use 
habitat_common::{cli::clap_validators::FileExistsValueParser, + ui::UI}; + +use crate::{cli_v4::utils::CacheKeyPath, + command::pkg::verify, + error::Result as HabResult}; + +#[derive(Debug, Clone, Parser)] +#[command(arg_required_else_help = true, + help_template = "{name} {version} {author-section} {about-section} \n{usage-heading} \ + {usage}\n\n{all-args}\n")] +pub(crate) struct PkgVerifyOptions { + // TODO: Move to semantic PathBuf once Clap-v2 is removed + /// A path to a Habitat Artifact (ex: /home/acme-redis-3.0.7-21120102031201-x86_64-linux.hart) + #[arg(name = "SOURCE", value_parser = FileExistsValueParser)] + source: String, + + #[command(flatten)] + cache_key_path: CacheKeyPath, +} + +impl PkgVerifyOptions { + pub(super) fn do_verify(&self, ui: &mut UI) -> HabResult<()> { + crypto::init()?; + let key_cache = KeyCache::new::((&self.cache_key_path).into()); + + verify::start(ui, &Into::::into(self.source.clone()), &key_cache) + } +} diff --git a/components/hab/src/cli_v4/utils.rs b/components/hab/src/cli_v4/utils.rs new file mode 100644 index 0000000000..7db5e0aeff --- /dev/null +++ b/components/hab/src/cli_v4/utils.rs @@ -0,0 +1,306 @@ +// Utilities that are used by v4 macros +// +// Note we are duplicating this functionality because trivially using +// `cfg_attr(feature = "v4"),...]` is not easy to make work with existing code. Eventually this +// will be the only `util` left (hope so) + +use clap_v4 as clap; + +use std::path::PathBuf; + +use clap::Parser; +use lazy_static::lazy_static; +use url::Url; + +use habitat_common::cli_config::CliConfig; + +use habitat_core::{crypto::CACHE_KEY_PATH_ENV_VAR, + env as hcore_env, + fs::CACHE_KEY_PATH, + url::{BLDR_URL_ENVVAR, + DEFAULT_BLDR_URL}, + AUTH_TOKEN_ENVVAR}; + +use crate::error::{Error as HabError, + Result as HabResult}; + +lazy_static! 
{ + pub(crate) static ref CACHE_KEY_PATH_DEFAULT: String = + CACHE_KEY_PATH.to_string_lossy().to_string(); +} + +#[derive(Debug, Clone, Parser)] +pub(crate) struct CacheKeyPath { + /// Cache for creating and searching for encryption keys + #[arg(long = "cache-key-path", + env = CACHE_KEY_PATH_ENV_VAR, + default_value = &*CACHE_KEY_PATH_DEFAULT)] + pub(crate) cache_key_path: PathBuf, +} + +impl From for CacheKeyPath { + fn from(cache_key_path: PathBuf) -> Self { Self { cache_key_path } } +} + +impl From<&CacheKeyPath> for PathBuf { + fn from(cache_key_path: &CacheKeyPath) -> PathBuf { cache_key_path.cache_key_path.clone() } +} + +#[derive(Debug, Clone, Parser)] +pub(crate) struct BldrUrl { + // TODO:agadgil: Use the Url Validator + /// Specify an alternate Builder endpoint. + #[arg(name = "BLDR_URL", short = 'u', long = "url")] + bldr_url: Option, +} + +impl BldrUrl { + // + pub(crate) fn to_string(&self) -> String { + if let Some(url) = &self.bldr_url { + url.to_string() + } else { + match hcore_env::var(BLDR_URL_ENVVAR) { + Ok(v) => v, + Err(_) => { + // Okay to unwrap it never returns Err!! + match CliConfig::load().unwrap().bldr_url { + Some(v) => v, + None => DEFAULT_BLDR_URL.to_string(), + } + } + } + } + } +} + +#[derive(Debug, Clone, Parser)] +pub(crate) struct AuthToken { + // TODO: Add Validator for this? + /// Authentication token for Builder. + #[arg(name = "AUTH_TOKEN", short = 'z', long = "auth")] + auth_token: Option, +} + +impl AuthToken { + // This function returns a result. Use this when `auth_token` is required. Either as a command + // line option or env or from config. 
+ pub(crate) fn from_cli_or_config(&self) -> HabResult { + if let Some(auth_token) = &self.auth_token { + Ok(auth_token.clone()) + } else { + match hcore_env::var(AUTH_TOKEN_ENVVAR) { + Ok(v) => Ok(v), + Err(_) => { + CliConfig::load()?.auth_token.ok_or_else(|| { + HabError::ArgumentError("No auth token \ + specified" + .into()) + }) + } + } + } + } + + // This function returns an `Option`, so if there is any "error" reading from config or env is + // not set simply returns a None. + pub(crate) fn try_from_cli_or_config(&self) -> Option { + match self.from_cli_or_config() { + Ok(v) => Some(v), + Err(_) => None, + } + } +} + +#[cfg(test)] +mod tests { + mod auth_token { + + use crate::cli_v4::utils::AuthToken; + + use clap_v4 as clap; + + use clap::Parser; + + habitat_core::locked_env_var!(HAB_AUTH_TOKEN, locked_auth_token); + + #[derive(Debug, Clone, Parser)] + struct TestAuthToken { + #[command(flatten)] + a: AuthToken, + } + + #[test] + fn required_env_no_cli_success() { + let env_var = locked_auth_token(); + env_var.set("env-auth-token"); + + let args = ["test-auth-token"]; + let result = TestAuthToken::try_parse_from(args); + assert!(result.is_ok(), "{:?}", result.err().unwrap()); + + let test_auth_token = result.unwrap(); + let auth_token = test_auth_token.a.from_cli_or_config(); + assert!(auth_token.is_ok(), "{:#?}", auth_token.err().unwrap()); + } + + #[test] + fn required_no_env_cli_success() { + let env_var = locked_auth_token(); + env_var.unset(); + + let args = ["test-auth-token", "--auth", "foo-bar"]; + let result = TestAuthToken::try_parse_from(args); + assert!(result.is_ok(), "{:?}", result.err().unwrap()); + } + + #[test] + fn required_no_env_no_cli_error() { + let env_var = locked_auth_token(); + env_var.unset(); + + let args = ["test-auth-token"]; + let result = TestAuthToken::try_parse_from(args); + assert!(result.is_ok(), "{:?}", result.err().unwrap()); + + let test_auth_token = result.unwrap(); + let auth_token = 
test_auth_token.a.from_cli_or_config(); + assert!(auth_token.is_err(), "{:#?}", auth_token.ok().unwrap()); + } + + #[test] + fn required_empty_env_no_cli_error() { + let env_var = locked_auth_token(); + env_var.set(""); + + let args = ["test-auth-token"]; + let result = TestAuthToken::try_parse_from(args); + assert!(result.is_ok(), "{:?}", result.err().unwrap()); + + let test_auth_token = result.unwrap(); + let auth_token = test_auth_token.a.from_cli_or_config(); + assert!(auth_token.is_err(), "{:#?}", auth_token.ok().unwrap()); + } + #[test] + fn optional_empty_env_no_cli_none() { + let env_var = locked_auth_token(); + env_var.set(""); + + let args = ["test-auth-token"]; + let result = TestAuthToken::try_parse_from(args); + assert!(result.is_ok(), "{:?}", result.err().unwrap()); + + let test_auth_token = result.unwrap(); + let auth_token = test_auth_token.a.try_from_cli_or_config(); + assert!(auth_token.is_none(), "{:#?}", auth_token.unwrap()); + } + + #[test] + fn tok_optional_from_env_no_cli_some() { + let env_var = locked_auth_token(); + env_var.set("env-auth-token"); + + let args = ["test-auth-token"]; + let result = TestAuthToken::try_parse_from(args); + assert!(result.is_ok(), "{:?}", result.err().unwrap()); + + let test_auth_token = result.unwrap(); + let auth_token = test_auth_token.a.try_from_cli_or_config(); + assert_eq!(Some("env-auth-token".to_string()), + auth_token, + "{:#?}", + auth_token); + } + + #[test] + fn optional_no_env_from_cli_some() { + let env_var = locked_auth_token(); + env_var.set("env-auth-token"); + + let args = ["test-auth-token", "--auth", "foo-bar"]; + let result = TestAuthToken::try_parse_from(args); + assert!(result.is_ok(), "{:?}", result.err().unwrap()); + + let test_auth_token = result.unwrap(); + let auth_token = test_auth_token.a.try_from_cli_or_config(); + assert_eq!(Some("foo-bar".to_string()), auth_token, "{:#?}", auth_token); + } + } + + mod bldr_url { + + use crate::cli_v4::utils::{BldrUrl, + DEFAULT_BLDR_URL}; + + use 
clap_v4 as clap; + + use clap::Parser; + + habitat_core::locked_env_var!(HAB_BLDR_URL, locked_bldr_url); + + #[derive(Debug, Clone, Parser)] + struct TestBldrUrl { + #[command(flatten)] + u: BldrUrl, + } + + #[test] + fn no_env_no_cli_default() { + let env_var = locked_bldr_url(); + env_var.unset(); + + let args = ["test-bldr-url"]; + let result = TestBldrUrl::try_parse_from(args); + assert!(result.is_ok(), "{:#?}", result.err().unwrap()); + + let test_bldr_url = result.unwrap(); + let bldr_url = test_bldr_url.u.to_string(); + assert_eq!(bldr_url.as_str(), DEFAULT_BLDR_URL, "{:#?}", bldr_url); + } + + #[test] + fn empty_env_no_cli_default() { + let env_var = locked_bldr_url(); + env_var.set(""); + + let args = ["test-bldr-url"]; + let result = TestBldrUrl::try_parse_from(args); + assert!(result.is_ok(), "{:#?}", result.err().unwrap()); + + let test_bldr_url = result.unwrap(); + let bldr_url = test_bldr_url.u.to_string(); + assert_eq!(bldr_url.as_str(), DEFAULT_BLDR_URL, "{:#?}", bldr_url); + } + + #[test] + fn env_cli_passed_value() { + let test_bldr_url_val = "https://test.bldr.habitat.sh/"; + let cli_bldr_url_val = "https://cli.bldr.habitat.sh/"; + let env_var = locked_bldr_url(); + env_var.set(test_bldr_url_val); + + let args = ["test-bldr-url", "--url", cli_bldr_url_val]; + let result = TestBldrUrl::try_parse_from(args); + assert!(result.is_ok(), "{:#?}", result.err().unwrap()); + + let test_bldr_url = result.unwrap(); + let bldr_url = test_bldr_url.u.to_string(); + assert_eq!(bldr_url.as_str(), cli_bldr_url_val, "{:#?}", bldr_url); + } + + #[test] + fn env_no_cli_env_value() { + let test_bldr_url_val = "https://test.bldr.habitat.sh/"; + let env_var = locked_bldr_url(); + env_var.set(test_bldr_url_val); + + let args = ["test-bldr-url"]; + let result = TestBldrUrl::try_parse_from(args); + assert!(result.is_ok(), "{:#?}", result.err().unwrap()); + + let test_bldr_url = result.unwrap(); + let bldr_url = test_bldr_url.u.to_string(); + assert_eq!(bldr_url.as_str(), 
test_bldr_url_val, "{:#?}", bldr_url); + } + } +} diff --git a/components/hab/src/command/launcher.rs b/components/hab/src/command/launcher.rs index 7bbc325edb..67561894b7 100644 --- a/components/hab/src/command/launcher.rs +++ b/components/hab/src/command/launcher.rs @@ -1,5 +1,4 @@ -use crate::{cli::hab::sup::SupRun, - command::sup::{SUP_CMD, +use crate::{command::sup::{SUP_CMD, SUP_CMD_ENVVAR, SUP_PKG_IDENT}, common::ui::UI, @@ -12,14 +11,19 @@ use crate::{cli::hab::sup::SupRun, os::process, package::PackageIdent}, VERSION}; + use std::{ffi::OsString, path::PathBuf, str::FromStr}; +#[cfg(feature = "v2")] +use crate::cli::hab::sup::SupRun; + const LAUNCH_CMD: &str = "hab-launch"; const LAUNCH_CMD_ENVVAR: &str = "HAB_LAUNCH_BINARY"; const LAUNCH_PKG_IDENT: &str = "core/hab-launcher"; +#[cfg(feature = "v2")] pub async fn start(ui: &mut UI, sup_run: SupRun, args: &[OsString]) -> Result<()> { init()?; let channel = sup_run.shared_load.channel; diff --git a/components/hab/src/command/origin/key/export.rs b/components/hab/src/command/origin/key/export.rs index e9d6ceaf87..1b3d624ab1 100644 --- a/components/hab/src/command/origin/key/export.rs +++ b/components/hab/src/command/origin/key/export.rs @@ -1,5 +1,5 @@ -use crate::{cli::KeyType, - error::Result}; +use crate::{error::Result, + key_type::KeyType}; use habitat_core::{crypto::keys::{KeyCache, KeyFile}, origin::Origin}; diff --git a/components/hab/src/command/pkg/download.rs b/components/hab/src/command/pkg/download.rs index 1f4c712f80..7704a8110b 100644 --- a/components/hab/src/command/pkg/download.rs +++ b/components/hab/src/command/pkg/download.rs @@ -31,7 +31,8 @@ use std::{collections::{HashMap, HashSet}, fs::DirBuilder, path::{Path, - PathBuf}}; + PathBuf}, + str::FromStr}; use crate::{api_client::{self, retry_builder_api, @@ -78,6 +79,43 @@ pub struct PackageSetFile { pub targets: HashMap>, } +// TODO: Remove this clippy allow once `v2` support is removed. 
+#[allow(dead_code)] +impl PackageSetFile { + // Get Package Sets from the `toml` data. Following validations are performed - + // format_version is 1 + pub(crate) fn to_package_sets(&self) -> Result> { + match self.format_version { + Some(version) => { + if version != 1 { + Err(Error::PackageSetParseError(format!("format_version \ + invalid, only \ + version 1 allowed \ + ({} provided", + self.format_version + .unwrap()))) + } else { + let mut sets = vec![]; + for (target, pkg_sets) in &self.targets { + for pkg_set in pkg_sets { + let mut idents = vec![]; + for package in &pkg_set.packages { + let ident = PackageIdent::from_str(package).map_err(Error::from)?; + idents.push(ident); + } + sets.push(PackageSet { target: *target, + channel: pkg_set.channel.clone(), + idents }); + } + } + Ok(sets) + } + } + None => Err(Error::PackageSetParseError("format_version missing!".to_string())), + } + } +} + #[derive(Debug, Deserialize)] pub struct PackageSetValue { pub channel: ChannelIdent, diff --git a/components/hab/src/command/pkg/list.rs b/components/hab/src/command/pkg/list.rs index de14bfbf83..863c79db1a 100644 --- a/components/hab/src/command/pkg/list.rs +++ b/components/hab/src/command/pkg/list.rs @@ -3,7 +3,10 @@ use crate::{error::Result, FS_ROOT_PATH}, package::{list, PackageIdent}}}; + +#[cfg(feature = "v2")] use clap::ArgMatches; + use std::str::FromStr; /// There are three options for what we can list: @@ -18,6 +21,7 @@ pub enum ListingType { Ident(PackageIdent), } +#[cfg(feature = "v2")] /// Convert a set of command line options into a ListingType impl<'a> From<&'a ArgMatches<'a>> for ListingType { /// Convert clap options into a listing type. 
diff --git a/components/hab/src/command/pkg/uninstall.rs b/components/hab/src/command/pkg/uninstall.rs index e104e3f328..165dc5d4ad 100644 --- a/components/hab/src/command/pkg/uninstall.rs +++ b/components/hab/src/command/pkg/uninstall.rs @@ -3,7 +3,6 @@ mod uninstall_impl; use super::{ExecutionStrategy, Scope}; use crate::error::Result; -use clap::ArgMatches; use habitat_common::ui::UI; use habitat_core::package::PackageIdent; use std::path::Path; @@ -19,6 +18,20 @@ pub enum UninstallMode { KeepLatest(usize), } +impl From> for UninstallMode { + fn from(keep_latest: Option) -> Self { + match keep_latest { + Some(keep_latest) => Self::KeepLatest(keep_latest), + None => Self::Single, + } + } +} + +// TODO: Remove after feature `v2` is removed +#[cfg(feature = "v2")] +use clap::ArgMatches; + +#[cfg(feature = "v2")] impl<'a> From<&'a ArgMatches<'a>> for UninstallMode { fn from(m: &ArgMatches) -> Self { m.value_of("KEEP_LATEST") diff --git a/components/hab/src/error.rs b/components/hab/src/error.rs index 37d5c792f2..9dda3a46a8 100644 --- a/components/hab/src/error.rs +++ b/components/hab/src/error.rs @@ -34,7 +34,10 @@ pub enum Error { CannotRemovePackage(hcore::package::PackageIdent, usize), CommandNotFoundInPkg((String, String)), CliConfig(cli_config::Error), + + #[cfg(feature = "v2")] ConfigOpt(configopt::Error), + CryptoCLI(String), CtlClient(SrvClientError), CtrlcError(ctrlc::Error), @@ -112,7 +115,11 @@ impl fmt::Display for Error { c, p) } Error::CliConfig(ref err) => format!("{}", err), + + // TODO: Remove after `v2` is removed + #[cfg(feature = "v2")] Error::ConfigOpt(ref err) => format!("{}", err), + Error::CryptoCLI(ref e) => e.to_string(), Error::CtlClient(ref e) => e.to_string(), Error::CtrlcError(ref err) => format!("{}", err), @@ -234,6 +241,7 @@ impl From for Error { fn from(err: cli_config::Error) -> Self { Error::CliConfig(err) } } +#[cfg(feature = "v2")] impl From for Error { fn from(err: configopt::Error) -> Self { Error::ConfigOpt(err) } } diff 
--git a/components/hab/src/key_type.rs b/components/hab/src/key_type.rs new file mode 100644 index 0000000000..a9f4112b3c --- /dev/null +++ b/components/hab/src/key_type.rs @@ -0,0 +1,35 @@ +use std::str::FromStr; + +use serde::{Deserialize, + Serialize}; + +//////////////////////////////////////////////////////////////////////// + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Deserialize, Serialize)] +pub enum KeyType { + Public, + Secret, +} + +impl FromStr for KeyType { + type Err = crate::error::Error; + + fn from_str(value: &str) -> std::result::Result { + match value { + "public" => Ok(Self::Public), + "secret" => Ok(Self::Secret), + _ => Err(Self::Err::KeyTypeParseError(value.to_string())), + } + } +} + +impl std::fmt::Display for KeyType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + KeyType::Public => write!(f, "public"), + KeyType::Secret => write!(f, "secret"), + } + } +} + +//////////////////////////////////////////////////////////////////////// diff --git a/components/hab/src/lib.rs b/components/hab/src/lib.rs index b1ffb8b6ec..f9e05b62a3 100644 --- a/components/hab/src/lib.rs +++ b/components/hab/src/lib.rs @@ -6,7 +6,15 @@ use habitat_core as hcore; use habitat_sup_client as sup_client; use habitat_sup_protocol as protocol; +#[cfg(feature = "v2")] pub mod cli; + +#[cfg(feature = "v4")] +mod cli_v4; + +#[cfg(feature = "v4")] +pub use cli_v4::cli_driver; + pub mod command; pub mod error; mod exec; @@ -17,5 +25,13 @@ pub const PRODUCT: &str = "hab"; pub const VERSION: &str = include_str!(concat!(env!("OUT_DIR"), "/VERSION")); pub const ORIGIN_ENVVAR: &str = "HAB_ORIGIN"; pub const BLDR_URL_ENVVAR: &str = "HAB_BLDR_URL"; +pub const AFTER_HELP: &str = + "\nALIASES:\n apply Alias for: 'config apply'\n install Alias for: 'pkg \ + install'\n run Alias for: 'sup run'\n setup Alias for: 'cli setup'\n \ + start Alias for: 'svc start'\n stop Alias for: 'svc stop'\n term \ + Alias for: 'sup term'\n"; pub use 
crate::hcore::AUTH_TOKEN_ENVVAR; + +// TODO:agadgil: When Clap v2 support is gone, this should become `pub(crate)` +pub mod key_type; diff --git a/components/hab/src/main.rs b/components/hab/src/main.rs old mode 100755 new mode 100644 index 52d8c0f340..0b5b7b1649 --- a/components/hab/src/main.rs +++ b/components/hab/src/main.rs @@ -1,2176 +1,14 @@ -#![recursion_limit = "128"] +#[cfg(feature = "v2")] +mod main_v2; -use clap::{value_t, - ArgMatches, - ErrorKind as ClapErrorKind, - Shell}; -use configopt::{ConfigOpt, - Error as ConfigOptError}; -use futures::stream::StreamExt; -#[cfg(any(all(target_os = "linux", - any(target_arch = "x86_64", target_arch = "aarch64")), - all(target_os = "windows", target_arch = "x86_64"),))] -use hab::cli::hab::pkg::ExportCommand as PkgExportCommand; -use hab::{cli::{self, - gateway_util, - hab::{license::License, - origin::{Rbac, - RbacSet, - RbacShow}, - pkg::PkgExec, - svc::{self, - BulkLoad as SvcBulkLoad, - Load as SvcLoad, - Svc}, - util::{bldr_auth_token_from_args_env_or_load, - bldr_url_from_args_env_load_or_default}, - Hab, - Origin, - Pkg}, - parse_optional_arg, - KeyType}, - command::{self, - pkg::{download::{PackageSet, - PackageSetFile}, - list::ListingType, - uninstall::UninstallHookMode}}, - error::{Error, - Result}, - license, - scaffolding, - AUTH_TOKEN_ENVVAR, - BLDR_URL_ENVVAR, - ORIGIN_ENVVAR, - PRODUCT, - VERSION}; -use habitat_api_client::BuildOnUpload; -use habitat_common::{self as common, - cli::key_cache_from_matches, - cli_config::CliConfig, - command::package::install::{InstallHookMode, - InstallMode, - InstallSource, - LocalPackageUsage}, - types::ResolvedListenCtlAddr, - ui::{self, - Status, - UIWriter, - UI}, - FeatureFlag}; -use habitat_core::{crypto::{init, - keys::{Key, - KeyCache}}, - env::{self as henv, - Config as _}, - fs::{cache_artifact_path, - FS_ROOT_PATH}, - os::process::ShutdownTimeout, - package::{target, - PackageIdent, - PackageTarget}, - service::ServiceGroup, - url::default_bldr_url, - 
ChannelIdent}; -use habitat_sup_client::{SrvClient, - SrvClientError}; -use habitat_sup_protocol::{self as sup_proto, - codec::*, - net::ErrCode, - types::*}; -use lazy_static::lazy_static; -use log::{debug, - warn}; -use std::{collections::HashMap, - convert::TryFrom, - env, - ffi::OsString, - fs::File, - io::{self, - prelude::*, - Read}, - path::{Path, - PathBuf}, - process, - result, - str::FromStr, - string::ToString, - thread}; -use tabwriter::TabWriter; - -#[cfg(not(target_os = "macos"))] -use hab::cli::hab::sup::{HabSup, - Secret, - Sup}; -#[cfg(not(target_os = "macos"))] -use habitat_core::tls::ctl_gateway as ctl_gateway_tls; -#[cfg(not(target_os = "macos"))] -use webpki::types::DnsName; - -/// Makes the --org CLI param optional when this env var is set -const HABITAT_ORG_ENVVAR: &str = "HAB_ORG"; -/// Makes the --user CLI param optional when this env var is set -const HABITAT_USER_ENVVAR: &str = "HAB_USER"; - -lazy_static! { - static ref STATUS_HEADER: Vec<&'static str> = { - vec!["package", - "type", - "desired", - "state", - "elapsed (s)", - "pid", - "group",] - }; -} +mod main_v4; #[tokio::main] async fn main() { - env_logger::init(); - let mut ui = UI::default_with_env(); - let flags = FeatureFlag::from_env(&mut ui); - if let Err(e) = start(&mut ui, flags).await { - let exit_code = e.exit_code(); - ui.fatal(e).unwrap(); - std::process::exit(exit_code) - } -} - -#[allow(clippy::cognitive_complexity)] -async fn start(ui: &mut UI, feature_flags: FeatureFlag) -> Result<()> { - // We parse arguments with configopt in a separate thread to eliminate - // possible stack overflow crashes at runtime. OSX or a debug Windows build, - // for instance, will crash with our large tree. 
This is a known issue: - // https://github.com/kbknapp/clap-rs/issues/86 - let child = thread::Builder::new().stack_size(8 * 1024 * 1024) - .spawn(Hab::try_from_args_with_configopt) - .unwrap(); - let hab = child.join().unwrap(); - - if let Ok(Hab::License(License::Accept)) = hab { - license::accept_license(ui)?; - return Ok(()); - } - - // Allow checking version information and displaying command help without accepting the license. - // TODO (DM): To prevent errors in discrepancy between the structopt and cli versions only do - // this when the license has not yet been accepted. When we switch fully to structopt this can - // be completely removed and we should just call `Hab::from_args_with_configopt` which will - // automatically result in this functionality. - if !license::check_for_license_acceptance().unwrap_or_default() - .accepted() - { - if let Err(ConfigOptError::Clap(e)) = &hab { - if e.kind == ClapErrorKind::VersionDisplayed || e.kind == ClapErrorKind::HelpDisplayed { - e.exit() - } - } - } - - // We must manually detect a supervisor version check and call the `hab-sup` binary to get the - // true Supervisor version. - // TODO (DM): This is an ugly consequence of having `hab sup` subcommands handled by both the - // `hab` binary and the `hab-sup` binary. Potential fixes: - // 1. Handle all `hab sup` subcommands with the `hab-sup` binary - // 2. Have a dedicated subcommand for commands handled by the `hab-sup` binary - let mut args = env::args(); - if matches!((args.next().unwrap_or_default().as_str(), - args.next().unwrap_or_default().as_str(), - args.next().unwrap_or_default().as_str()), - (_, "sup", "--version") | (_, "sup", "-V")) - { - return command::sup::start(ui, &args_after_first(2)).await; - } - - license::check_for_license_acceptance_and_prompt(ui)?; - - // Parse and handle commands which have been migrated to use `structopt` here. 
Once everything - // is migrated to use `structopt` the parsing logic below this using clap directly will be gone. - match hab { - Ok(hab) => { - match hab { - Hab::Origin(Origin::Rbac(action)) => { - match action { - Rbac::Set(rbac_set) => { - return sub_origin_member_role_set(ui, rbac_set).await; - } - Rbac::Show(rbac_show) => { - return sub_origin_member_role_show(ui, rbac_show).await; - } - } - } - #[cfg(not(target_os = "macos"))] - Hab::Run(sup_run) => { - ui.warn("'hab run' as an alias for 'hab sup run' is deprecated. Please \ - update your automation and processes accordingly.")?; - return command::launcher::start(ui, sup_run, &args_after_first(1)).await; - } - #[cfg(any(target_os = "macos", - any(all(target_os = "linux", - any(target_arch = "x86_64", target_arch = "aarch64")), - all(target_os = "windows", target_arch = "x86_64"),)))] - Hab::Studio(studio) => { - return command::studio::enter::start(ui, studio.args()).await; - } - #[cfg(not(target_os = "macos"))] - Hab::Sup(sup) => { - match sup { - HabSup::Sup(sup) => { - // These commands are handled by the `hab-sup` or `hab-launch` binaries. - // We need to pass the subcommand that was issued to the underlying - // binary. It is a bit hacky, but to do that we strip off the `hab sup` - // command prefix and pass the rest of the args to underlying binary. 
- let args = args_after_first(2); - match sup { - #[cfg(any( - all(target_os = "linux", any(target_arch = "x86_64", target_arch = "aarch64")), - all(target_os = "windows", target_arch = "x86_64"), - ))] - Sup::Bash | Sup::Sh => { - return command::sup::start(ui, &args).await; - } - Sup::Term => { - return command::sup::start(ui, &args).await; - } - Sup::Run(sup_run) => { - return command::launcher::start(ui, sup_run, &args).await; - } - } - } - HabSup::Depart { member_id, - remote_sup, } => { - return sub_sup_depart(member_id, remote_sup.inner()).await; - } - HabSup::Secret(secret) => { - match secret { - Secret::Generate => return sub_sup_secret_generate(), - Secret::GenerateTls { subject_alternative_name, - path, } => { - return sub_sup_secret_generate_key(&subject_alternative_name.dns_name()?, - path) - } - } - } - HabSup::Status { pkg_ident, - remote_sup, } => { - ui.warn("'hab sup status' as an alias for 'hab svc status' is \ - deprecated. Please update your automation and processes \ - accordingly.")?; - return sub_svc_status(pkg_ident, remote_sup.inner()).await; - } - HabSup::Restart { remote_sup } => { - return sub_sup_restart(remote_sup.inner()).await; - } - } - } - Hab::Svc(svc) => { - match svc { - Svc::BulkLoad(svc_bulk_load) => { - if feature_flags.contains(FeatureFlag::SERVICE_CONFIG_FILES) { - return sub_svc_bulk_load(svc_bulk_load).await; - } else { - return Err(Error::ArgumentError(String::from("`hab svc bulkload` is only available when `HAB_FEAT_SERVICE_CONFIG_FILES` is set"))); - } - } - Svc::Load(svc_load) => { - return sub_svc_load(svc_load).await; - } - Svc::Update(svc_update) => return sub_svc_update(svc_update).await, - Svc::Status(svc_status) => { - return sub_svc_status(svc_status.pkg_ident, - svc_status.remote_sup.inner()).await; - } - _ => { - // All other commands will be caught by the CLI parsing logic below. 
- } - } - } - #[cfg(not(target_os = "macos"))] - Hab::Term => { - ui.warn("'hab term' as an alias for 'hab sup term' is deprecated. Please \ - update your automation and processes accordingly.")?; - return command::sup::start(ui, &args_after_first(1)).await; - } - Hab::Pkg(pkg) => { - #[allow(clippy::collapsible_match)] - match pkg { - // package export is not available on platforms that have no package support - #[cfg(any(all(target_os = "linux", - any(target_arch = "x86_64", target_arch = "aarch64")), - all(target_os = "windows", target_arch = "x86_64"),))] - Pkg::Export(export) => { - match export { - #[cfg(any(target_os = "linux", target_os = "windows"))] - PkgExportCommand::Container(args) => { - return command::pkg::export::container::start(ui, &args.args).await; - } - #[cfg(any(target_os = "linux", target_os = "windows"))] - PkgExportCommand::Docker(args) => { - ui.warn("'hab pkg export docker' is now a deprecated alias \ - for 'hab pkg export container'. Please update your \ - automation and processes accordingly.")?; - return command::pkg::export::container::start(ui, &args.args).await; - } - #[cfg(any(target_os = "linux", target_os = "windows"))] - PkgExportCommand::Tar(args) => { - return command::pkg::export::tar::start(ui, &args.args).await; - } - } - } - Pkg::Exec(PkgExec { pkg_ident, - cmd, - args, }) => { - return command::pkg::exec::start(&pkg_ident.pkg_ident(), - cmd, - &args.args); - } - _ => { - // All other commands will be caught by the CLI parsing logic below. - } - } - } - _ => { - // All other commands will be caught by the CLI parsing logic below. - } - } - } - Err(e @ ConfigOptError::ConfigGenerated(_) - | e @ ConfigOptError::ConfigFile(..) - | e @ ConfigOptError::Toml(..)) => e.exit(), - Err(_) => { - // Completely ignore all other errors. They will be caught by the CLI parsing logic - // below. 
- } - }; - - // Similar to the configopt parsing above We build the command tree in a - // separate thread to eliminate possible stack overflow crashes at runtime. - // See known issue:https://github.com/kbknapp/clap-rs/issues/86 - let cli_child = thread::Builder::new().stack_size(8 * 1024 * 1024) - .spawn(move || { - cli::get(feature_flags).get_matches_safe() - .unwrap_or_else(|e| { - e.exit(); - }) - }) - .unwrap(); - let app_matches = cli_child.join().unwrap(); - - match app_matches.subcommand() { - ("apply", Some(m)) => { - ui.warn("'hab apply' as an alias for 'hab config apply' is deprecated. Please \ - update your automation and processes accordingly.")?; - sub_svc_set(m).await? - } - ("cli", Some(matches)) => { - match matches.subcommand() { - ("setup", Some(m)) => sub_cli_setup(ui, m)?, - ("completers", Some(m)) => sub_cli_completers(m, feature_flags), - _ => unreachable!(), - } - } - ("config", Some(m)) => { - match m.subcommand() { - ("apply", Some(m)) => sub_svc_set(m).await?, - ("show", Some(m)) => sub_svc_config(m).await?, - _ => unreachable!(), - } - } - ("file", Some(m)) => { - match m.subcommand() { - ("upload", Some(m)) => sub_file_put(m).await?, - _ => unreachable!(), - } - } - ("install", Some(m)) => { - ui.warn("'hab install' as an alias for 'hab pkg install' is deprecated. Please \ - update your automation and processes accordingly.")?; - sub_pkg_install(ui, m, feature_flags).await? 
- } - ("origin", Some(matches)) => { - match matches.subcommand() { - ("invitations", Some(m)) => { - match m.subcommand() { - ("accept", Some(sc)) => sub_accept_origin_invitation(ui, sc).await?, - ("ignore", Some(sc)) => sub_ignore_origin_invitation(ui, sc).await?, - ("list", Some(sc)) => sub_list_user_invitations(ui, sc).await?, - ("pending", Some(sc)) => sub_list_pending_origin_invitations(ui, sc).await?, - ("send", Some(sc)) => sub_send_origin_invitation(ui, sc).await?, - ("rescind", Some(sc)) => sub_rescind_origin_invitation(ui, sc).await?, - _ => unreachable!(), - } - } - ("key", Some(m)) => { - match m.subcommand() { - ("download", Some(sc)) => sub_origin_key_download(ui, sc).await?, - ("export", Some(sc)) => sub_origin_key_export(sc)?, - ("generate", Some(sc)) => sub_origin_key_generate(ui, sc)?, - ("import", Some(sc)) => sub_origin_key_import(ui, sc)?, - ("upload", Some(sc)) => sub_origin_key_upload(ui, sc).await?, - _ => unreachable!(), - } - } - ("secret", Some(m)) => { - match m.subcommand() { - ("upload", Some(sc)) => sub_origin_secret_upload(ui, sc).await?, - ("delete", Some(sc)) => sub_origin_secret_delete(ui, sc).await?, - ("list", Some(sc)) => sub_origin_secret_list(ui, sc).await?, - _ => unreachable!(), - } - } - ("create", Some(m)) => sub_origin_create(ui, m).await?, - ("delete", Some(m)) => sub_origin_delete(ui, m).await?, - ("transfer", Some(m)) => sub_origin_transfer_ownership(ui, m).await?, - ("depart", Some(m)) => sub_origin_depart(ui, m).await?, - ("info", Some(m)) => sub_origin_info(ui, m).await?, - _ => unreachable!(), - } - } - ("bldr", Some(matches)) => { - match matches.subcommand() { - ("job", Some(m)) => { - match m.subcommand() { - ("start", Some(m)) => sub_bldr_job_start(ui, m).await?, - ("cancel", Some(m)) => sub_bldr_job_cancel(ui, m).await?, - ("promote", Some(m)) => sub_bldr_job_promote_or_demote(ui, m, true).await?, - ("demote", Some(m)) => sub_bldr_job_promote_or_demote(ui, m, false).await?, - ("status", Some(m)) => 
sub_bldr_job_status(ui, m).await?, - _ => unreachable!(), - } - } - ("channel", Some(m)) => { - match m.subcommand() { - ("create", Some(m)) => sub_bldr_channel_create(ui, m).await?, - ("destroy", Some(m)) => sub_bldr_channel_destroy(ui, m).await?, - ("list", Some(m)) => sub_bldr_channel_list(ui, m).await?, - ("promote", Some(m)) => sub_bldr_channel_promote(ui, m).await?, - ("demote", Some(m)) => sub_bldr_channel_demote(ui, m).await?, - _ => unreachable!(), - } - } - _ => unreachable!(), - } - } - ("pkg", Some(matches)) => { - match matches.subcommand() { - ("binds", Some(m)) => sub_pkg_binds(m)?, - ("binlink", Some(m)) => sub_pkg_binlink(ui, m)?, - ("build", Some(m)) => sub_pkg_build(ui, m, feature_flags).await?, - ("channels", Some(m)) => sub_pkg_channels(ui, m).await?, - ("config", Some(m)) => sub_pkg_config(m)?, - ("dependencies", Some(m)) => sub_pkg_dependencies(m)?, - ("download", Some(m)) => sub_pkg_download(ui, m, feature_flags).await?, - ("env", Some(m)) => sub_pkg_env(m)?, - ("hash", Some(m)) => sub_pkg_hash(m)?, - ("install", Some(m)) => sub_pkg_install(ui, m, feature_flags).await?, - ("list", Some(m)) => sub_pkg_list(m)?, - ("path", Some(m)) => sub_pkg_path(m)?, - ("provides", Some(m)) => sub_pkg_provides(m)?, - ("search", Some(m)) => sub_pkg_search(m).await?, - ("sign", Some(m)) => sub_pkg_sign(ui, m)?, - ("uninstall", Some(m)) => sub_pkg_uninstall(ui, m).await?, - ("upload", Some(m)) => sub_pkg_upload(ui, m).await?, - ("bulkupload", Some(m)) => sub_pkg_bulkupload(ui, m).await?, - ("delete", Some(m)) => sub_pkg_delete(ui, m).await?, - ("verify", Some(m)) => sub_pkg_verify(ui, m)?, - ("header", Some(m)) => sub_pkg_header(ui, m)?, - ("info", Some(m)) => sub_pkg_info(ui, m)?, - ("promote", Some(m)) => sub_pkg_promote(ui, m).await?, - ("demote", Some(m)) => sub_pkg_demote(ui, m).await?, - _ => unreachable!(), - } - } - ("plan", Some(matches)) => { - match matches.subcommand() { - ("init", Some(m)) => sub_plan_init(ui, m)?, - ("render", Some(m)) => 
sub_plan_render(ui, m)?, - _ => unreachable!(), - } - } - ("ring", Some(matches)) => { - match matches.subcommand() { - ("key", Some(m)) => { - match m.subcommand() { - ("export", Some(sc)) => sub_ring_key_export(sc)?, - ("import", Some(sc)) => sub_ring_key_import(ui, sc)?, - ("generate", Some(sc)) => sub_ring_key_generate(ui, sc)?, - _ => unreachable!(), - } - } - _ => unreachable!(), - } - } - ("svc", Some(matches)) => { - match matches.subcommand() { - ("key", Some(m)) => { - match m.subcommand() { - ("generate", Some(sc)) => sub_service_key_generate(ui, sc)?, - _ => unreachable!(), - } - } - ("unload", Some(m)) => sub_svc_unload(m).await?, - ("start", Some(m)) => sub_svc_start(m).await?, - ("stop", Some(m)) => sub_svc_stop(m).await?, - _ => unreachable!(), - } - } - ("supportbundle", _) => sub_supportbundle(ui)?, - ("setup", Some(m)) => { - ui.warn("'hab setup' as an alias for 'hab cli setup' is deprecated. Please update \ - your automation and processes accordingly.")?; - sub_cli_setup(ui, m)? - } - ("start", Some(m)) => { - ui.warn("'hab start' as an alias for 'hab svc start' is deprecated. Please update \ - your automation and processes accordingly.")?; - sub_svc_start(m).await? - } - ("stop", Some(m)) => { - ui.warn("'hab stop' as an alias for 'hab svc stop' is deprecated. Please update \ - your automation and processes accordingly.")?; - sub_svc_stop(m).await? 
- } - ("user", Some(matches)) => { - match matches.subcommand() { - ("key", Some(m)) => { - match m.subcommand() { - ("generate", Some(sc)) => sub_user_key_generate(ui, sc)?, - _ => unreachable!(), - } - } - _ => unreachable!(), - } - } - _ => unreachable!(), - }; - Ok(()) -} - -fn sub_cli_setup(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::cli::setup::start(ui, &key_cache) -} - -fn sub_cli_completers(m: &ArgMatches<'_>, feature_flags: FeatureFlag) { - let shell = m.value_of("SHELL") - .expect("Missing Shell; A shell is required"); - - // TODO (CM): Interesting... the completions generated can depend - // on what feature flags happen to be enabled at the time you - // generated the completions - cli::get(feature_flags).gen_completions_to("hab", - shell.parse::().unwrap(), - &mut io::stdout()); -} - -async fn sub_origin_key_download(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN").parse()?; - let revision = m.value_of("REVISION"); - let with_secret = m.is_present("WITH_SECRET"); - let with_encryption = m.is_present("WITH_ENCRYPTION"); - let token = maybe_auth_token(m); - let url = bldr_url_from_matches(m)?; - let key_cache = key_cache_from_matches(m)?; - - command::origin::key::download::start(ui, - &url, - &origin, - revision, - with_secret, - with_encryption, - token.as_deref(), - &key_cache).await -} - -fn sub_origin_key_export(m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN").parse()?; - let key_type = KeyType::from_str(m.value_of("KEY_TYPE").unwrap_or("public"))?; - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::origin::key::export::start(&origin, key_type, &key_cache) -} - -fn sub_origin_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = origin_param_or_env(m)?; - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::origin::key::generate::start(ui, 
&origin, &key_cache) -} - -fn sub_origin_key_import(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let mut content = String::new(); - let key_cache = key_cache_from_matches(m)?; - init()?; - io::stdin().read_to_string(&mut content)?; - - // Trim the content to lose line feeds added by Powershell pipeline - command::origin::key::import::start(ui, content.trim(), &key_cache) -} - -async fn sub_origin_key_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let key_cache = key_cache_from_matches(m)?; - - init()?; - - match m.value_of("ORIGIN") { - Some(origin) => { - let origin = origin.parse()?; - // you can either specify files, or infer the latest key names - let with_secret = m.is_present("WITH_SECRET"); - command::origin::key::upload_latest::start(ui, - &url, - &token, - &origin, - with_secret, - &key_cache).await - } - None => { - let keyfile = Path::new(required_value_of(m, "PUBLIC_FILE")); - let secret_keyfile = m.value_of("SECRET_FILE").map(Path::new); - command::origin::key::upload::start(ui, &url, &token, keyfile, secret_keyfile).await - } - } -} - -async fn sub_origin_secret_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let origin = origin_param_or_env(m)?; - let key = required_value_of(m, "KEY_NAME"); - let secret = required_value_of(m, "SECRET"); - let key_cache = key_cache_from_matches(m)?; - command::origin::secret::upload::start(ui, &url, &token, &origin, key, secret, &key_cache).await -} - -async fn sub_origin_secret_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let origin = origin_param_or_env(m)?; - let key = required_value_of(m, "KEY_NAME"); - command::origin::secret::delete::start(ui, &url, &token, &origin, key).await -} - -async fn sub_origin_secret_list(ui: &mut UI, m: 
&ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let origin = origin_param_or_env(m)?; - command::origin::secret::list::start(ui, &url, &token, &origin).await -} - -async fn sub_origin_create(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::create::start(ui, &url, &token, origin).await -} - -async fn sub_origin_info(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let to_json = m.is_present("TO_JSON"); - command::origin::info::start(ui, &url, &token, origin, to_json).await -} - -async fn sub_origin_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::delete::start(ui, &url, &token, origin).await -} - -async fn sub_origin_transfer_ownership(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let account = required_value_of(m, "NEW_OWNER_ACCOUNT"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::transfer::start(ui, &url, &token, origin, account).await -} - -async fn sub_origin_depart(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::depart::start(ui, &url, &token, origin).await -} - -async fn sub_accept_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let invitation_id = - required_value_of(m, "INVITATION_ID").parse() - .expect("INVITATION_ID should be valid at this 
point"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::accept::start(ui, &url, origin, &token, invitation_id).await -} - -async fn sub_ignore_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let invitation_id = - required_value_of(m, "INVITATION_ID").parse() - .expect("INVITATION_ID should be valid at this point"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::ignore::start(ui, &url, origin, &token, invitation_id).await -} - -async fn sub_list_user_invitations(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::list_user::start(ui, &url, &token).await -} - -async fn sub_list_pending_origin_invitations(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::list_pending_origin::start(ui, &url, origin, &token).await -} - -async fn sub_rescind_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let invitation_id = - required_value_of(m, "INVITATION_ID").parse() - .expect("INVITATION_ID should be valid at this point"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::rescind::start(ui, &url, origin, &token, invitation_id).await -} - -async fn sub_send_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = required_value_of(m, "ORIGIN"); - let invitee_account = required_value_of(m, "INVITEE_ACCOUNT"); - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - command::origin::invitations::send::start(ui, &url, origin, &token, 
invitee_account).await -} - -async fn sub_origin_member_role_show(ui: &mut UI, r: RbacShow) -> Result<()> { - let bldr_url = bldr_url_from_args_env_load_or_default(r.bldr_url.value)?; - let auth_token = bldr_auth_token_from_args_env_or_load(r.auth_token.value)?; - command::origin::rbac::show_role::start(ui, - bldr_url, - r.origin.inner, - &auth_token, - &r.member_account, - r.to_json).await -} - -async fn sub_origin_member_role_set(ui: &mut UI, r: RbacSet) -> Result<()> { - let bldr_url = bldr_url_from_args_env_load_or_default(r.bldr_url.value)?; - let auth_token = bldr_auth_token_from_args_env_or_load(r.auth_token.value)?; - command::origin::rbac::set_role::start(ui, - bldr_url, - r.origin.inner, - &auth_token, - &r.member_account, - r.role, - r.no_prompt).await -} - -fn sub_pkg_binlink(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - let dest_dir = Path::new(required_value_of(m, "DEST_DIR")); - let force = m.is_present("FORCE"); - match m.value_of("BINARY") { - Some(binary) => { - command::pkg::binlink::start(ui, &ident, binary, dest_dir, &FS_ROOT_PATH, force) - } - None => { - command::pkg::binlink::binlink_all_in_pkg(ui, &ident, dest_dir, &FS_ROOT_PATH, force) - } - } -} - -/// Generate a (possibly empty) list of `Origin`s from the value of -/// the `HAB_ORIGIN_KEYS` environment variable / `--keys` argument. 
-fn hab_key_origins(m: &ArgMatches<'_>) -> Result> { - m.values_of("HAB_ORIGIN_KEYS") - .unwrap_or_default() - .map(|n| n.parse().map_err(Into::into)) - .collect() -} - -#[allow(unused_variables)] -async fn sub_pkg_build(ui: &mut UI, m: &ArgMatches<'_>, feature_flags: FeatureFlag) -> Result<()> { - let plan_context = required_value_of(m, "PLAN_CONTEXT"); - let root = m.value_of("HAB_STUDIO_ROOT"); - let src = m.value_of("SRC_PATH"); - let refresh_channel = m.value_of("REFRESH_CHANNEL"); - - let origins = hab_key_origins(m)?; - if !origins.is_empty() { - init()?; - let key_cache = key_cache_from_matches(m)?; - for origin in origins.iter() { - // Validate that a secret signing key is present on disk - // for each origin. - key_cache.latest_secret_origin_signing_key(origin)?; - } - } - - #[cfg(target_family = "unix")] - let native_package = if m.is_present("NATIVE_PACKAGE") { - if !feature_flags.contains(FeatureFlag::NATIVE_PACKAGE_SUPPORT) { - return Err(Error::ArgumentError(String::from("`--native-package` is \ - only available when \ - `HAB_FEAT_NATIVE_PACKAGE_SUPPORT` \ - is set"))); - } - true - } else { - false - }; - #[cfg(target_family = "windows")] - let native_package = false; - - let docker = m.is_present("DOCKER"); - let reuse = m.is_present("REUSE"); - - command::pkg::build::start(ui, - plan_context, - root, - src, - &origins, - native_package, - reuse, - docker, - refresh_channel).await -} - -fn sub_pkg_config(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - common::command::package::config::start(&ident, &*FS_ROOT_PATH)?; - Ok(()) -} - -fn sub_pkg_binds(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - common::command::package::binds::start(&ident, &*FS_ROOT_PATH)?; - Ok(()) -} - -fn sub_pkg_dependencies(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - let scope = if m.is_present("TRANSITIVE") { - command::pkg::Scope::PackageAndDependencies - 
} else { - command::pkg::Scope::Package - }; - - let direction = if m.is_present("REVERSE") { - command::pkg::DependencyRelation::Supports - } else { - command::pkg::DependencyRelation::Requires - }; - command::pkg::dependencies::start(&ident, scope, direction, &FS_ROOT_PATH) -} - -async fn sub_pkg_download(ui: &mut UI, - m: &ArgMatches<'_>, - _feature_flags: FeatureFlag) - -> Result<()> { - let token = maybe_auth_token(m); - let url = bldr_url_from_matches(m)?; - let download_dir = download_dir_from_matches(m); - - // Construct flat file based inputs - let channel = channel_from_matches_or_default(m); - let target = target_from_matches(m)?; - - let install_sources = idents_from_matches(m)?; - - let mut package_sets = vec![PackageSet { target, - channel: channel.clone(), - idents: install_sources }]; - - let mut install_sources_from_file = idents_from_file_matches(ui, m, &channel, target)?; - package_sets.append(&mut install_sources_from_file); - package_sets.retain(|set| !set.idents.is_empty()); - - let verify = verify_from_matches(m); - let ignore_missing_seeds = ignore_missing_seeds_from_matches(m); - - init()?; - - command::pkg::download::start(ui, - &url, - PRODUCT, - VERSION, - &package_sets, - download_dir.as_ref(), - token.as_deref(), - verify, - ignore_missing_seeds).await?; - Ok(()) -} - -fn sub_pkg_env(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - command::pkg::env::start(&ident, &FS_ROOT_PATH) -} - -fn sub_pkg_hash(m: &ArgMatches<'_>) -> Result<()> { - init()?; - match m.value_of("SOURCE") { - Some(source) => { - // hash single file - command::pkg::hash::start(source) - } - None => { - // read files from stdin - let stdin = io::stdin(); - for line in stdin.lock().lines() { - let file = line?; - command::pkg::hash::start(file.trim_end())?; - } - Ok(()) - } - } -} - -async fn sub_pkg_uninstall(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - let execute_strategy = 
if m.is_present("DRYRUN") { - command::pkg::ExecutionStrategy::DryRun - } else { - command::pkg::ExecutionStrategy::Run - }; - let mode = command::pkg::uninstall::UninstallMode::from(m); - let scope = if m.is_present("NO_DEPS") { - command::pkg::Scope::Package - } else { - command::pkg::Scope::PackageAndDependencies - }; - let excludes = excludes_from_matches(m); - let uninstall_hook_mode = if m.is_present("IGNORE_UNINSTALL_HOOK") { - UninstallHookMode::Ignore - } else { - UninstallHookMode::default() - }; - - command::pkg::uninstall::start(ui, - &ident, - &FS_ROOT_PATH, - execute_strategy, - mode, - scope, - &excludes, - uninstall_hook_mode).await -} - -async fn sub_bldr_channel_create(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let origin = origin_param_or_env(m)?; - let channel = required_channel_from_matches(m); - let token = auth_token_param_or_env(m)?; - command::bldr::channel::create::start(ui, &url, &token, &origin, &channel).await -} - -async fn sub_bldr_channel_destroy(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let origin = origin_param_or_env(m)?; - let channel = required_channel_from_matches(m); - let token = auth_token_param_or_env(m)?; - command::bldr::channel::destroy::start(ui, &url, &token, &origin, &channel).await -} - -async fn sub_bldr_channel_list(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let origin = origin_param_or_env(m)?; - let include_sandbox_channels = m.is_present("SANDBOX"); - command::bldr::channel::list::start(ui, &url, &origin, include_sandbox_channels).await -} - -async fn sub_bldr_channel_promote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let origin = origin_param_or_env(m)?; - let token = auth_token_param_or_env(m)?; - let source_channel = required_source_channel_from_matches(m); - let target_channel = required_target_channel_from_matches(m); - 
command::bldr::channel::promote::start(ui, - &url, - &token, - &origin, - &source_channel, - &target_channel).await -} - -async fn sub_bldr_channel_demote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let origin = origin_param_or_env(m)?; - let token = auth_token_param_or_env(m)?; - let source_channel = required_source_channel_from_matches(m); - let target_channel = required_target_channel_from_matches(m); - command::bldr::channel::demote::start(ui, - &url, - &token, - &origin, - &source_channel, - &target_channel).await -} - -#[allow(unused)] -async fn sub_bldr_job_start(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - Err(Error::BuilderBuildFunctionsRemoved) -} - -#[allow(unused)] -async fn sub_bldr_job_cancel(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - Err(Error::BuilderBuildFunctionsRemoved) -} - -#[allow(unused)] -async fn sub_bldr_job_promote_or_demote(ui: &mut UI, - m: &ArgMatches<'_>, - promote: bool) - -> Result<()> { - Err(Error::BuilderBuildFunctionsRemoved) -} - -#[allow(unused)] -async fn sub_bldr_job_status(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - Err(Error::BuilderBuildFunctionsRemoved) -} - -fn sub_plan_init(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let name = m.value_of("PKG_NAME").map(String::from); - let origin = origin_param_or_env(m)?; - let minimal = m.is_present("MIN"); - let scaffolding_ident = if cfg!(windows) { - match m.value_of("SCAFFOLDING") { - Some(scaffold) => Some(PackageIdent::from_str(scaffold)?), - None => None, - } - } else { - scaffolding::scaffold_check(ui, m.value_of("SCAFFOLDING"))? 
- }; - - command::plan::init::start(ui, &origin, minimal, scaffolding_ident, name) -} - -fn sub_plan_render(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let template_path = required_value_of(m, "TEMPLATE_PATH"); - let template_path = Path::new(template_path); - - let default_toml_path = required_value_of(m, "DEFAULT_TOML"); - let default_toml_path = Path::new(default_toml_path); - - let user_toml_path = m.value_of("USER_TOML").map(Path::new); - - let mock_data_path = m.value_of("MOCK_DATA").map(Path::new); - - let print = m.is_present("PRINT"); - let render = !m.is_present("NO_RENDER"); - let quiet = m.is_present("QUIET"); - - let render_dir = required_value_of(m, "RENDER_DIR"); - let render_dir = Path::new(render_dir); - - command::plan::render::start(ui, - template_path, - default_toml_path, - user_toml_path, - mock_data_path, - print, - render, - render_dir, - quiet) -} - -async fn sub_pkg_install(ui: &mut UI, - m: &ArgMatches<'_>, - feature_flags: FeatureFlag) - -> Result<()> { - let url = bldr_url_from_matches(m)?; - let channel = channel_from_matches_or_default(m); - let install_sources = install_sources_from_matches(m)?; - let token = maybe_auth_token(m); - let install_mode = - if feature_flags.contains(FeatureFlag::OFFLINE_INSTALL) && m.is_present("OFFLINE") { - InstallMode::Offline - } else { - InstallMode::default() - }; - - let local_package_usage = if m.is_present("IGNORE_LOCAL") { - LocalPackageUsage::Ignore - } else { - LocalPackageUsage::default() - }; - - let install_hook_mode = if m.is_present("IGNORE_INSTALL_HOOK") { - InstallHookMode::Ignore - } else { - InstallHookMode::default() - }; - - init()?; - - for install_source in install_sources.iter() { - let pkg_install = - common::command::package::install::start(ui, - &url, - &channel, - install_source, - PRODUCT, - VERSION, - &FS_ROOT_PATH, - &cache_artifact_path(Some(FS_ROOT_PATH.as_path())), - token.as_deref(), - &install_mode, - &local_package_usage, - install_hook_mode).await?; - - if 
let Some(dest_dir) = binlink_dest_dir_from_matches(m) { - let force = m.is_present("FORCE"); - command::pkg::binlink::binlink_all_in_pkg(ui, - pkg_install.ident(), - &dest_dir, - &FS_ROOT_PATH, - force)?; - } - } - Ok(()) -} - -fn sub_pkg_path(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - command::pkg::path::start(&ident, &FS_ROOT_PATH) -} - -fn sub_pkg_list(m: &ArgMatches<'_>) -> Result<()> { - let listing_type = ListingType::from(m); - - command::pkg::list::start(&listing_type) -} - -fn sub_pkg_provides(m: &ArgMatches<'_>) -> Result<()> { - let filename = required_value_of(m, "FILE"); - - let full_releases = m.is_present("FULL_RELEASES"); - let full_paths = m.is_present("FULL_PATHS"); - - command::pkg::provides::start(filename, &FS_ROOT_PATH, full_releases, full_paths) -} - -async fn sub_pkg_search(m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let search_term = required_value_of(m, "SEARCH_TERM"); - let limit = required_value_of(m, "LIMIT").parse().expect("valid LIMIT"); - let token = maybe_auth_token(m); - command::pkg::search::start(search_term, &url, limit, token.as_deref()).await -} - -fn sub_pkg_sign(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let origin = origin_param_or_env(m)?; - - let src = Path::new(required_value_of(m, "SOURCE")); - let dst = Path::new(required_value_of(m, "DEST")); - - let key_cache = key_cache_from_matches(m)?; - - init()?; - - let key = key_cache.latest_secret_origin_signing_key(&origin)?; - command::pkg::sign::start(ui, &key, src, dst) -} - -async fn sub_pkg_bulkupload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let upload_dir = bulkupload_dir_from_matches(m); - let artifact_path = upload_dir.join("artifacts"); - let key_path = upload_dir.join("keys"); - let key_cache = KeyCache::new(key_path); - key_cache.setup()?; - - let url = bldr_url_from_matches(m)?; - let additional_release_channel = channel_from_matches(m); - let force_upload = 
m.is_present("FORCE"); - let auto_build = if m.is_present("AUTO_BUILD") { - BuildOnUpload::PackageDefault - } else { - BuildOnUpload::Disable - }; - let auto_create_origins = m.is_present("AUTO_CREATE_ORIGINS"); - let token = auth_token_param_or_env(m)?; - - command::pkg::bulkupload::start(ui, - &url, - &additional_release_channel, - &token, - &artifact_path, - force_upload, - auto_build, - auto_create_origins, - &key_cache).await -} - -async fn sub_pkg_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let key_cache = key_cache_from_matches(m)?; - let url = bldr_url_from_matches(m)?; - - // When packages are uploaded, they *always* go to `unstable`; - // they can optionally get added to another channel, too. - let additional_release_channel = channel_from_matches(m); - - // When packages are uploaded we check if they exist in the db - // before allowing a write to the backend, this bypasses the check - let force_upload = m.is_present("FORCE"); - - let auto_build = if m.is_present("NO_BUILD") { - BuildOnUpload::Disable - } else { - BuildOnUpload::PackageDefault - }; - - let token = auth_token_param_or_env(m)?; - let artifact_paths = m.values_of("HART_FILE").unwrap(); // Required via clap - for artifact_path in artifact_paths.map(Path::new) { - command::pkg::upload::start(ui, - &url, - &additional_release_channel, - &token, - artifact_path, - force_upload, - auto_build, - &key_cache).await?; - } - Ok(()) -} - -async fn sub_pkg_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let token = auth_token_param_or_env(m)?; - let ident = required_pkg_ident_from_input(m)?; - let target = target_from_matches(m)?; - - command::pkg::delete::start(ui, &url, (&ident, target), &token).await?; - - Ok(()) -} - -fn sub_pkg_verify(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let src = Path::new(required_value_of(m, "SOURCE")); - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::pkg::verify::start(ui, src, 
&key_cache) -} - -fn sub_pkg_header(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let src = Path::new(required_value_of(m, "SOURCE")); - init()?; - - command::pkg::header::start(ui, src) -} - -fn sub_pkg_info(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let src = Path::new(required_value_of(m, "SOURCE")); - let to_json = m.is_present("TO_JSON"); - init()?; - - command::pkg::info::start(ui, src, to_json) -} - -async fn sub_pkg_promote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let channel = required_channel_from_matches(m); - let token = auth_token_param_or_env(m)?; - let target = target_from_matches(m)?; - let ident = required_pkg_ident_from_input(m)?; - command::pkg::promote::start(ui, &url, (&ident, target), &channel, &token).await -} - -async fn sub_pkg_demote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let channel = required_channel_from_matches(m); - let token = auth_token_param_or_env(m)?; - let target = target_from_matches(m)?; - let ident = required_pkg_ident_from_input(m)?; - command::pkg::demote::start(ui, &url, (&ident, target), &channel, &token).await -} - -async fn sub_pkg_channels(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let url = bldr_url_from_matches(m)?; - let ident = required_pkg_ident_from_input(m)?; - let token = maybe_auth_token(m); - let target = target_from_matches(m)?; - - command::pkg::channels::start(ui, &url, (&ident, target), token.as_deref()).await -} - -async fn sub_svc_set(m: &ArgMatches<'_>) -> Result<()> { - let remote_sup_addr = remote_sup_from_input(m)?; - let remote_sup_addr = SrvClient::ctl_addr(remote_sup_addr.as_ref())?; - let service_group = required_value_of(m, "SERVICE_GROUP").parse::()?; - let mut ui = ui::ui(); - let mut validate = sup_proto::ctl::SvcValidateCfg { service_group: - Some(service_group.clone().into()), - ..Default::default() }; - let mut buf = 
Vec::with_capacity(sup_proto::butterfly::MAX_SVC_CFG_SIZE); - let cfg_len = match m.value_of("FILE") { - Some("-") | None => io::stdin().read_to_end(&mut buf)?, - Some(f) => { - let mut file = File::open(f)?; - file.read_to_end(&mut buf)? - } - }; - if cfg_len > sup_proto::butterfly::MAX_SVC_CFG_SIZE { - ui.fatal(format!("Configuration too large. Maximum size allowed is {} bytes.", - sup_proto::butterfly::MAX_SVC_CFG_SIZE))?; - process::exit(1); - } - validate.cfg = Some(buf.clone()); - let key_cache = key_cache_from_matches(m)?; - - let mut set = sup_proto::ctl::SvcSetCfg::default(); - match (service_group.org(), user_param_or_env(m)) { - (Some(_org), Some(username)) => { - let user_key = key_cache.latest_user_secret_key(&username)?; - let service_key = key_cache.latest_service_public_key(&service_group)?; - ui.status(Status::Encrypting, - format!("TOML as {} for {}", - user_key.named_revision(), - service_key.named_revision()))?; - set.cfg = Some(user_key.encrypt_for_service(&buf, &service_key) - .to_string() - .into_bytes()); - set.is_encrypted = Some(true); - } - _ => set.cfg = Some(buf.to_vec()), - } - set.service_group = Some(service_group.into()); - set.version = Some(value_t!(m, "VERSION_NUMBER", u64).unwrap()); - ui.begin(format!("Setting new configuration version {} for {}", - set.version - .as_ref() - .map(ToString::to_string) - .unwrap_or_else(|| "UNKNOWN".to_string()), - set.service_group - .as_ref() - .map(ToString::to_string) - .unwrap_or_else(|| "UNKNOWN".to_string()),))?; - ui.status(Status::Creating, "service configuration")?; - let mut response = SrvClient::request(Some(&remote_sup_addr), validate).await?; - while let Some(message_result) = response.next().await { - let reply = message_result?; - match reply.message_id() { - "NetOk" => (), - "NetErr" => { - let m = reply.parse::() - .map_err(SrvClientError::Decode)?; - match ErrCode::try_from(m.code) { - Ok(ErrCode::InvalidPayload) => { - ui.warn(m)?; - } - _ => return 
Err(SrvClientError::from(m).into()), - } - } - _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), - } - } - ui.status(Status::Applying, format!("via peer {}", remote_sup_addr))?; - let mut response = SrvClient::request(Some(&remote_sup_addr), set).await?; - while let Some(message_result) = response.next().await { - let reply = message_result?; - match reply.message_id() { - "NetOk" => (), - "NetErr" => { - let m = reply.parse::() - .map_err(SrvClientError::Decode)?; - return Err(SrvClientError::from(m).into()); - } - _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), - } - } - ui.end("Applied configuration")?; - Ok(()) -} - -async fn sub_svc_config(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - let remote_sup_addr = remote_sup_from_input(m)?; - let msg = sup_proto::ctl::SvcGetDefaultCfg { ident: Some(ident.into()), }; - let mut response = SrvClient::request(remote_sup_addr.as_ref(), msg).await?; - while let Some(message_result) = response.next().await { - let reply = message_result?; - match reply.message_id() { - "ServiceCfg" => { - reply.parse::() - .map_err(SrvClientError::Decode)?; - } - "NetErr" => { - let m = reply.parse::() - .map_err(SrvClientError::Decode)?; - return Err(SrvClientError::from(m).into()); - } - _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), - } - } - Ok(()) -} - -async fn sub_svc_load(svc_load: SvcLoad) -> Result<()> { - let remote_sup_addr = svc_load.remote_sup.clone(); - let msg = habitat_sup_protocol::ctl::SvcLoad::try_from(svc_load)?; - gateway_util::send(remote_sup_addr.inner(), msg).await -} - -async fn sub_svc_bulk_load(svc_bulk_load: SvcBulkLoad) -> Result<()> { - let mut errors = HashMap::new(); - for svc_load in svc::svc_loads_from_paths(&svc_bulk_load.svc_config_paths)? 
{ - let ident = svc_load.pkg_ident.clone().pkg_ident(); - if let Err(e) = sub_svc_load(svc_load).await { - errors.insert(ident, e); - } - } - if errors.is_empty() { - Ok(()) - } else { - Err(errors.into()) - } -} - -async fn sub_svc_unload(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - let timeout_in_seconds = - parse_optional_arg::("SHUTDOWN_TIMEOUT", m).map(u32::from); - let msg = sup_proto::ctl::SvcUnload { ident: Some(ident.into()), - timeout_in_seconds }; - let remote_sup_addr = remote_sup_from_input(m)?; - gateway_util::send(remote_sup_addr.as_ref(), msg).await -} - -async fn sub_svc_update(u: hab::cli::hab::svc::Update) -> Result<()> { - let ctl_addr = u.remote_sup.clone(); - let msg: sup_proto::ctl::SvcUpdate = TryFrom::try_from(u)?; - gateway_util::send(ctl_addr.inner(), msg).await -} - -async fn sub_svc_start(m: &ArgMatches<'_>) -> Result<()> { - let ident = required_pkg_ident_from_input(m)?; - let msg = sup_proto::ctl::SvcStart { ident: Some(ident.into()), }; - let remote_sup_addr = remote_sup_from_input(m)?; - gateway_util::send(remote_sup_addr.as_ref(), msg).await -} - -async fn sub_svc_status(pkg_ident: Option, - remote_sup: Option<&ResolvedListenCtlAddr>) - -> Result<()> { - let msg = sup_proto::ctl::SvcStatus { ident: pkg_ident.map(Into::into), }; - - let mut out = TabWriter::new(io::stdout()); - let mut response = SrvClient::request(remote_sup, msg).await?; - // Ensure there is at least one result from the server otherwise produce an error - if let Some(message_result) = response.next().await { - let reply = message_result?; - print_svc_status(&mut out, &reply, true)?; - } else { - return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()); - } - while let Some(message_result) = response.next().await { - let reply = message_result?; - print_svc_status(&mut out, &reply, false)?; - } - out.flush()?; - Ok(()) -} - -async fn sub_svc_stop(m: &ArgMatches<'_>) -> Result<()> { - let ident 
= required_pkg_ident_from_input(m)?; - let timeout_in_seconds = - parse_optional_arg::("SHUTDOWN_TIMEOUT", m).map(u32::from); - let msg = sup_proto::ctl::SvcStop { ident: Some(ident.into()), - timeout_in_seconds }; - let remote_sup_addr = remote_sup_from_input(m)?; - gateway_util::send(remote_sup_addr.as_ref(), msg).await -} - -async fn sub_file_put(m: &ArgMatches<'_>) -> Result<()> { - let service_group = required_value_of(m, "SERVICE_GROUP").parse::()?; - let remote_sup_addr = remote_sup_from_input(m)?; - let remote_sup_addr = SrvClient::ctl_addr(remote_sup_addr.as_ref())?; - let mut ui = ui::ui(); - let mut msg = sup_proto::ctl::SvcFilePut::default(); - let file = Path::new(required_value_of(m, "FILE")); - if file.metadata()?.len() > sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES as u64 { - ui.fatal(format!("File too large. Maximum size allowed is {} bytes.", - sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES))?; - process::exit(1); - }; - msg.service_group = Some(service_group.clone().into()); - msg.version = Some(value_t!(m, "VERSION_NUMBER", u64).unwrap()); - msg.filename = Some(file.file_name().unwrap().to_string_lossy().into_owned()); - let mut buf = Vec::with_capacity(sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES); - let key_cache = key_cache_from_matches(m)?; - - ui.begin(format!("Uploading file {} to {} incarnation {}", - file.display(), - msg.version - .as_ref() - .map(ToString::to_string) - .unwrap_or_else(|| "UNKNOWN".to_string()), - msg.service_group - .as_ref() - .map(ToString::to_string) - .unwrap_or_else(|| "UKNOWN".to_string()),))?; - ui.status(Status::Creating, "service file")?; - File::open(file)?.read_to_end(&mut buf)?; - match (service_group.org(), user_param_or_env(m)) { - (Some(_org), Some(username)) => { - // That Some(_org) bit is really "was an org specified for - // this service group?" 
- let user_key = key_cache.latest_user_secret_key(&username)?; - let service_key = key_cache.latest_service_public_key(&service_group)?; - ui.status(Status::Encrypting, - format!("file as {} for {}", - user_key.named_revision(), - service_key.named_revision()))?; - msg.content = Some(user_key.encrypt_for_service(&buf, &service_key) - .to_string() - .into_bytes()); - msg.is_encrypted = Some(true); - } - _ => msg.content = Some(buf.to_vec()), - } - ui.status(Status::Applying, format!("via peer {}", remote_sup_addr)) - .unwrap(); - let mut response = SrvClient::request(Some(&remote_sup_addr), msg).await?; - while let Some(message_result) = response.next().await { - let reply = message_result?; - match reply.message_id() { - "NetOk" => (), - "NetErr" => { - let m = reply.parse::() - .map_err(SrvClientError::Decode)?; - match ErrCode::try_from(m.code) { - Ok(ErrCode::InvalidPayload) => { - ui.warn(m)?; - } - _ => return Err(SrvClientError::from(m).into()), - } - } - _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), - } - } - ui.end("Uploaded file")?; - Ok(()) -} - -#[cfg(not(target_os = "macos"))] -async fn sub_sup_depart(member_id: String, - remote_sup: Option<&ResolvedListenCtlAddr>) - -> Result<()> { - let remote_sup = SrvClient::ctl_addr(remote_sup)?; - let mut ui = ui::ui(); - let msg = sup_proto::ctl::SupDepart { member_id: Some(member_id), }; - - ui.begin(format!("Permanently marking {} as departed", - msg.member_id.as_deref().unwrap_or("UNKNOWN"))) - .unwrap(); - ui.status(Status::Applying, format!("via peer {}", remote_sup)) - .unwrap(); - let mut response = SrvClient::request(Some(&remote_sup), msg).await?; - while let Some(message_result) = response.next().await { - let reply = message_result?; - match reply.message_id() { - "NetOk" => (), - "NetErr" => { - let m = reply.parse::() - .map_err(SrvClientError::Decode)?; - return Err(SrvClientError::from(m).into()); - } - _ => return 
Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), - } - } - ui.end("Departure recorded.")?; - Ok(()) -} - -#[cfg(not(target_os = "macos"))] -async fn sub_sup_restart(remote_sup: Option<&ResolvedListenCtlAddr>) -> Result<()> { - let remote_sup = SrvClient::ctl_addr(remote_sup)?; - let mut ui = ui::ui(); - let msg = sup_proto::ctl::SupRestart::default(); - - ui.begin(format!("Restarting supervisor {}", remote_sup))?; - let mut response = SrvClient::request(Some(&remote_sup), msg).await?; - while let Some(message_result) = response.next().await { - let reply = message_result?; - match reply.message_id() { - "NetOk" => (), - "NetErr" => { - let m = reply.parse::() - .map_err(SrvClientError::Decode)?; - return Err(SrvClientError::from(m).into()); - } - _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), - } - } - ui.end("Restart recorded.")?; - Ok(()) -} - -#[cfg(not(target_os = "macos"))] -fn sub_sup_secret_generate() -> Result<()> { - let mut ui = ui::ui(); - let mut buf = String::new(); - sup_proto::generate_secret_key(&mut buf); - ui.info(buf)?; - Ok(()) -} - -#[cfg(not(target_os = "macos"))] -fn sub_sup_secret_generate_key(subject_alternative_name: &DnsName, path: PathBuf) -> Result<()> { - Ok(ctl_gateway_tls::generate_self_signed_certificate_and_key(subject_alternative_name, path) - .map_err(habitat_core::Error::from)?) 
-} - -fn sub_supportbundle(ui: &mut UI) -> Result<()> { - init()?; - - command::supportbundle::start(ui) -} - -fn sub_ring_key_export(m: &ArgMatches<'_>) -> Result<()> { - let ring = required_value_of(m, "RING"); - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::ring::key::export::start(ring, &key_cache) -} - -fn sub_ring_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let ring = required_value_of(m, "RING"); - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::ring::key::generate::start(ui, ring, &key_cache) -} - -fn sub_ring_key_import(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let mut content = String::new(); - let key_cache = key_cache_from_matches(m)?; - init()?; - io::stdin().read_to_string(&mut content)?; - - // Trim the content to lose line feeds added by Powershell pipeline - command::ring::key::import::start(ui, content.trim(), &key_cache) -} - -fn sub_service_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let org = org_param_or_env(m)?; - let service_group = required_value_of(m, "SERVICE_GROUP").parse()?; - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::service::key::generate::start(ui, &org, &service_group, &key_cache) -} - -fn sub_user_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { - let user = required_value_of(m, "USER"); - let key_cache = key_cache_from_matches(m)?; - init()?; - - command::user::key::generate::start(ui, user, &key_cache) -} - -fn args_after_first(args_to_skip: usize) -> Vec { - env::args_os().skip(args_to_skip).collect() -} - -/// Check to see if the user has passed in an AUTH_TOKEN param. If not, check the -/// HAB_AUTH_TOKEN env var. If not, check the CLI config to see if there is a default auth -/// token set. If that's empty too, then error. 
-fn auth_token_param_or_env(m: &ArgMatches<'_>) -> Result { - match m.value_of("AUTH_TOKEN") { - Some(o) => Ok(o.to_string()), - None => { - match henv::var(AUTH_TOKEN_ENVVAR) { - Ok(v) => Ok(v), - Err(_) => { - CliConfig::load()?.auth_token.ok_or_else(|| { - Error::ArgumentError("No auth token \ - specified" - .into()) - }) - } - } - } - } -} - -/// Check to see if an auth token exists and convert it to a string slice if it does. Unlike -/// auth_token_param_or_env, it's ok for no auth token to be present here. This is useful for -/// commands that can optionally take an auth token for operating on private packages. -fn maybe_auth_token(m: &ArgMatches<'_>) -> Option { - match auth_token_param_or_env(m) { - Ok(t) => Some(t), - Err(_) => None, - } -} - -/// Check to see if the user has passed in an ORIGIN param. If not, check the HABITAT_ORIGIN env -/// var. If not, check the CLI config to see if there is a default origin set. If that's empty too, -/// then error. -// TODO (CM): sort out types better... there's a conflict with the CLI -// Origin in this module -fn origin_param_or_env(m: &ArgMatches<'_>) -> Result { - match m.value_of("ORIGIN") { - Some(o) => Ok(o.parse()?), - None => { - match henv::var(ORIGIN_ENVVAR) { - Ok(v) => Ok(v.parse()?), - Err(_) => { - CliConfig::load()?.origin.ok_or_else(|| { - Error::CryptoCLI("No origin specified".to_string()) - }) - } - } - } - } -} - -/// Check to see if the user has passed in an ORG param. -/// If not, check the HABITAT_ORG env var. If that's -/// empty too, then error. -fn org_param_or_env(m: &ArgMatches<'_>) -> Result { - match m.value_of("ORG") { - Some(o) => Ok(o.to_string()), - None => henv::var(HABITAT_ORG_ENVVAR) - .map_err(|_| Error::CryptoCLI("No organization specified".to_string())), - } -} - -/// Check to see if the user has passed in a Builder URL param. If not, check the HAB_BLDR_URL env -/// var. If not, check the CLI config to see if there is a default url set. 
If that's empty too, -/// then we'll use the default (https://bldr.habitat.sh). -fn bldr_url_from_matches(matches: &ArgMatches<'_>) -> Result { - match matches.value_of("BLDR_URL") { - Some(url) => Ok(url.to_string()), - None => { - match henv::var(BLDR_URL_ENVVAR) { - Ok(v) => Ok(v), - Err(_) => { - let config = CliConfig::load()?; - match config.bldr_url { - Some(v) => Ok(v), - None => Ok(default_bldr_url()), - } - } - } - } - } -} - -/// Resolve a channel. Taken from the environment or from CLI args, if -/// given. -fn channel_from_matches(matches: &ArgMatches<'_>) -> Option { - matches.value_of("CHANNEL").map(ChannelIdent::from) -} - -/// Resolve a channel. Taken from the environment or from CLI args. This -/// should only be called when the argument is required by the CLAP config, -/// otherwise this would panic. -fn required_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent { - channel_from_matches(matches).unwrap() -} - -/// Resolve a target channel. Taken from the environment or from CLI args. This -/// should only be called when the argument is required by the CLAP config, -/// otherwise this would panic. -fn required_target_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent { - matches.value_of("TARGET_CHANNEL") - .map(ChannelIdent::from) - .expect("TARGET_CHANNEL is a required argument!") -} - -/// Resolve a source channel. Taken from the environment or from CLI args. This -/// should only be called when the argument is required by the CLAP config, -/// otherwise this would panic. -fn required_source_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent { - matches.value_of("SOURCE_CHANNEL") - .map(ChannelIdent::from) - .expect("SOURCE_CHANNEl is a required argument!") -} -/// Resolve a channel. Taken from the environment or from CLI args, if -/// given or return the default channel value. 
-fn channel_from_matches_or_default(matches: &ArgMatches<'_>) -> ChannelIdent { - channel_from_matches(matches).unwrap_or_else(ChannelIdent::configured_value) -} - -/// Resolve a target. Default to x86_64-linux if none specified -fn target_from_matches(matches: &ArgMatches<'_>) -> Result { - matches.value_of("PKG_TARGET") - .map(PackageTarget::from_str) - .unwrap_or_else(|| Ok(active_target())) - .map_err(Error::HabitatCore) -} - -/// Return the path to create our binlinks in, or None if no binlinking should occur -fn binlink_dest_dir_from_matches(matches: &ArgMatches<'_>) -> Option { - // is_present always returns true since BINLINK_DIR has a default value, so we need to use - // occurrences_of to determine whether we actually want to do the binlinking - if matches.is_present("BINLINK") || matches.occurrences_of("BINLINK_DIR") > 0 { - matches.value_of("BINLINK_DIR").map(PathBuf::from) + if cfg!(feature = "v4") { + main_v4::main_v4().await } else { - None - } -} - -/// Helper function to determine active package target. 
-/// It overrides x86_64-darwin to be x86_64-linux in order -/// to provide a better user experience (ie, for the 99% case) -fn active_target() -> PackageTarget { - match PackageTarget::active_target() { - #[cfg(feature = "supported_targets")] - target::X86_64_DARWIN => target::X86_64_LINUX, - t => t, - } -} - -fn install_sources_from_matches(matches: &ArgMatches<'_>) -> Result> { - matches - .values_of("PKG_IDENT_OR_ARTIFACT") - .unwrap() // Required via clap - .map(|t| t.parse().map_err(Error::from)) - .collect() -} - -fn idents_from_matches(matches: &ArgMatches<'_>) -> Result> { - match matches.values_of("PKG_IDENT") { - Some(ident_strings) => { - ident_strings.map(|t| PackageIdent::from_str(t).map_err(Error::from)) - .collect() - } - _ => Ok(Vec::new()), // It's not an error to have no idents on command line - } -} - -fn idents_from_file_matches(ui: &mut UI, - matches: &ArgMatches<'_>, - cli_channel: &ChannelIdent, - cli_target: PackageTarget) - -> Result> { - let mut sources: Vec = Vec::new(); - - if let Some(files) = matches.values_of("PKG_IDENT_FILE") { - for f in files { - let filename = &f.to_string(); - if habitat_common::cli::is_toml_file(filename) { - let mut package_sets = idents_from_toml_file(ui, filename)?; - sources.append(&mut package_sets) - } else { - let idents_from_file = habitat_common::cli::file_into_idents(filename)?; - let package_set = PackageSet { idents: idents_from_file, - channel: cli_channel.clone(), - target: cli_target, }; - sources.push(package_set) - } - } - } - Ok(sources) -} - -fn idents_from_toml_file(ui: &mut UI, filename: &str) -> Result> { - let mut sources: Vec = Vec::new(); - - let file_data = std::fs::read_to_string(filename)?; - let toml_data: PackageSetFile = - toml::from_str(&file_data).map_err(habitat_common::Error::TomlParser)?; - - // We currently only accept version 1 - if toml_data.format_version.unwrap_or(1) != 1 { - return Err(Error::PackageSetParseError(format!( - "format_version invalid, only version 1 
allowed ({} provided", - toml_data.format_version.unwrap() - ))); - } - - ui.status(Status::Using, - format!("File {}, '{}'", - filename, - toml_data.file_descriptor.unwrap_or_default()))?; - - for (target, target_array) in toml_data.targets { - for package_set_value in target_array { - let channel = package_set_value.channel; - let idents: Vec = strings_to_idents(&package_set_value.packages)?; - let package_set = PackageSet { target, - channel, - idents }; - debug!("Package Set {:?}", package_set); - sources.push(package_set) - } - } - Ok(sources) -} - -fn strings_to_idents(strings: &[String]) -> Result> { - let ident_or_results: Result> = - strings.iter() - .map(|s| PackageIdent::from_str(s).map_err(Error::from)) - .collect(); - ident_or_results -} - -fn verify_from_matches(matches: &ArgMatches<'_>) -> bool { matches.is_present("VERIFY") } -fn ignore_missing_seeds_from_matches(matches: &ArgMatches<'_>) -> bool { - matches.is_present("IGNORE_MISSING_SEEDS") -} - -fn download_dir_from_matches(matches: &ArgMatches<'_>) -> Option { - matches.value_of("DOWNLOAD_DIRECTORY").map(PathBuf::from) -} - -fn excludes_from_matches(matches: &ArgMatches<'_>) -> Vec { - matches - .values_of("EXCLUDE") - .unwrap_or_default() - .map(|i| PackageIdent::from_str(i).unwrap()) // unwrap safe as we've validated the input - .collect() -} - -fn print_svc_status(out: &mut T, - reply: &SrvMessage, - print_header: bool) - -> result::Result<(), SrvClientError> - where T: io::Write -{ - let status = match reply.message_id() { - "ServiceStatus" => { - reply.parse::() - .map_err(SrvClientError::Decode)? 
- } - "NetOk" => { - println!("No services loaded."); - return Ok(()); - } - "NetErr" => { - let err = reply.parse::() - .map_err(SrvClientError::Decode)?; - return Err(SrvClientError::from(err)); - } - _ => { - warn!("Unexpected status message, {:?}", reply); - return Ok(()); - } - }; - let svc_desired_state = status.desired_state - .map_or("".to_string(), |s| s.to_string()); - let (svc_state, svc_pid, svc_elapsed) = { - match status.process { - Some(process) => { - (process.state.to_string(), - process.pid - .map_or_else(|| "".to_string(), |p| p.to_string()), - process.elapsed.unwrap_or_default().to_string()) - } - None => { - (ProcessState::default().to_string(), "".to_string(), "".to_string()) - } - } - }; - if print_header { - writeln!(out, "{}", STATUS_HEADER.join("\t")).unwrap(); - } - // Composites were removed in 0.75 but people could be - // depending on the exact format of this output even if they - // never used composites. We don't want to break their tooling - // so we hardcode in 'standalone' as it's the only supported - // package type - // - // TODO: Remove this when we have a stable machine-readable alternative - // that scripts could depend on - writeln!(out, - "{}\tstandalone\t{}\t{}\t{}\t{}\t{}", - status.ident, - DesiredState::from_str(&svc_desired_state)?, - ProcessState::from_str(&svc_state)?, - svc_elapsed, - svc_pid, - status.service_group,)?; - Ok(()) -} - -fn bulkupload_dir_from_matches(matches: &ArgMatches<'_>) -> PathBuf { - matches.value_of("UPLOAD_DIRECTORY") - .map(PathBuf::from) - .expect("CLAP-validated upload dir") -} - -fn remote_sup_from_input(m: &ArgMatches<'_>) -> Result> { - Ok(m.value_of("REMOTE_SUP") - .map(ResolvedListenCtlAddr::from_str) - .transpose()?) -} - -fn required_pkg_ident_from_input(m: &ArgMatches<'_>) -> Result { - Ok(m.value_of("PKG_IDENT") - .expect("PKG_IDENT is a required argument") - .parse()?) -} - -/// Check to see if the user has passed in a USER param. -/// If not, check the HAB_USER env var. 
If that's -/// empty too, then return an error. -fn user_param_or_env(m: &ArgMatches<'_>) -> Option { - match m.value_of("USER") { - Some(u) => Some(u.to_string()), - None => { - match env::var(HABITAT_USER_ENVVAR) { - Ok(v) => Some(v), - Err(_) => None, - } - } - } -} - -/// Helper function to get information about the argument given its name -fn required_value_of<'a>(matches: &'a ArgMatches<'a>, name: &str) -> &'a str { - matches.value_of(name) - .unwrap_or_else(|| panic!("{} CLAP required arg missing", name)) -} - -#[cfg(test)] -mod test { - use super::*; - - mod binlink_dest_dir_from_matches { - use super::*; - - habitat_core::locked_env_var!(HAB_BINLINK_DIR, lock_binlink_env_var); - - #[test] - fn no_binlink_arg() { - let env_var = lock_binlink_env_var(); - env_var.unset(); - - assert!(dest_dir_from_pkg_install(&["origin/pkg"]).is_none(), - "without a --binlink arg, there should be no BINLINK matches"); - } - - #[test] - fn env_var_but_no_binlink_arg() { - let env_var = lock_binlink_env_var(); - env_var.set("/val/from/env/var"); - - assert!(dest_dir_from_pkg_install(&["origin/pkg"]).is_none()); - } - - #[test] - #[should_panic(expected = "Invalid value")] - fn env_var_empty() { - let env_var = lock_binlink_env_var(); - env_var.set(""); - - dest_dir_from_pkg_install(&["origin/pkg"]); - } - - #[test] - fn env_var_overrides_binlink_default() { - let env_var = lock_binlink_env_var(); - let env_var_val = "/val/from/env/var"; - env_var.set(env_var_val); - - assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR); - assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", "--binlink"]), - Some(env_var_val.into()), - "with a no-value --binlink arg, the env var value should override the \ - default"); - } - - #[test] - fn binlink_dir_implies_binlink() { - let env_var = lock_binlink_env_var(); - env_var.unset(); - - let arg_val = "/val/from/args"; - assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR); - 
assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", "--binlink-dir", arg_val]), - Some(arg_val.into())); - } - - #[test] - fn arg_val_overrides_default() { - let env_var = lock_binlink_env_var(); - env_var.unset(); - - let arg_val = "/val/from/args"; - assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR); - assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", - "--binlink", - "--binlink-dir", - arg_val]), - Some(arg_val.into()), - "The --binlink value should override the default"); - } - - #[test] - fn arg_val_overrides_env_var() { - let env_var = lock_binlink_env_var(); - let env_var_val = "/val/from/env/var"; - env_var.set(env_var_val); - assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR); - - let arg_val = "/val/from/args"; - assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR); - - assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", - "--binlink", - "--binlink-dir", - arg_val]), - Some(arg_val.into()), - "The --binlink value should override the env var value"); - } - - #[test] - fn binlink_before_pkg_ident_ok() { - let env_var = lock_binlink_env_var(); - env_var.unset(); - - assert_eq!(dest_dir_from_pkg_install(&["--binlink", "origin/pkg"]), - Some(habitat_common::cli::DEFAULT_BINLINK_DIR.into())); - } - - #[test] - fn binlink_before_pkg_ident_with_env_var_ok() { - let env_var = lock_binlink_env_var(); - let env_var_val = "/val/from/env/var"; - env_var.set(env_var_val); - assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR); - - assert_eq!(dest_dir_from_pkg_install(&["--binlink", "origin/pkg"]), - Some(env_var_val.into())); - } - - fn matches_for_pkg_install<'a>(pkg_install_args: &'a [&'a str]) -> ArgMatches<'a> { - let pre_pkg_install_args = &["hab", "pkg", "install"]; - let app_matches = cli::get(FeatureFlag::empty()) - .get_matches_from_safe(pre_pkg_install_args.iter().chain(pkg_install_args.iter())) - .unwrap(); // Force panics on CLAP errors, so we can use #[should_panic] - match app_matches.subcommand() { - 
("pkg", Some(matches)) => { - match matches.subcommand() { - ("install", Some(m)) => { - println!("{:#?}", m); - m.clone() - } - _ => unreachable!(), - } - } - _ => unreachable!(), - } - } - - fn dest_dir_from_pkg_install(pkg_install_args: &[&str]) -> Option { - let pkg_install_matches = &matches_for_pkg_install(pkg_install_args); - binlink_dest_dir_from_matches(pkg_install_matches) - } + #[cfg(feature = "v2")] + main_v2::main_v2().await } } diff --git a/components/hab/src/main_v2.rs b/components/hab/src/main_v2.rs new file mode 100755 index 0000000000..0313e2c931 --- /dev/null +++ b/components/hab/src/main_v2.rs @@ -0,0 +1,2174 @@ +use clap::{value_t, + ArgMatches, + ErrorKind as ClapErrorKind, + Shell}; +use configopt::{ConfigOpt, + Error as ConfigOptError}; +use futures::stream::StreamExt; +#[cfg(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64"),))] +use hab::cli::hab::pkg::ExportCommand as PkgExportCommand; +use hab::{cli::{self, + gateway_util, + hab::{license::License, + origin::{Rbac, + RbacSet, + RbacShow}, + pkg::PkgExec, + svc::{self, + BulkLoad as SvcBulkLoad, + Load as SvcLoad, + Svc}, + util::{bldr_auth_token_from_args_env_or_load, + bldr_url_from_args_env_load_or_default}, + Hab, + Origin, + Pkg}, + parse_optional_arg}, + command::{self, + pkg::{download::{PackageSet, + PackageSetFile}, + list::ListingType, + uninstall::UninstallHookMode}}, + error::{Error, + Result}, + key_type::KeyType, + license, + scaffolding, + AUTH_TOKEN_ENVVAR, + BLDR_URL_ENVVAR, + ORIGIN_ENVVAR, + PRODUCT, + VERSION}; +use habitat_api_client::BuildOnUpload; +use habitat_common::{self as common, + cli::key_cache_from_matches, + cli_config::CliConfig, + command::package::install::{InstallHookMode, + InstallMode, + InstallSource, + LocalPackageUsage}, + types::ResolvedListenCtlAddr, + ui::{self, + Status, + UIWriter, + UI}, + FeatureFlag}; +use habitat_core::{crypto::{init, + keys::{Key, + 
KeyCache}}, + env::{self as henv, + Config as _}, + fs::{cache_artifact_path, + FS_ROOT_PATH}, + os::process::ShutdownTimeout, + package::{target, + PackageIdent, + PackageTarget}, + service::ServiceGroup, + url::default_bldr_url, + ChannelIdent}; +use habitat_sup_client::{SrvClient, + SrvClientError}; +use habitat_sup_protocol::{self as sup_proto, + codec::*, + net::ErrCode, + types::*}; +use lazy_static::lazy_static; +use log::{debug, + warn}; +use std::{collections::HashMap, + convert::TryFrom, + env, + ffi::OsString, + fs::File, + io::{self, + prelude::*, + Read}, + path::{Path, + PathBuf}, + process, + result, + str::FromStr, + string::ToString, + thread}; +use tabwriter::TabWriter; + +#[cfg(not(target_os = "macos"))] +use hab::cli::hab::sup::{HabSup, + Secret, + Sup}; +#[cfg(not(target_os = "macos"))] +use habitat_core::tls::ctl_gateway as ctl_gateway_tls; +#[cfg(not(target_os = "macos"))] +use webpki::types::DnsName; + +/// Makes the --org CLI param optional when this env var is set +const HABITAT_ORG_ENVVAR: &str = "HAB_ORG"; +/// Makes the --user CLI param optional when this env var is set +const HABITAT_USER_ENVVAR: &str = "HAB_USER"; + +lazy_static! { + static ref STATUS_HEADER: Vec<&'static str> = { + vec!["package", + "type", + "desired", + "state", + "elapsed (s)", + "pid", + "group",] + }; +} + +#[cfg(feature = "v2")] +pub(crate) async fn main_v2() { + env_logger::init(); + let mut ui = UI::default_with_env(); + let flags = FeatureFlag::from_env(&mut ui); + if let Err(e) = start(&mut ui, flags).await { + let exit_code = e.exit_code(); + ui.fatal(e).unwrap(); + std::process::exit(exit_code) + } +} + +#[allow(clippy::cognitive_complexity)] +async fn start(ui: &mut UI, feature_flags: FeatureFlag) -> Result<()> { + // We parse arguments with configopt in a separate thread to eliminate + // possible stack overflow crashes at runtime. OSX or a debug Windows build, + // for instance, will crash with our large tree. 
This is a known issue: + // https://github.com/kbknapp/clap-rs/issues/86 + let child = thread::Builder::new().stack_size(8 * 1024 * 1024) + .spawn(Hab::try_from_args_with_configopt) + .unwrap(); + let hab = child.join().unwrap(); + + if let Ok(Hab::License(License::Accept)) = hab { + license::accept_license(ui)?; + return Ok(()); + } + + // Allow checking version information and displaying command help without accepting the license. + // TODO (DM): To prevent errors in discrepancy between the structopt and cli versions only do + // this when the license has not yet been accepted. When we switch fully to structopt this can + // be completely removed and we should just call `Hab::from_args_with_configopt` which will + // automatically result in this functionality. + if !license::check_for_license_acceptance().unwrap_or_default() + .accepted() + { + if let Err(ConfigOptError::Clap(e)) = &hab { + if e.kind == ClapErrorKind::VersionDisplayed || e.kind == ClapErrorKind::HelpDisplayed { + e.exit() + } + } + } + + // We must manually detect a supervisor version check and call the `hab-sup` binary to get the + // true Supervisor version. + // TODO (DM): This is an ugly consequence of having `hab sup` subcommands handled by both the + // `hab` binary and the `hab-sup` binary. Potential fixes: + // 1. Handle all `hab sup` subcommands with the `hab-sup` binary + // 2. Have a dedicated subcommand for commands handled by the `hab-sup` binary + let mut args = env::args(); + if matches!((args.next().unwrap_or_default().as_str(), + args.next().unwrap_or_default().as_str(), + args.next().unwrap_or_default().as_str()), + (_, "sup", "--version") | (_, "sup", "-V")) + { + return command::sup::start(ui, &args_after_first(2)).await; + } + + license::check_for_license_acceptance_and_prompt(ui)?; + + // Parse and handle commands which have been migrated to use `structopt` here. 
Once everything + // is migrated to use `structopt` the parsing logic below this using clap directly will be gone. + match hab { + Ok(hab) => { + match hab { + Hab::Origin(Origin::Rbac(action)) => { + match action { + Rbac::Set(rbac_set) => { + return sub_origin_member_role_set(ui, rbac_set).await; + } + Rbac::Show(rbac_show) => { + return sub_origin_member_role_show(ui, rbac_show).await; + } + } + } + #[cfg(not(target_os = "macos"))] + Hab::Run(sup_run) => { + ui.warn("'hab run' as an alias for 'hab sup run' is deprecated. Please \ + update your automation and processes accordingly.")?; + return command::launcher::start(ui, sup_run, &args_after_first(1)).await; + } + #[cfg(any(target_os = "macos", + any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64"),)))] + Hab::Studio(studio) => { + return command::studio::enter::start(ui, studio.args()).await; + } + #[cfg(not(target_os = "macos"))] + Hab::Sup(sup) => { + match sup { + HabSup::Sup(sup) => { + // These commands are handled by the `hab-sup` or `hab-launch` binaries. + // We need to pass the subcommand that was issued to the underlying + // binary. It is a bit hacky, but to do that we strip off the `hab sup` + // command prefix and pass the rest of the args to underlying binary. 
+ let args = args_after_first(2); + match sup { + #[cfg(any( + all(target_os = "linux", any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64"), + ))] + Sup::Bash | Sup::Sh => { + return command::sup::start(ui, &args).await; + } + Sup::Term => { + return command::sup::start(ui, &args).await; + } + Sup::Run(sup_run) => { + return command::launcher::start(ui, sup_run, &args).await; + } + } + } + HabSup::Depart { member_id, + remote_sup, } => { + return sub_sup_depart(member_id, remote_sup.inner()).await; + } + HabSup::Secret(secret) => { + match secret { + Secret::Generate => return sub_sup_secret_generate(), + Secret::GenerateTls { subject_alternative_name, + path, } => { + return sub_sup_secret_generate_key(&subject_alternative_name.dns_name()?, + path) + } + } + } + HabSup::Status { pkg_ident, + remote_sup, } => { + ui.warn("'hab sup status' as an alias for 'hab svc status' is \ + deprecated. Please update your automation and processes \ + accordingly.")?; + return sub_svc_status(pkg_ident, remote_sup.inner()).await; + } + HabSup::Restart { remote_sup } => { + return sub_sup_restart(remote_sup.inner()).await; + } + } + } + Hab::Svc(svc) => { + match svc { + Svc::BulkLoad(svc_bulk_load) => { + if feature_flags.contains(FeatureFlag::SERVICE_CONFIG_FILES) { + return sub_svc_bulk_load(svc_bulk_load).await; + } else { + return Err(Error::ArgumentError(String::from("`hab svc bulkload` is only available when `HAB_FEAT_SERVICE_CONFIG_FILES` is set"))); + } + } + Svc::Load(svc_load) => { + return sub_svc_load(svc_load).await; + } + Svc::Update(svc_update) => return sub_svc_update(svc_update).await, + Svc::Status(svc_status) => { + return sub_svc_status(svc_status.pkg_ident, + svc_status.remote_sup.inner()).await; + } + _ => { + // All other commands will be caught by the CLI parsing logic below. 
+ } + } + } + #[cfg(not(target_os = "macos"))] + Hab::Term => { + ui.warn("'hab term' as an alias for 'hab sup term' is deprecated. Please \ + update your automation and processes accordingly.")?; + return command::sup::start(ui, &args_after_first(1)).await; + } + Hab::Pkg(pkg) => { + #[allow(clippy::collapsible_match)] + match pkg { + // package export is not available on platforms that have no package support + #[cfg(any(all(target_os = "linux", + any(target_arch = "x86_64", target_arch = "aarch64")), + all(target_os = "windows", target_arch = "x86_64"),))] + Pkg::Export(export) => { + match export { + #[cfg(any(target_os = "linux", target_os = "windows"))] + PkgExportCommand::Container(args) => { + return command::pkg::export::container::start(ui, &args.args).await; + } + #[cfg(any(target_os = "linux", target_os = "windows"))] + PkgExportCommand::Docker(args) => { + ui.warn("'hab pkg export docker' is now a deprecated alias \ + for 'hab pkg export container'. Please update your \ + automation and processes accordingly.")?; + return command::pkg::export::container::start(ui, &args.args).await; + } + #[cfg(any(target_os = "linux", target_os = "windows"))] + PkgExportCommand::Tar(args) => { + return command::pkg::export::tar::start(ui, &args.args).await; + } + } + } + Pkg::Exec(PkgExec { pkg_ident, + cmd, + args, }) => { + return command::pkg::exec::start(&pkg_ident.pkg_ident(), + cmd, + &args.args); + } + _ => { + // All other commands will be caught by the CLI parsing logic below. + } + } + } + _ => { + // All other commands will be caught by the CLI parsing logic below. + } + } + } + Err(e @ ConfigOptError::ConfigGenerated(_) + | e @ ConfigOptError::ConfigFile(..) + | e @ ConfigOptError::Toml(..)) => e.exit(), + Err(_) => { + // Completely ignore all other errors. They will be caught by the CLI parsing logic + // below. 
+ } + }; + + // Similar to the configopt parsing above We build the command tree in a + // separate thread to eliminate possible stack overflow crashes at runtime. + // See known issue:https://github.com/kbknapp/clap-rs/issues/86 + let cli_child = thread::Builder::new().stack_size(8 * 1024 * 1024) + .spawn(move || { + cli::get(feature_flags).get_matches_safe() + .unwrap_or_else(|e| { + e.exit(); + }) + }) + .unwrap(); + let app_matches = cli_child.join().unwrap(); + + match app_matches.subcommand() { + ("apply", Some(m)) => { + ui.warn("'hab apply' as an alias for 'hab config apply' is deprecated. Please \ + update your automation and processes accordingly.")?; + sub_svc_set(m).await? + } + ("cli", Some(matches)) => { + match matches.subcommand() { + ("setup", Some(m)) => sub_cli_setup(ui, m)?, + ("completers", Some(m)) => sub_cli_completers(m, feature_flags), + _ => unreachable!(), + } + } + ("config", Some(m)) => { + match m.subcommand() { + ("apply", Some(m)) => sub_svc_set(m).await?, + ("show", Some(m)) => sub_svc_config(m).await?, + _ => unreachable!(), + } + } + ("file", Some(m)) => { + match m.subcommand() { + ("upload", Some(m)) => sub_file_put(m).await?, + _ => unreachable!(), + } + } + ("install", Some(m)) => { + ui.warn("'hab install' as an alias for 'hab pkg install' is deprecated. Please \ + update your automation and processes accordingly.")?; + sub_pkg_install(ui, m, feature_flags).await? 
+ } + ("origin", Some(matches)) => { + match matches.subcommand() { + ("invitations", Some(m)) => { + match m.subcommand() { + ("accept", Some(sc)) => sub_accept_origin_invitation(ui, sc).await?, + ("ignore", Some(sc)) => sub_ignore_origin_invitation(ui, sc).await?, + ("list", Some(sc)) => sub_list_user_invitations(ui, sc).await?, + ("pending", Some(sc)) => sub_list_pending_origin_invitations(ui, sc).await?, + ("send", Some(sc)) => sub_send_origin_invitation(ui, sc).await?, + ("rescind", Some(sc)) => sub_rescind_origin_invitation(ui, sc).await?, + _ => unreachable!(), + } + } + ("key", Some(m)) => { + match m.subcommand() { + ("download", Some(sc)) => sub_origin_key_download(ui, sc).await?, + ("export", Some(sc)) => sub_origin_key_export(sc)?, + ("generate", Some(sc)) => sub_origin_key_generate(ui, sc)?, + ("import", Some(sc)) => sub_origin_key_import(ui, sc)?, + ("upload", Some(sc)) => sub_origin_key_upload(ui, sc).await?, + _ => unreachable!(), + } + } + ("secret", Some(m)) => { + match m.subcommand() { + ("upload", Some(sc)) => sub_origin_secret_upload(ui, sc).await?, + ("delete", Some(sc)) => sub_origin_secret_delete(ui, sc).await?, + ("list", Some(sc)) => sub_origin_secret_list(ui, sc).await?, + _ => unreachable!(), + } + } + ("create", Some(m)) => sub_origin_create(ui, m).await?, + ("delete", Some(m)) => sub_origin_delete(ui, m).await?, + ("transfer", Some(m)) => sub_origin_transfer_ownership(ui, m).await?, + ("depart", Some(m)) => sub_origin_depart(ui, m).await?, + ("info", Some(m)) => sub_origin_info(ui, m).await?, + _ => unreachable!(), + } + } + ("bldr", Some(matches)) => { + match matches.subcommand() { + ("job", Some(m)) => { + match m.subcommand() { + ("start", Some(m)) => sub_bldr_job_start(ui, m).await?, + ("cancel", Some(m)) => sub_bldr_job_cancel(ui, m).await?, + ("promote", Some(m)) => sub_bldr_job_promote_or_demote(ui, m, true).await?, + ("demote", Some(m)) => sub_bldr_job_promote_or_demote(ui, m, false).await?, + ("status", Some(m)) => 
sub_bldr_job_status(ui, m).await?, + _ => unreachable!(), + } + } + ("channel", Some(m)) => { + match m.subcommand() { + ("create", Some(m)) => sub_bldr_channel_create(ui, m).await?, + ("destroy", Some(m)) => sub_bldr_channel_destroy(ui, m).await?, + ("list", Some(m)) => sub_bldr_channel_list(ui, m).await?, + ("promote", Some(m)) => sub_bldr_channel_promote(ui, m).await?, + ("demote", Some(m)) => sub_bldr_channel_demote(ui, m).await?, + _ => unreachable!(), + } + } + _ => unreachable!(), + } + } + ("pkg", Some(matches)) => { + match matches.subcommand() { + ("binds", Some(m)) => sub_pkg_binds(m)?, + ("binlink", Some(m)) => sub_pkg_binlink(ui, m)?, + ("build", Some(m)) => sub_pkg_build(ui, m, feature_flags).await?, + ("channels", Some(m)) => sub_pkg_channels(ui, m).await?, + ("config", Some(m)) => sub_pkg_config(m)?, + ("dependencies", Some(m)) => sub_pkg_dependencies(m)?, + ("download", Some(m)) => sub_pkg_download(ui, m, feature_flags).await?, + ("env", Some(m)) => sub_pkg_env(m)?, + ("hash", Some(m)) => sub_pkg_hash(m)?, + ("install", Some(m)) => sub_pkg_install(ui, m, feature_flags).await?, + ("list", Some(m)) => sub_pkg_list(m)?, + ("path", Some(m)) => sub_pkg_path(m)?, + ("provides", Some(m)) => sub_pkg_provides(m)?, + ("search", Some(m)) => sub_pkg_search(m).await?, + ("sign", Some(m)) => sub_pkg_sign(ui, m)?, + ("uninstall", Some(m)) => sub_pkg_uninstall(ui, m).await?, + ("upload", Some(m)) => sub_pkg_upload(ui, m).await?, + ("bulkupload", Some(m)) => sub_pkg_bulkupload(ui, m).await?, + ("delete", Some(m)) => sub_pkg_delete(ui, m).await?, + ("verify", Some(m)) => sub_pkg_verify(ui, m)?, + ("header", Some(m)) => sub_pkg_header(ui, m)?, + ("info", Some(m)) => sub_pkg_info(ui, m)?, + ("promote", Some(m)) => sub_pkg_promote(ui, m).await?, + ("demote", Some(m)) => sub_pkg_demote(ui, m).await?, + _ => unreachable!(), + } + } + ("plan", Some(matches)) => { + match matches.subcommand() { + ("init", Some(m)) => sub_plan_init(ui, m)?, + ("render", Some(m)) => 
sub_plan_render(ui, m)?, + _ => unreachable!(), + } + } + ("ring", Some(matches)) => { + match matches.subcommand() { + ("key", Some(m)) => { + match m.subcommand() { + ("export", Some(sc)) => sub_ring_key_export(sc)?, + ("import", Some(sc)) => sub_ring_key_import(ui, sc)?, + ("generate", Some(sc)) => sub_ring_key_generate(ui, sc)?, + _ => unreachable!(), + } + } + _ => unreachable!(), + } + } + ("svc", Some(matches)) => { + match matches.subcommand() { + ("key", Some(m)) => { + match m.subcommand() { + ("generate", Some(sc)) => sub_service_key_generate(ui, sc)?, + _ => unreachable!(), + } + } + ("unload", Some(m)) => sub_svc_unload(m).await?, + ("start", Some(m)) => sub_svc_start(m).await?, + ("stop", Some(m)) => sub_svc_stop(m).await?, + _ => unreachable!(), + } + } + ("supportbundle", _) => sub_supportbundle(ui)?, + ("setup", Some(m)) => { + ui.warn("'hab setup' as an alias for 'hab cli setup' is deprecated. Please update \ + your automation and processes accordingly.")?; + sub_cli_setup(ui, m)? + } + ("start", Some(m)) => { + ui.warn("'hab start' as an alias for 'hab svc start' is deprecated. Please update \ + your automation and processes accordingly.")?; + sub_svc_start(m).await? + } + ("stop", Some(m)) => { + ui.warn("'hab stop' as an alias for 'hab svc stop' is deprecated. Please update \ + your automation and processes accordingly.")?; + sub_svc_stop(m).await? 
+ } + ("user", Some(matches)) => { + match matches.subcommand() { + ("key", Some(m)) => { + match m.subcommand() { + ("generate", Some(sc)) => sub_user_key_generate(ui, sc)?, + _ => unreachable!(), + } + } + _ => unreachable!(), + } + } + _ => unreachable!(), + }; + Ok(()) +} + +fn sub_cli_setup(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let key_cache = key_cache_from_matches(m)?; + init()?; + + command::cli::setup::start(ui, &key_cache) +} + +fn sub_cli_completers(m: &ArgMatches<'_>, feature_flags: FeatureFlag) { + let shell = m.value_of("SHELL") + .expect("Missing Shell; A shell is required"); + + // TODO (CM): Interesting... the completions generated can depend + // on what feature flags happen to be enabled at the time you + // generated the completions + cli::get(feature_flags).gen_completions_to("hab", + shell.parse::().unwrap(), + &mut io::stdout()); +} + +async fn sub_origin_key_download(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN").parse()?; + let revision = m.value_of("REVISION"); + let with_secret = m.is_present("WITH_SECRET"); + let with_encryption = m.is_present("WITH_ENCRYPTION"); + let token = maybe_auth_token(m); + let url = bldr_url_from_matches(m)?; + let key_cache = key_cache_from_matches(m)?; + + command::origin::key::download::start(ui, + &url, + &origin, + revision, + with_secret, + with_encryption, + token.as_deref(), + &key_cache).await +} + +fn sub_origin_key_export(m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN").parse()?; + let key_type = KeyType::from_str(m.value_of("KEY_TYPE").unwrap_or("public"))?; + let key_cache = key_cache_from_matches(m)?; + init()?; + + command::origin::key::export::start(&origin, key_type, &key_cache) +} + +fn sub_origin_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = origin_param_or_env(m)?; + let key_cache = key_cache_from_matches(m)?; + init()?; + + command::origin::key::generate::start(ui, 
&origin, &key_cache) +} + +fn sub_origin_key_import(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let mut content = String::new(); + let key_cache = key_cache_from_matches(m)?; + init()?; + io::stdin().read_to_string(&mut content)?; + + // Trim the content to lose line feeds added by Powershell pipeline + command::origin::key::import::start(ui, content.trim(), &key_cache) +} + +async fn sub_origin_key_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + let key_cache = key_cache_from_matches(m)?; + + init()?; + + match m.value_of("ORIGIN") { + Some(origin) => { + let origin = origin.parse()?; + // you can either specify files, or infer the latest key names + let with_secret = m.is_present("WITH_SECRET"); + command::origin::key::upload_latest::start(ui, + &url, + &token, + &origin, + with_secret, + &key_cache).await + } + None => { + let keyfile = Path::new(required_value_of(m, "PUBLIC_FILE")); + let secret_keyfile = m.value_of("SECRET_FILE").map(Path::new); + command::origin::key::upload::start(ui, &url, &token, keyfile, secret_keyfile).await + } + } +} + +async fn sub_origin_secret_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + let origin = origin_param_or_env(m)?; + let key = required_value_of(m, "KEY_NAME"); + let secret = required_value_of(m, "SECRET"); + let key_cache = key_cache_from_matches(m)?; + command::origin::secret::upload::start(ui, &url, &token, &origin, key, secret, &key_cache).await +} + +async fn sub_origin_secret_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + let origin = origin_param_or_env(m)?; + let key = required_value_of(m, "KEY_NAME"); + command::origin::secret::delete::start(ui, &url, &token, &origin, key).await +} + +async fn sub_origin_secret_list(ui: &mut UI, m: 
&ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + let origin = origin_param_or_env(m)?; + command::origin::secret::list::start(ui, &url, &token, &origin).await +} + +async fn sub_origin_create(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::create::start(ui, &url, &token, origin).await +} + +async fn sub_origin_info(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + let to_json = m.is_present("TO_JSON"); + command::origin::info::start(ui, &url, &token, origin, to_json).await +} + +async fn sub_origin_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::delete::start(ui, &url, &token, origin).await +} + +async fn sub_origin_transfer_ownership(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let account = required_value_of(m, "NEW_OWNER_ACCOUNT"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::transfer::start(ui, &url, &token, origin, account).await +} + +async fn sub_origin_depart(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::depart::start(ui, &url, &token, origin).await +} + +async fn sub_accept_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let invitation_id = + required_value_of(m, "INVITATION_ID").parse() + .expect("INVITATION_ID should be valid at this 
point"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::accept::start(ui, &url, origin, &token, invitation_id).await +} + +async fn sub_ignore_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let invitation_id = + required_value_of(m, "INVITATION_ID").parse() + .expect("INVITATION_ID should be valid at this point"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::ignore::start(ui, &url, origin, &token, invitation_id).await +} + +async fn sub_list_user_invitations(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::list_user::start(ui, &url, &token).await +} + +async fn sub_list_pending_origin_invitations(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::list_pending_origin::start(ui, &url, origin, &token).await +} + +async fn sub_rescind_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let invitation_id = + required_value_of(m, "INVITATION_ID").parse() + .expect("INVITATION_ID should be valid at this point"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::rescind::start(ui, &url, origin, &token, invitation_id).await +} + +async fn sub_send_origin_invitation(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = required_value_of(m, "ORIGIN"); + let invitee_account = required_value_of(m, "INVITEE_ACCOUNT"); + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + command::origin::invitations::send::start(ui, &url, origin, &token, 
invitee_account).await +} + +async fn sub_origin_member_role_show(ui: &mut UI, r: RbacShow) -> Result<()> { + let bldr_url = bldr_url_from_args_env_load_or_default(r.bldr_url.value)?; + let auth_token = bldr_auth_token_from_args_env_or_load(r.auth_token.value)?; + command::origin::rbac::show_role::start(ui, + bldr_url, + r.origin.inner, + &auth_token, + &r.member_account, + r.to_json).await +} + +async fn sub_origin_member_role_set(ui: &mut UI, r: RbacSet) -> Result<()> { + let bldr_url = bldr_url_from_args_env_load_or_default(r.bldr_url.value)?; + let auth_token = bldr_auth_token_from_args_env_or_load(r.auth_token.value)?; + command::origin::rbac::set_role::start(ui, + bldr_url, + r.origin.inner, + &auth_token, + &r.member_account, + r.role, + r.no_prompt).await +} + +fn sub_pkg_binlink(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + let dest_dir = Path::new(required_value_of(m, "DEST_DIR")); + let force = m.is_present("FORCE"); + match m.value_of("BINARY") { + Some(binary) => { + command::pkg::binlink::start(ui, &ident, binary, dest_dir, &FS_ROOT_PATH, force) + } + None => { + command::pkg::binlink::binlink_all_in_pkg(ui, &ident, dest_dir, &FS_ROOT_PATH, force) + } + } +} + +/// Generate a (possibly empty) list of `Origin`s from the value of +/// the `HAB_ORIGIN_KEYS` environment variable / `--keys` argument. 
+fn hab_key_origins(m: &ArgMatches<'_>) -> Result> { + m.values_of("HAB_ORIGIN_KEYS") + .unwrap_or_default() + .map(|n| n.parse().map_err(Into::into)) + .collect() +} + +#[allow(unused_variables)] +async fn sub_pkg_build(ui: &mut UI, m: &ArgMatches<'_>, feature_flags: FeatureFlag) -> Result<()> { + let plan_context = required_value_of(m, "PLAN_CONTEXT"); + let root = m.value_of("HAB_STUDIO_ROOT"); + let src = m.value_of("SRC_PATH"); + let refresh_channel = m.value_of("REFRESH_CHANNEL"); + + let origins = hab_key_origins(m)?; + if !origins.is_empty() { + init()?; + let key_cache = key_cache_from_matches(m)?; + for origin in origins.iter() { + // Validate that a secret signing key is present on disk + // for each origin. + key_cache.latest_secret_origin_signing_key(origin)?; + } + } + + #[cfg(target_family = "unix")] + let native_package = if m.is_present("NATIVE_PACKAGE") { + if !feature_flags.contains(FeatureFlag::NATIVE_PACKAGE_SUPPORT) { + return Err(Error::ArgumentError(String::from("`--native-package` is \ + only available when \ + `HAB_FEAT_NATIVE_PACKAGE_SUPPORT` \ + is set"))); + } + true + } else { + false + }; + #[cfg(target_family = "windows")] + let native_package = false; + + let docker = m.is_present("DOCKER"); + let reuse = m.is_present("REUSE"); + + command::pkg::build::start(ui, + plan_context, + root, + src, + &origins, + native_package, + reuse, + docker, + refresh_channel).await +} + +fn sub_pkg_config(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + common::command::package::config::start(&ident, &*FS_ROOT_PATH)?; + Ok(()) +} + +fn sub_pkg_binds(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + common::command::package::binds::start(&ident, &*FS_ROOT_PATH)?; + Ok(()) +} + +fn sub_pkg_dependencies(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + let scope = if m.is_present("TRANSITIVE") { + command::pkg::Scope::PackageAndDependencies + 
} else { + command::pkg::Scope::Package + }; + + let direction = if m.is_present("REVERSE") { + command::pkg::DependencyRelation::Supports + } else { + command::pkg::DependencyRelation::Requires + }; + command::pkg::dependencies::start(&ident, scope, direction, &FS_ROOT_PATH) +} + +async fn sub_pkg_download(ui: &mut UI, + m: &ArgMatches<'_>, + _feature_flags: FeatureFlag) + -> Result<()> { + let token = maybe_auth_token(m); + let url = bldr_url_from_matches(m)?; + let download_dir = download_dir_from_matches(m); + + // Construct flat file based inputs + let channel = channel_from_matches_or_default(m); + let target = target_from_matches(m)?; + + let install_sources = idents_from_matches(m)?; + + let mut package_sets = vec![PackageSet { target, + channel: channel.clone(), + idents: install_sources }]; + + let mut install_sources_from_file = idents_from_file_matches(ui, m, &channel, target)?; + package_sets.append(&mut install_sources_from_file); + package_sets.retain(|set| !set.idents.is_empty()); + + let verify = verify_from_matches(m); + let ignore_missing_seeds = ignore_missing_seeds_from_matches(m); + + init()?; + + command::pkg::download::start(ui, + &url, + PRODUCT, + VERSION, + &package_sets, + download_dir.as_ref(), + token.as_deref(), + verify, + ignore_missing_seeds).await?; + Ok(()) +} + +fn sub_pkg_env(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + command::pkg::env::start(&ident, &FS_ROOT_PATH) +} + +fn sub_pkg_hash(m: &ArgMatches<'_>) -> Result<()> { + init()?; + match m.value_of("SOURCE") { + Some(source) => { + // hash single file + command::pkg::hash::start(source) + } + None => { + // read files from stdin + let stdin = io::stdin(); + for line in stdin.lock().lines() { + let file = line?; + command::pkg::hash::start(file.trim_end())?; + } + Ok(()) + } + } +} + +async fn sub_pkg_uninstall(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + let execute_strategy = 
if m.is_present("DRYRUN") { + command::pkg::ExecutionStrategy::DryRun + } else { + command::pkg::ExecutionStrategy::Run + }; + let mode = command::pkg::uninstall::UninstallMode::from(m); + let scope = if m.is_present("NO_DEPS") { + command::pkg::Scope::Package + } else { + command::pkg::Scope::PackageAndDependencies + }; + let excludes = excludes_from_matches(m); + let uninstall_hook_mode = if m.is_present("IGNORE_UNINSTALL_HOOK") { + UninstallHookMode::Ignore + } else { + UninstallHookMode::default() + }; + + command::pkg::uninstall::start(ui, + &ident, + &FS_ROOT_PATH, + execute_strategy, + mode, + scope, + &excludes, + uninstall_hook_mode).await +} + +async fn sub_bldr_channel_create(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let origin = origin_param_or_env(m)?; + let channel = required_channel_from_matches(m); + let token = auth_token_param_or_env(m)?; + command::bldr::channel::create::start(ui, &url, &token, &origin, &channel).await +} + +async fn sub_bldr_channel_destroy(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let origin = origin_param_or_env(m)?; + let channel = required_channel_from_matches(m); + let token = auth_token_param_or_env(m)?; + command::bldr::channel::destroy::start(ui, &url, &token, &origin, &channel).await +} + +async fn sub_bldr_channel_list(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let origin = origin_param_or_env(m)?; + let include_sandbox_channels = m.is_present("SANDBOX"); + command::bldr::channel::list::start(ui, &url, &origin, include_sandbox_channels).await +} + +async fn sub_bldr_channel_promote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let origin = origin_param_or_env(m)?; + let token = auth_token_param_or_env(m)?; + let source_channel = required_source_channel_from_matches(m); + let target_channel = required_target_channel_from_matches(m); + 
command::bldr::channel::promote::start(ui, + &url, + &token, + &origin, + &source_channel, + &target_channel).await +} + +async fn sub_bldr_channel_demote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let origin = origin_param_or_env(m)?; + let token = auth_token_param_or_env(m)?; + let source_channel = required_source_channel_from_matches(m); + let target_channel = required_target_channel_from_matches(m); + command::bldr::channel::demote::start(ui, + &url, + &token, + &origin, + &source_channel, + &target_channel).await +} + +#[allow(unused)] +async fn sub_bldr_job_start(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + Err(Error::BuilderBuildFunctionsRemoved) +} + +#[allow(unused)] +async fn sub_bldr_job_cancel(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + Err(Error::BuilderBuildFunctionsRemoved) +} + +#[allow(unused)] +async fn sub_bldr_job_promote_or_demote(ui: &mut UI, + m: &ArgMatches<'_>, + promote: bool) + -> Result<()> { + Err(Error::BuilderBuildFunctionsRemoved) +} + +#[allow(unused)] +async fn sub_bldr_job_status(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + Err(Error::BuilderBuildFunctionsRemoved) +} + +fn sub_plan_init(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let name = m.value_of("PKG_NAME").map(String::from); + let origin = origin_param_or_env(m)?; + let minimal = m.is_present("MIN"); + let scaffolding_ident = if cfg!(windows) { + match m.value_of("SCAFFOLDING") { + Some(scaffold) => Some(PackageIdent::from_str(scaffold)?), + None => None, + } + } else { + scaffolding::scaffold_check(ui, m.value_of("SCAFFOLDING"))? 
+ }; + + command::plan::init::start(ui, &origin, minimal, scaffolding_ident, name) +} + +fn sub_plan_render(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let template_path = required_value_of(m, "TEMPLATE_PATH"); + let template_path = Path::new(template_path); + + let default_toml_path = required_value_of(m, "DEFAULT_TOML"); + let default_toml_path = Path::new(default_toml_path); + + let user_toml_path = m.value_of("USER_TOML").map(Path::new); + + let mock_data_path = m.value_of("MOCK_DATA").map(Path::new); + + let print = m.is_present("PRINT"); + let render = !m.is_present("NO_RENDER"); + let quiet = m.is_present("QUIET"); + + let render_dir = required_value_of(m, "RENDER_DIR"); + let render_dir = Path::new(render_dir); + + command::plan::render::start(ui, + template_path, + default_toml_path, + user_toml_path, + mock_data_path, + print, + render, + render_dir, + quiet) +} + +async fn sub_pkg_install(ui: &mut UI, + m: &ArgMatches<'_>, + feature_flags: FeatureFlag) + -> Result<()> { + let url = bldr_url_from_matches(m)?; + let channel = channel_from_matches_or_default(m); + let install_sources = install_sources_from_matches(m)?; + let token = maybe_auth_token(m); + let install_mode = + if feature_flags.contains(FeatureFlag::OFFLINE_INSTALL) && m.is_present("OFFLINE") { + InstallMode::Offline + } else { + InstallMode::default() + }; + + let local_package_usage = if m.is_present("IGNORE_LOCAL") { + LocalPackageUsage::Ignore + } else { + LocalPackageUsage::default() + }; + + let install_hook_mode = if m.is_present("IGNORE_INSTALL_HOOK") { + InstallHookMode::Ignore + } else { + InstallHookMode::default() + }; + + init()?; + + for install_source in install_sources.iter() { + let pkg_install = + common::command::package::install::start(ui, + &url, + &channel, + install_source, + PRODUCT, + VERSION, + &FS_ROOT_PATH, + &cache_artifact_path(Some(FS_ROOT_PATH.as_path())), + token.as_deref(), + &install_mode, + &local_package_usage, + install_hook_mode).await?; + + if 
let Some(dest_dir) = binlink_dest_dir_from_matches(m) { + let force = m.is_present("FORCE"); + command::pkg::binlink::binlink_all_in_pkg(ui, + pkg_install.ident(), + &dest_dir, + &FS_ROOT_PATH, + force)?; + } + } + Ok(()) +} + +fn sub_pkg_path(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + command::pkg::path::start(&ident, &FS_ROOT_PATH) +} + +fn sub_pkg_list(m: &ArgMatches<'_>) -> Result<()> { + let listing_type = ListingType::from(m); + + command::pkg::list::start(&listing_type) +} + +fn sub_pkg_provides(m: &ArgMatches<'_>) -> Result<()> { + let filename = required_value_of(m, "FILE"); + + let full_releases = m.is_present("FULL_RELEASES"); + let full_paths = m.is_present("FULL_PATHS"); + + command::pkg::provides::start(filename, &FS_ROOT_PATH, full_releases, full_paths) +} + +async fn sub_pkg_search(m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let search_term = required_value_of(m, "SEARCH_TERM"); + let limit = required_value_of(m, "LIMIT").parse().expect("valid LIMIT"); + let token = maybe_auth_token(m); + command::pkg::search::start(search_term, &url, limit, token.as_deref()).await +} + +fn sub_pkg_sign(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let origin = origin_param_or_env(m)?; + + let src = Path::new(required_value_of(m, "SOURCE")); + let dst = Path::new(required_value_of(m, "DEST")); + + let key_cache = key_cache_from_matches(m)?; + + init()?; + + let key = key_cache.latest_secret_origin_signing_key(&origin)?; + command::pkg::sign::start(ui, &key, src, dst) +} + +async fn sub_pkg_bulkupload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let upload_dir = bulkupload_dir_from_matches(m); + let artifact_path = upload_dir.join("artifacts"); + let key_path = upload_dir.join("keys"); + let key_cache = KeyCache::new(key_path); + key_cache.setup()?; + + let url = bldr_url_from_matches(m)?; + let additional_release_channel = channel_from_matches(m); + let force_upload = 
m.is_present("FORCE"); + let auto_build = if m.is_present("AUTO_BUILD") { + BuildOnUpload::PackageDefault + } else { + BuildOnUpload::Disable + }; + let auto_create_origins = m.is_present("AUTO_CREATE_ORIGINS"); + let token = auth_token_param_or_env(m)?; + + command::pkg::bulkupload::start(ui, + &url, + &additional_release_channel, + &token, + &artifact_path, + force_upload, + auto_build, + auto_create_origins, + &key_cache).await +} + +async fn sub_pkg_upload(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let key_cache = key_cache_from_matches(m)?; + let url = bldr_url_from_matches(m)?; + + // When packages are uploaded, they *always* go to `unstable`; + // they can optionally get added to another channel, too. + let additional_release_channel = channel_from_matches(m); + + // When packages are uploaded we check if they exist in the db + // before allowing a write to the backend, this bypasses the check + let force_upload = m.is_present("FORCE"); + + let auto_build = if m.is_present("NO_BUILD") { + BuildOnUpload::Disable + } else { + BuildOnUpload::PackageDefault + }; + + let token = auth_token_param_or_env(m)?; + let artifact_paths = m.values_of("HART_FILE").unwrap(); // Required via clap + for artifact_path in artifact_paths.map(Path::new) { + command::pkg::upload::start(ui, + &url, + &additional_release_channel, + &token, + artifact_path, + force_upload, + auto_build, + &key_cache).await?; + } + Ok(()) +} + +async fn sub_pkg_delete(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let token = auth_token_param_or_env(m)?; + let ident = required_pkg_ident_from_input(m)?; + let target = target_from_matches(m)?; + + command::pkg::delete::start(ui, &url, (&ident, target), &token).await?; + + Ok(()) +} + +fn sub_pkg_verify(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let src = Path::new(required_value_of(m, "SOURCE")); + let key_cache = key_cache_from_matches(m)?; + init()?; + + command::pkg::verify::start(ui, src, 
&key_cache) +} + +fn sub_pkg_header(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let src = Path::new(required_value_of(m, "SOURCE")); + init()?; + + command::pkg::header::start(ui, src) +} + +fn sub_pkg_info(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let src = Path::new(required_value_of(m, "SOURCE")); + let to_json = m.is_present("TO_JSON"); + init()?; + + command::pkg::info::start(ui, src, to_json) +} + +async fn sub_pkg_promote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let channel = required_channel_from_matches(m); + let token = auth_token_param_or_env(m)?; + let target = target_from_matches(m)?; + let ident = required_pkg_ident_from_input(m)?; + command::pkg::promote::start(ui, &url, (&ident, target), &channel, &token).await +} + +async fn sub_pkg_demote(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let channel = required_channel_from_matches(m); + let token = auth_token_param_or_env(m)?; + let target = target_from_matches(m)?; + let ident = required_pkg_ident_from_input(m)?; + command::pkg::demote::start(ui, &url, (&ident, target), &channel, &token).await +} + +async fn sub_pkg_channels(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let url = bldr_url_from_matches(m)?; + let ident = required_pkg_ident_from_input(m)?; + let token = maybe_auth_token(m); + let target = target_from_matches(m)?; + + command::pkg::channels::start(ui, &url, (&ident, target), token.as_deref()).await +} + +async fn sub_svc_set(m: &ArgMatches<'_>) -> Result<()> { + let remote_sup_addr = remote_sup_from_input(m)?; + let remote_sup_addr = SrvClient::ctl_addr(remote_sup_addr.as_ref())?; + let service_group = required_value_of(m, "SERVICE_GROUP").parse::()?; + let mut ui = ui::ui(); + let mut validate = sup_proto::ctl::SvcValidateCfg { service_group: + Some(service_group.clone().into()), + ..Default::default() }; + let mut buf = 
Vec::with_capacity(sup_proto::butterfly::MAX_SVC_CFG_SIZE); + let cfg_len = match m.value_of("FILE") { + Some("-") | None => io::stdin().read_to_end(&mut buf)?, + Some(f) => { + let mut file = File::open(f)?; + file.read_to_end(&mut buf)? + } + }; + if cfg_len > sup_proto::butterfly::MAX_SVC_CFG_SIZE { + ui.fatal(format!("Configuration too large. Maximum size allowed is {} bytes.", + sup_proto::butterfly::MAX_SVC_CFG_SIZE))?; + process::exit(1); + } + validate.cfg = Some(buf.clone()); + let key_cache = key_cache_from_matches(m)?; + + let mut set = sup_proto::ctl::SvcSetCfg::default(); + match (service_group.org(), user_param_or_env(m)) { + (Some(_org), Some(username)) => { + let user_key = key_cache.latest_user_secret_key(&username)?; + let service_key = key_cache.latest_service_public_key(&service_group)?; + ui.status(Status::Encrypting, + format!("TOML as {} for {}", + user_key.named_revision(), + service_key.named_revision()))?; + set.cfg = Some(user_key.encrypt_for_service(&buf, &service_key) + .to_string() + .into_bytes()); + set.is_encrypted = Some(true); + } + _ => set.cfg = Some(buf.to_vec()), + } + set.service_group = Some(service_group.into()); + set.version = Some(value_t!(m, "VERSION_NUMBER", u64).unwrap()); + ui.begin(format!("Setting new configuration version {} for {}", + set.version + .as_ref() + .map(ToString::to_string) + .unwrap_or_else(|| "UNKNOWN".to_string()), + set.service_group + .as_ref() + .map(ToString::to_string) + .unwrap_or_else(|| "UNKNOWN".to_string()),))?; + ui.status(Status::Creating, "service configuration")?; + let mut response = SrvClient::request(Some(&remote_sup_addr), validate).await?; + while let Some(message_result) = response.next().await { + let reply = message_result?; + match reply.message_id() { + "NetOk" => (), + "NetErr" => { + let m = reply.parse::() + .map_err(SrvClientError::Decode)?; + match ErrCode::try_from(m.code) { + Ok(ErrCode::InvalidPayload) => { + ui.warn(m)?; + } + _ => return 
Err(SrvClientError::from(m).into()), + } + } + _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), + } + } + ui.status(Status::Applying, format!("via peer {}", remote_sup_addr))?; + let mut response = SrvClient::request(Some(&remote_sup_addr), set).await?; + while let Some(message_result) = response.next().await { + let reply = message_result?; + match reply.message_id() { + "NetOk" => (), + "NetErr" => { + let m = reply.parse::() + .map_err(SrvClientError::Decode)?; + return Err(SrvClientError::from(m).into()); + } + _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), + } + } + ui.end("Applied configuration")?; + Ok(()) +} + +async fn sub_svc_config(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + let remote_sup_addr = remote_sup_from_input(m)?; + let msg = sup_proto::ctl::SvcGetDefaultCfg { ident: Some(ident.into()), }; + let mut response = SrvClient::request(remote_sup_addr.as_ref(), msg).await?; + while let Some(message_result) = response.next().await { + let reply = message_result?; + match reply.message_id() { + "ServiceCfg" => { + reply.parse::() + .map_err(SrvClientError::Decode)?; + } + "NetErr" => { + let m = reply.parse::() + .map_err(SrvClientError::Decode)?; + return Err(SrvClientError::from(m).into()); + } + _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), + } + } + Ok(()) +} + +async fn sub_svc_load(svc_load: SvcLoad) -> Result<()> { + let remote_sup_addr = svc_load.remote_sup.clone(); + let msg = habitat_sup_protocol::ctl::SvcLoad::try_from(svc_load)?; + gateway_util::send(remote_sup_addr.inner(), msg).await +} + +async fn sub_svc_bulk_load(svc_bulk_load: SvcBulkLoad) -> Result<()> { + let mut errors = HashMap::new(); + for svc_load in svc::svc_loads_from_paths(&svc_bulk_load.svc_config_paths)? 
{ + let ident = svc_load.pkg_ident.clone().pkg_ident(); + if let Err(e) = sub_svc_load(svc_load).await { + errors.insert(ident, e); + } + } + if errors.is_empty() { + Ok(()) + } else { + Err(errors.into()) + } +} + +async fn sub_svc_unload(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + let timeout_in_seconds = + parse_optional_arg::("SHUTDOWN_TIMEOUT", m).map(u32::from); + let msg = sup_proto::ctl::SvcUnload { ident: Some(ident.into()), + timeout_in_seconds }; + let remote_sup_addr = remote_sup_from_input(m)?; + gateway_util::send(remote_sup_addr.as_ref(), msg).await +} + +async fn sub_svc_update(u: hab::cli::hab::svc::Update) -> Result<()> { + let ctl_addr = u.remote_sup.clone(); + let msg: sup_proto::ctl::SvcUpdate = TryFrom::try_from(u)?; + gateway_util::send(ctl_addr.inner(), msg).await +} + +async fn sub_svc_start(m: &ArgMatches<'_>) -> Result<()> { + let ident = required_pkg_ident_from_input(m)?; + let msg = sup_proto::ctl::SvcStart { ident: Some(ident.into()), }; + let remote_sup_addr = remote_sup_from_input(m)?; + gateway_util::send(remote_sup_addr.as_ref(), msg).await +} + +async fn sub_svc_status(pkg_ident: Option, + remote_sup: Option<&ResolvedListenCtlAddr>) + -> Result<()> { + let msg = sup_proto::ctl::SvcStatus { ident: pkg_ident.map(Into::into), }; + + let mut out = TabWriter::new(io::stdout()); + let mut response = SrvClient::request(remote_sup, msg).await?; + // Ensure there is at least one result from the server otherwise produce an error + if let Some(message_result) = response.next().await { + let reply = message_result?; + print_svc_status(&mut out, &reply, true)?; + } else { + return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()); + } + while let Some(message_result) = response.next().await { + let reply = message_result?; + print_svc_status(&mut out, &reply, false)?; + } + out.flush()?; + Ok(()) +} + +async fn sub_svc_stop(m: &ArgMatches<'_>) -> Result<()> { + let ident 
= required_pkg_ident_from_input(m)?; + let timeout_in_seconds = + parse_optional_arg::("SHUTDOWN_TIMEOUT", m).map(u32::from); + let msg = sup_proto::ctl::SvcStop { ident: Some(ident.into()), + timeout_in_seconds }; + let remote_sup_addr = remote_sup_from_input(m)?; + gateway_util::send(remote_sup_addr.as_ref(), msg).await +} + +async fn sub_file_put(m: &ArgMatches<'_>) -> Result<()> { + let service_group = required_value_of(m, "SERVICE_GROUP").parse::()?; + let remote_sup_addr = remote_sup_from_input(m)?; + let remote_sup_addr = SrvClient::ctl_addr(remote_sup_addr.as_ref())?; + let mut ui = ui::ui(); + let mut msg = sup_proto::ctl::SvcFilePut::default(); + let file = Path::new(required_value_of(m, "FILE")); + if file.metadata()?.len() > sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES as u64 { + ui.fatal(format!("File too large. Maximum size allowed is {} bytes.", + sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES))?; + process::exit(1); + }; + msg.service_group = Some(service_group.clone().into()); + msg.version = Some(value_t!(m, "VERSION_NUMBER", u64).unwrap()); + msg.filename = Some(file.file_name().unwrap().to_string_lossy().into_owned()); + let mut buf = Vec::with_capacity(sup_proto::butterfly::MAX_FILE_PUT_SIZE_BYTES); + let key_cache = key_cache_from_matches(m)?; + + ui.begin(format!("Uploading file {} to {} incarnation {}", + file.display(), + msg.version + .as_ref() + .map(ToString::to_string) + .unwrap_or_else(|| "UNKNOWN".to_string()), + msg.service_group + .as_ref() + .map(ToString::to_string) + .unwrap_or_else(|| "UNKNOWN".to_string()),))?; + ui.status(Status::Creating, "service file")?; + File::open(file)?.read_to_end(&mut buf)?; + match (service_group.org(), user_param_or_env(m)) { + (Some(_org), Some(username)) => { + // That Some(_org) bit is really "was an org specified for + // this service group?" 
+ let user_key = key_cache.latest_user_secret_key(&username)?; + let service_key = key_cache.latest_service_public_key(&service_group)?; + ui.status(Status::Encrypting, + format!("file as {} for {}", + user_key.named_revision(), + service_key.named_revision()))?; + msg.content = Some(user_key.encrypt_for_service(&buf, &service_key) + .to_string() + .into_bytes()); + msg.is_encrypted = Some(true); + } + _ => msg.content = Some(buf.to_vec()), + } + ui.status(Status::Applying, format!("via peer {}", remote_sup_addr)) + .unwrap(); + let mut response = SrvClient::request(Some(&remote_sup_addr), msg).await?; + while let Some(message_result) = response.next().await { + let reply = message_result?; + match reply.message_id() { + "NetOk" => (), + "NetErr" => { + let m = reply.parse::() + .map_err(SrvClientError::Decode)?; + match ErrCode::try_from(m.code) { + Ok(ErrCode::InvalidPayload) => { + ui.warn(m)?; + } + _ => return Err(SrvClientError::from(m).into()), + } + } + _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), + } + } + ui.end("Uploaded file")?; + Ok(()) +} + +#[cfg(not(target_os = "macos"))] +async fn sub_sup_depart(member_id: String, + remote_sup: Option<&ResolvedListenCtlAddr>) + -> Result<()> { + let remote_sup = SrvClient::ctl_addr(remote_sup)?; + let mut ui = ui::ui(); + let msg = sup_proto::ctl::SupDepart { member_id: Some(member_id), }; + + ui.begin(format!("Permanently marking {} as departed", + msg.member_id.as_deref().unwrap_or("UNKNOWN"))) + .unwrap(); + ui.status(Status::Applying, format!("via peer {}", remote_sup)) + .unwrap(); + let mut response = SrvClient::request(Some(&remote_sup), msg).await?; + while let Some(message_result) = response.next().await { + let reply = message_result?; + match reply.message_id() { + "NetOk" => (), + "NetErr" => { + let m = reply.parse::() + .map_err(SrvClientError::Decode)?; + return Err(SrvClientError::from(m).into()); + } + _ => return 
Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), + } + } + ui.end("Departure recorded.")?; + Ok(()) +} + +#[cfg(not(target_os = "macos"))] +async fn sub_sup_restart(remote_sup: Option<&ResolvedListenCtlAddr>) -> Result<()> { + let remote_sup = SrvClient::ctl_addr(remote_sup)?; + let mut ui = ui::ui(); + let msg = sup_proto::ctl::SupRestart::default(); + + ui.begin(format!("Restarting supervisor {}", remote_sup))?; + let mut response = SrvClient::request(Some(&remote_sup), msg).await?; + while let Some(message_result) = response.next().await { + let reply = message_result?; + match reply.message_id() { + "NetOk" => (), + "NetErr" => { + let m = reply.parse::() + .map_err(SrvClientError::Decode)?; + return Err(SrvClientError::from(m).into()); + } + _ => return Err(SrvClientError::from(io::Error::from(io::ErrorKind::UnexpectedEof)).into()), + } + } + ui.end("Restart recorded.")?; + Ok(()) +} + +#[cfg(not(target_os = "macos"))] +fn sub_sup_secret_generate() -> Result<()> { + let mut ui = ui::ui(); + let mut buf = String::new(); + sup_proto::generate_secret_key(&mut buf); + ui.info(buf)?; + Ok(()) +} + +#[cfg(not(target_os = "macos"))] +fn sub_sup_secret_generate_key(subject_alternative_name: &DnsName, path: PathBuf) -> Result<()> { + Ok(ctl_gateway_tls::generate_self_signed_certificate_and_key(subject_alternative_name, path) + .map_err(habitat_core::Error::from)?) 
+} + +fn sub_supportbundle(ui: &mut UI) -> Result<()> { + init()?; + + command::supportbundle::start(ui) +} + +fn sub_ring_key_export(m: &ArgMatches<'_>) -> Result<()> { + let ring = required_value_of(m, "RING"); + let key_cache = key_cache_from_matches(m)?; + init()?; + + command::ring::key::export::start(ring, &key_cache) +} + +fn sub_ring_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let ring = required_value_of(m, "RING"); + let key_cache = key_cache_from_matches(m)?; + init()?; + + command::ring::key::generate::start(ui, ring, &key_cache) +} + +fn sub_ring_key_import(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let mut content = String::new(); + let key_cache = key_cache_from_matches(m)?; + init()?; + io::stdin().read_to_string(&mut content)?; + + // Trim the content to lose line feeds added by Powershell pipeline + command::ring::key::import::start(ui, content.trim(), &key_cache) +} + +fn sub_service_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let org = org_param_or_env(m)?; + let service_group = required_value_of(m, "SERVICE_GROUP").parse()?; + let key_cache = key_cache_from_matches(m)?; + init()?; + + command::service::key::generate::start(ui, &org, &service_group, &key_cache) +} + +fn sub_user_key_generate(ui: &mut UI, m: &ArgMatches<'_>) -> Result<()> { + let user = required_value_of(m, "USER"); + let key_cache = key_cache_from_matches(m)?; + init()?; + + command::user::key::generate::start(ui, user, &key_cache) +} + +fn args_after_first(args_to_skip: usize) -> Vec { + env::args_os().skip(args_to_skip).collect() +} + +/// Check to see if the user has passed in an AUTH_TOKEN param. If not, check the +/// HAB_AUTH_TOKEN env var. If not, check the CLI config to see if there is a default auth +/// token set. If that's empty too, then error. 
+fn auth_token_param_or_env(m: &ArgMatches<'_>) -> Result { + match m.value_of("AUTH_TOKEN") { + Some(o) => Ok(o.to_string()), + None => { + match henv::var(AUTH_TOKEN_ENVVAR) { + Ok(v) => Ok(v), + Err(_) => { + CliConfig::load()?.auth_token.ok_or_else(|| { + Error::ArgumentError("No auth token \ + specified" + .into()) + }) + } + } + } + } +} + +/// Check to see if an auth token exists and convert it to a string slice if it does. Unlike +/// auth_token_param_or_env, it's ok for no auth token to be present here. This is useful for +/// commands that can optionally take an auth token for operating on private packages. +fn maybe_auth_token(m: &ArgMatches<'_>) -> Option { + match auth_token_param_or_env(m) { + Ok(t) => Some(t), + Err(_) => None, + } +} + +/// Check to see if the user has passed in an ORIGIN param. If not, check the HABITAT_ORIGIN env +/// var. If not, check the CLI config to see if there is a default origin set. If that's empty too, +/// then error. +// TODO (CM): sort out types better... there's a conflict with the CLI +// Origin in this module +fn origin_param_or_env(m: &ArgMatches<'_>) -> Result { + match m.value_of("ORIGIN") { + Some(o) => Ok(o.parse()?), + None => { + match henv::var(ORIGIN_ENVVAR) { + Ok(v) => Ok(v.parse()?), + Err(_) => { + CliConfig::load()?.origin.ok_or_else(|| { + Error::CryptoCLI("No origin specified".to_string()) + }) + } + } + } + } +} + +/// Check to see if the user has passed in an ORG param. +/// If not, check the HABITAT_ORG env var. If that's +/// empty too, then error. +fn org_param_or_env(m: &ArgMatches<'_>) -> Result { + match m.value_of("ORG") { + Some(o) => Ok(o.to_string()), + None => henv::var(HABITAT_ORG_ENVVAR) + .map_err(|_| Error::CryptoCLI("No organization specified".to_string())), + } +} + +/// Check to see if the user has passed in a Builder URL param. If not, check the HAB_BLDR_URL env +/// var. If not, check the CLI config to see if there is a default url set. 
If that's empty too, +/// then we'll use the default (https://bldr.habitat.sh). +fn bldr_url_from_matches(matches: &ArgMatches<'_>) -> Result { + match matches.value_of("BLDR_URL") { + Some(url) => Ok(url.to_string()), + None => { + match henv::var(BLDR_URL_ENVVAR) { + Ok(v) => Ok(v), + Err(_) => { + let config = CliConfig::load()?; + match config.bldr_url { + Some(v) => Ok(v), + None => Ok(default_bldr_url()), + } + } + } + } + } +} + +/// Resolve a channel. Taken from the environment or from CLI args, if +/// given. +fn channel_from_matches(matches: &ArgMatches<'_>) -> Option { + matches.value_of("CHANNEL").map(ChannelIdent::from) +} + +/// Resolve a channel. Taken from the environment or from CLI args. This +/// should only be called when the argument is required by the CLAP config, +/// otherwise this would panic. +fn required_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent { + channel_from_matches(matches).unwrap() +} + +/// Resolve a target channel. Taken from the environment or from CLI args. This +/// should only be called when the argument is required by the CLAP config, +/// otherwise this would panic. +fn required_target_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent { + matches.value_of("TARGET_CHANNEL") + .map(ChannelIdent::from) + .expect("TARGET_CHANNEL is a required argument!") +} + +/// Resolve a source channel. Taken from the environment or from CLI args. This +/// should only be called when the argument is required by the CLAP config, +/// otherwise this would panic. +fn required_source_channel_from_matches(matches: &ArgMatches<'_>) -> ChannelIdent { + matches.value_of("SOURCE_CHANNEL") + .map(ChannelIdent::from) + .expect("SOURCE_CHANNEL is a required argument!") +} +/// Resolve a channel. Taken from the environment or from CLI args, if +/// given or return the default channel value. 
+fn channel_from_matches_or_default(matches: &ArgMatches<'_>) -> ChannelIdent { + channel_from_matches(matches).unwrap_or_else(ChannelIdent::configured_value) +} + +/// Resolve a target. Default to x86_64-linux if none specified +fn target_from_matches(matches: &ArgMatches<'_>) -> Result { + matches.value_of("PKG_TARGET") + .map(PackageTarget::from_str) + .unwrap_or_else(|| Ok(active_target())) + .map_err(Error::HabitatCore) +} + +/// Return the path to create our binlinks in, or None if no binlinking should occur +fn binlink_dest_dir_from_matches(matches: &ArgMatches<'_>) -> Option { + // is_present always returns true since BINLINK_DIR has a default value, so we need to use + // occurrences_of to determine whether we actually want to do the binlinking + if matches.is_present("BINLINK") || matches.occurrences_of("BINLINK_DIR") > 0 { + matches.value_of("BINLINK_DIR").map(PathBuf::from) + } else { + None + } +} + +/// Helper function to determine active package target. +/// It overrides x86_64-darwin to be x86_64-linux in order +/// to provide a better user experience (ie, for the 99% case) +fn active_target() -> PackageTarget { + match PackageTarget::active_target() { + #[cfg(feature = "supported_targets")] + target::X86_64_DARWIN => target::X86_64_LINUX, + t => t, + } +} + +fn install_sources_from_matches(matches: &ArgMatches<'_>) -> Result> { + matches + .values_of("PKG_IDENT_OR_ARTIFACT") + .unwrap() // Required via clap + .map(|t| t.parse().map_err(Error::from)) + .collect() +} + +fn idents_from_matches(matches: &ArgMatches<'_>) -> Result> { + match matches.values_of("PKG_IDENT") { + Some(ident_strings) => { + ident_strings.map(|t| PackageIdent::from_str(t).map_err(Error::from)) + .collect() + } + _ => Ok(Vec::new()), // It's not an error to have no idents on command line + } +} + +fn idents_from_file_matches(ui: &mut UI, + matches: &ArgMatches<'_>, + cli_channel: &ChannelIdent, + cli_target: PackageTarget) + -> Result> { + let mut sources: Vec = Vec::new(); + 
+ if let Some(files) = matches.values_of("PKG_IDENT_FILE") { + for f in files { + let filename = &f.to_string(); + if habitat_common::cli::is_toml_file(filename) { + let mut package_sets = idents_from_toml_file(ui, filename)?; + sources.append(&mut package_sets) + } else { + let idents_from_file = habitat_common::cli::file_into_idents(filename)?; + let package_set = PackageSet { idents: idents_from_file, + channel: cli_channel.clone(), + target: cli_target, }; + sources.push(package_set) + } + } + } + Ok(sources) +} + +fn idents_from_toml_file(ui: &mut UI, filename: &str) -> Result> { + let mut sources: Vec = Vec::new(); + + let file_data = std::fs::read_to_string(filename)?; + let toml_data: PackageSetFile = + toml::from_str(&file_data).map_err(habitat_common::Error::TomlParser)?; + + // We currently only accept version 1 + if toml_data.format_version.unwrap_or(1) != 1 { + return Err(Error::PackageSetParseError(format!( + "format_version invalid, only version 1 allowed ({} provided)", + toml_data.format_version.unwrap() + ))); + } + + ui.status(Status::Using, + format!("File {}, '{}'", + filename, + toml_data.file_descriptor.unwrap_or_default()))?; + + for (target, target_array) in toml_data.targets { + for package_set_value in target_array { + let channel = package_set_value.channel; + let idents: Vec = strings_to_idents(&package_set_value.packages)?; + let package_set = PackageSet { target, + channel, + idents }; + debug!("Package Set {:?}", package_set); + sources.push(package_set) + } + } + Ok(sources) +} + +fn strings_to_idents(strings: &[String]) -> Result> { + let ident_or_results: Result> = + strings.iter() + .map(|s| PackageIdent::from_str(s).map_err(Error::from)) + .collect(); + ident_or_results +} + +fn verify_from_matches(matches: &ArgMatches<'_>) -> bool { matches.is_present("VERIFY") } +fn ignore_missing_seeds_from_matches(matches: &ArgMatches<'_>) -> bool { + matches.is_present("IGNORE_MISSING_SEEDS") +} + +fn download_dir_from_matches(matches: 
&ArgMatches<'_>) -> Option { + matches.value_of("DOWNLOAD_DIRECTORY").map(PathBuf::from) +} + +fn excludes_from_matches(matches: &ArgMatches<'_>) -> Vec { + matches + .values_of("EXCLUDE") + .unwrap_or_default() + .map(|i| PackageIdent::from_str(i).unwrap()) // unwrap safe as we've validated the input + .collect() +} + +fn print_svc_status(out: &mut T, + reply: &SrvMessage, + print_header: bool) + -> result::Result<(), SrvClientError> + where T: io::Write +{ + let status = match reply.message_id() { + "ServiceStatus" => { + reply.parse::() + .map_err(SrvClientError::Decode)? + } + "NetOk" => { + println!("No services loaded."); + return Ok(()); + } + "NetErr" => { + let err = reply.parse::() + .map_err(SrvClientError::Decode)?; + return Err(SrvClientError::from(err)); + } + _ => { + warn!("Unexpected status message, {:?}", reply); + return Ok(()); + } + }; + let svc_desired_state = status.desired_state + .map_or("".to_string(), |s| s.to_string()); + let (svc_state, svc_pid, svc_elapsed) = { + match status.process { + Some(process) => { + (process.state.to_string(), + process.pid + .map_or_else(|| "".to_string(), |p| p.to_string()), + process.elapsed.unwrap_or_default().to_string()) + } + None => { + (ProcessState::default().to_string(), "".to_string(), "".to_string()) + } + } + }; + if print_header { + writeln!(out, "{}", STATUS_HEADER.join("\t")).unwrap(); + } + // Composites were removed in 0.75 but people could be + // depending on the exact format of this output even if they + // never used composites. 
We don't want to break their tooling + // so we hardcode in 'standalone' as it's the only supported + // package type + // + // TODO: Remove this when we have a stable machine-readable alternative + // that scripts could depend on + writeln!(out, + "{}\tstandalone\t{}\t{}\t{}\t{}\t{}", + status.ident, + DesiredState::from_str(&svc_desired_state)?, + ProcessState::from_str(&svc_state)?, + svc_elapsed, + svc_pid, + status.service_group,)?; + Ok(()) +} + +fn bulkupload_dir_from_matches(matches: &ArgMatches<'_>) -> PathBuf { + matches.value_of("UPLOAD_DIRECTORY") + .map(PathBuf::from) + .expect("CLAP-validated upload dir") +} + +fn remote_sup_from_input(m: &ArgMatches<'_>) -> Result> { + Ok(m.value_of("REMOTE_SUP") + .map(ResolvedListenCtlAddr::from_str) + .transpose()?) +} + +fn required_pkg_ident_from_input(m: &ArgMatches<'_>) -> Result { + Ok(m.value_of("PKG_IDENT") + .expect("PKG_IDENT is a required argument") + .parse()?) +} + +/// Check to see if the user has passed in a USER param. +/// If not, check the HAB_USER env var. If that's +/// empty too, then return an error. 
+fn user_param_or_env(m: &ArgMatches<'_>) -> Option { + match m.value_of("USER") { + Some(u) => Some(u.to_string()), + None => { + match env::var(HABITAT_USER_ENVVAR) { + Ok(v) => Some(v), + Err(_) => None, + } + } + } +} + +/// Helper function to get information about the argument given its name +fn required_value_of<'a>(matches: &'a ArgMatches<'a>, name: &str) -> &'a str { + matches.value_of(name) + .unwrap_or_else(|| panic!("{} CLAP required arg missing", name)) +} + +#[cfg(test)] +mod test { + use super::*; + + mod binlink_dest_dir_from_matches { + use super::*; + + habitat_core::locked_env_var!(HAB_BINLINK_DIR, lock_binlink_env_var); + + #[test] + fn no_binlink_arg() { + let env_var = lock_binlink_env_var(); + env_var.unset(); + + assert!(dest_dir_from_pkg_install(&["origin/pkg"]).is_none(), + "without a --binlink arg, there should be no BINLINK matches"); + } + + #[test] + fn env_var_but_no_binlink_arg() { + let env_var = lock_binlink_env_var(); + env_var.set("/val/from/env/var"); + + assert!(dest_dir_from_pkg_install(&["origin/pkg"]).is_none()); + } + + #[test] + #[should_panic(expected = "Invalid value")] + fn env_var_empty() { + let env_var = lock_binlink_env_var(); + env_var.set(""); + + dest_dir_from_pkg_install(&["origin/pkg"]); + } + + #[test] + fn env_var_overrides_binlink_default() { + let env_var = lock_binlink_env_var(); + let env_var_val = "/val/from/env/var"; + env_var.set(env_var_val); + + assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR); + assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", "--binlink"]), + Some(env_var_val.into()), + "with a no-value --binlink arg, the env var value should override the \ + default"); + } + + #[test] + fn binlink_dir_implies_binlink() { + let env_var = lock_binlink_env_var(); + env_var.unset(); + + let arg_val = "/val/from/args"; + assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR); + assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", "--binlink-dir", arg_val]), + 
Some(arg_val.into())); + } + + #[test] + fn arg_val_overrides_default() { + let env_var = lock_binlink_env_var(); + env_var.unset(); + + let arg_val = "/val/from/args"; + assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR); + assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", + "--binlink", + "--binlink-dir", + arg_val]), + Some(arg_val.into()), + "The --binlink value should override the default"); + } + + #[test] + fn arg_val_overrides_env_var() { + let env_var = lock_binlink_env_var(); + let env_var_val = "/val/from/env/var"; + env_var.set(env_var_val); + assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR); + + let arg_val = "/val/from/args"; + assert_ne!(arg_val, habitat_common::cli::DEFAULT_BINLINK_DIR); + + assert_eq!(dest_dir_from_pkg_install(&["origin/pkg", + "--binlink", + "--binlink-dir", + arg_val]), + Some(arg_val.into()), + "The --binlink value should override the env var value"); + } + + #[test] + fn binlink_before_pkg_ident_ok() { + let env_var = lock_binlink_env_var(); + env_var.unset(); + + assert_eq!(dest_dir_from_pkg_install(&["--binlink", "origin/pkg"]), + Some(habitat_common::cli::DEFAULT_BINLINK_DIR.into())); + } + + #[test] + fn binlink_before_pkg_ident_with_env_var_ok() { + let env_var = lock_binlink_env_var(); + let env_var_val = "/val/from/env/var"; + env_var.set(env_var_val); + assert_ne!(env_var_val, habitat_common::cli::DEFAULT_BINLINK_DIR); + + assert_eq!(dest_dir_from_pkg_install(&["--binlink", "origin/pkg"]), + Some(env_var_val.into())); + } + + fn matches_for_pkg_install<'a>(pkg_install_args: &'a [&'a str]) -> ArgMatches<'a> { + let pre_pkg_install_args = &["hab", "pkg", "install"]; + let app_matches = cli::get(FeatureFlag::empty()) + .get_matches_from_safe(pre_pkg_install_args.iter().chain(pkg_install_args.iter())) + .unwrap(); // Force panics on CLAP errors, so we can use #[should_panic] + match app_matches.subcommand() { + ("pkg", Some(matches)) => { + match matches.subcommand() { + ("install", Some(m)) 
=> { + println!("{:#?}", m); + m.clone() + } + _ => unreachable!(), + } + } + _ => unreachable!(), + } + } + + fn dest_dir_from_pkg_install(pkg_install_args: &[&str]) -> Option { + let pkg_install_matches = &matches_for_pkg_install(pkg_install_args); + binlink_dest_dir_from_matches(pkg_install_matches) + } + } +} diff --git a/components/hab/src/main_v4.rs b/components/hab/src/main_v4.rs new file mode 100644 index 0000000000..23034fe897 --- /dev/null +++ b/components/hab/src/main_v4.rs @@ -0,0 +1,24 @@ +#[cfg(feature = "v4")] +use habitat_common::{ui::{UIWriter, + UI}, + FeatureFlag}; + +#[cfg(feature = "v4")] +use hab::cli_driver; + +#[cfg(feature = "v4")] +pub(crate) async fn main_v4() { + let mut ui = UI::default_with_env(); + let features = FeatureFlag::from_env(&mut ui); + if let Err(e) = cli_driver(&mut ui, features).await { + let exit_code = e.exit_code(); + ui.fatal(e).unwrap(); + std::process::exit(exit_code) + } +} + +// Hack required for now to have this compile when v4 is not enabled +#[cfg(not(feature = "v4"))] +pub(crate) async fn main_v4() { + unreachable!(); +} diff --git a/components/pkg-export-tar/src/lib.rs b/components/pkg-export-tar/src/lib.rs index a4be3c6364..a662d47ffc 100644 --- a/components/pkg-export-tar/src/lib.rs +++ b/components/pkg-export-tar/src/lib.rs @@ -40,7 +40,7 @@ async fn export_for_cli_matches(ui: &mut UI, cli: &cli::Cli) -> Result<()> { async fn export(ui: &mut UI, build_spec: BuildSpec<'_>) -> Result<()> { let hab_pkg = build_spec.hab; - let build_result = build_spec.create(ui).await.unwrap(); + let build_result = build_spec.create(ui).await?; let builder_dir_path = build_result.0.path(); let pkg_ident = build_result.1;