diff --git a/Cargo.lock b/Cargo.lock index e20bdad7e17dbf..d55c1a19aa3b38 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1275,7 +1275,7 @@ dependencies = [ "deno_npm", "deno_npm_cache", "deno_package_json", - "deno_path_util 0.3.0", + "deno_path_util", "deno_resolver", "deno_runtime", "deno_semver", @@ -1434,6 +1434,7 @@ version = "0.178.0" dependencies = [ "async-trait", "deno_core", + "deno_error", "thiserror 2.0.3", "tokio", "uuid", @@ -1445,6 +1446,7 @@ version = "0.116.0" dependencies = [ "async-trait", "deno_core", + "deno_error", "rusqlite", "serde", "sha2", @@ -1467,7 +1469,7 @@ dependencies = [ "data-url", "deno_error", "deno_media_type", - "deno_path_util 0.3.0", + "deno_path_util", "http 1.1.0", "indexmap 2.3.0", "log", @@ -1486,6 +1488,7 @@ name = "deno_canvas" version = "0.53.0" dependencies = [ "deno_core", + "deno_error", "deno_webgpu", "image", "serde", @@ -1494,13 +1497,14 @@ dependencies = [ [[package]] name = "deno_config" -version = "0.42.0" +version = "0.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45aaf31e58ca915d5c0746bf8e2d07b94635154ad9e5afe5ff265cae6187b19" +checksum = "6c4c11bd51ef6738cabfc3c53f16c209a0b8615cb1e4e5bf3b14e3b5deebfe21" dependencies = [ - "anyhow", + "boxed_error", + "deno_error", "deno_package_json", - "deno_path_util 0.3.0", + "deno_path_util", "deno_semver", "glob", "ignore", @@ -1513,7 +1517,7 @@ dependencies = [ "serde", "serde_json", "sys_traits", - "thiserror 1.0.64", + "thiserror 2.0.3", "url", ] @@ -1526,9 +1530,9 @@ dependencies = [ [[package]] name = "deno_core" -version = "0.327.0" +version = "0.330.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaf8dff204b9c2415deb47b9f30d4d38b0925d0d88f1f9074e8e76f59e6d7ded" +checksum = "fd38bbbd68ed873165ccb630322704b44140d3a8c8d50f898beac4d1a8a3358c" dependencies = [ "anyhow", "az", @@ -1539,6 +1543,7 @@ dependencies = [ "capacity_builder 0.1.3", "cooked-waker", "deno_core_icudata", + "deno_error", "deno_ops", "deno_unsync", "futures", @@ -1554,6 +1559,7 @@ dependencies = [ "smallvec", "sourcemap 8.0.1", "static_assertions", + "thiserror 2.0.3", "tokio", "url", "v8", @@ -1574,6 +1580,7 @@ dependencies = [ "async-trait", "chrono", "deno_core", + "deno_error", "saffron", "thiserror 2.0.3", "tokio", @@ -1592,6 +1599,7 @@ dependencies = [ "ctr", "curve25519-dalek", "deno_core", + "deno_error", "deno_web", "ed448-goldilocks", "elliptic-curve", @@ -1618,16 +1626,17 @@ dependencies = [ [[package]] name = "deno_doc" -version = "0.161.3" +version = "0.164.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353a39c70d248af04600928cefc8066a9e4535fb6e7d7c518411e5efc822819f" +checksum = "ad1edb02603c7e8a4003c84af2482a05e5eda3a14f1af275434fda89223f054d" dependencies = [ "anyhow", "cfg-if", "comrak", "deno_ast", "deno_graph", - "deno_path_util 0.2.2", + "deno_path_util", + "deno_terminal 0.2.0", "handlebars", "html-escape", "import_map", @@ -1647,22 +1656,23 @@ dependencies = [ [[package]] name = "deno_error" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "199c66ffd17ee1a948904d33f3d3f364573951c1f9fb3f859bfe7770bf33862a" +checksum = "c4da6a58de6932a96f84e133c072fd3b525966ee122a71f3efd48bbff2eed5ac" dependencies = [ "deno_error_macro", "libc", "serde", "serde_json", + "tokio", "url", ] [[package]] name = "deno_error_macro" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3cd99df6ae75443907e1f959fc42ec6dcea67a7bd083e76cf23a117102c9a2ce" +checksum = "46351dff93aed2039407c91e2ded2a5591e42d2795ab3d111288625bb710d3d2" dependencies = [ "proc-macro2", "quote", @@ -1677,7 +1687,8 @@ dependencies = [ "bytes", "data-url", "deno_core", - "deno_path_util 0.3.0", + "deno_error", + "deno_path_util", "deno_permissions", "deno_tls", "dyn-clone", @@ -1710,6 +1721,7 @@ name = "deno_ffi" version = "0.171.0" dependencies = [ "deno_core", + "deno_error", "deno_permissions", "dlopen2", "dynasmrt", @@ -1733,8 +1745,9 @@ dependencies = [ "base32", "boxed_error", "deno_core", + "deno_error", "deno_io", - "deno_path_util 0.3.0", + "deno_path_util", "deno_permissions", "filetime", "junction", @@ -1750,17 +1763,17 @@ dependencies = [ [[package]] name = "deno_graph" -version = "0.86.9" +version = "0.87.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa96b7d353c0d36b108ec504272b148792f48dccaf29f302b39c46b43457bb94" +checksum = "f56d4eb4b7c81ae920b6d18c45a1866924f93110caee80bbbc362dc28143f2bb" dependencies = [ - "anyhow", "async-trait", "capacity_builder 0.5.0", "data-url", "deno_ast", + "deno_error", "deno_media_type", - "deno_path_util 0.3.0", + "deno_path_util", "deno_semver", "deno_unsync", "encoding_rs", @@ -1794,6 +1807,7 @@ dependencies = [ "bytes", "cache_control", "deno_core", + "deno_error", "deno_net", "deno_websocket", "flate2", @@ -1827,6 +1841,7 @@ version = "0.94.0" dependencies = [ "async-trait", "deno_core", + "deno_error", "filetime", "fs3", "libc", @@ -1853,8 +1868,9 @@ dependencies = [ "bytes", "chrono", "deno_core", + "deno_error", "deno_fetch", - "deno_path_util 0.3.0", + "deno_path_util", "deno_permissions", "deno_tls", "denokv_proto", @@ -1920,6 +1936,7 @@ name = "deno_napi" version = "0.115.0" dependencies = [ "deno_core", + "deno_error", "deno_permissions", "libc", "libloading 0.7.4", @@ -1948,6 +1965,7 @@ name = "deno_net" version = "0.176.0" dependencies = [ "deno_core", + "deno_error", "deno_permissions", "deno_tls", "hickory-proto", @@ -1977,13 +1995,14 @@ dependencies = [ "const-oid", "data-encoding", "deno_core", + "deno_error", "deno_fetch", "deno_fs", "deno_io", "deno_media_type", "deno_net", "deno_package_json", - "deno_path_util 0.3.0", + "deno_path_util", "deno_permissions", "deno_whoami", "der", @@ -2085,7 +2104,7 @@ dependencies = [ "deno_core", "deno_error", "deno_npm", - "deno_path_util 0.3.0", + "deno_path_util", "deno_semver", "deno_unsync", "faster-hex", @@ -2107,10 +2126,11 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.203.0" +version = "0.206.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b146ca74cac431843486ade58e2accc16c11315fb2c6934590a52a73c56b7ec3" +checksum = "4c25ffa9d088ea00748dbef870bba110ac22ebf8cf7b2e9eb288409c5d852af3" dependencies = [ + "indexmap 2.3.0", "proc-macro-rules", "proc-macro2", "quote", @@ -2118,7 +2138,7 @@ dependencies = [ "strum", "strum_macros", "syn 2.0.87", - "thiserror 1.0.64", + "thiserror 2.0.3", ] [[package]] @@ -2129,7 +2149,7 @@ checksum = "e1d3c0f699ba2040669204ce24ab73720499fc290af843e4ce0fc8a9b3d67735" dependencies = [ "boxed_error", "deno_error", - "deno_path_util 0.3.0", + "deno_path_util", "deno_semver", "indexmap 2.3.0", "serde", @@ -2139,18 +2159,6 @@ dependencies = [ "url", ] -[[package]] -name = "deno_path_util" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b02c7d341e1b2cf089daff0f4fb2b4be8f3b5511b1d96040b3f7ed63a66c737b" -dependencies = [ - "deno_error", - 
"percent-encoding", - "thiserror 2.0.3", - "url", -] - [[package]] name = "deno_path_util" version = "0.3.0" @@ -2170,7 +2178,8 @@ version = "0.43.0" dependencies = [ "capacity_builder 0.5.0", "deno_core", - "deno_path_util 0.3.0", + "deno_error", + "deno_path_util", "deno_terminal 0.2.0", "fqdn", "libc", @@ -2192,9 +2201,10 @@ dependencies = [ "boxed_error", "dashmap", "deno_config", + "deno_error", "deno_media_type", "deno_package_json", - "deno_path_util 0.3.0", + "deno_path_util", "deno_semver", "node_resolver", "sys_traits", @@ -2216,6 +2226,7 @@ dependencies = [ "deno_core", "deno_cron", "deno_crypto", + "deno_error", "deno_fetch", "deno_ffi", "deno_fs", @@ -2225,7 +2236,7 @@ dependencies = [ "deno_napi", "deno_net", "deno_node", - "deno_path_util 0.3.0", + "deno_path_util", "deno_permissions", "deno_telemetry", "deno_terminal 0.2.0", @@ -2314,6 +2325,7 @@ version = "0.6.0" dependencies = [ "async-trait", "deno_core", + "deno_error", "http-body-util", "hyper 1.4.1", "hyper-util", @@ -2326,6 +2338,7 @@ dependencies = [ "opentelemetry_sdk", "pin-project", "serde", + "thiserror 2.0.3", "tokio", ] @@ -2354,6 +2367,7 @@ name = "deno_tls" version = "0.171.0" dependencies = [ "deno_core", + "deno_error", "deno_native_certs", "rustls", "rustls-pemfile", @@ -2406,6 +2420,7 @@ dependencies = [ "deno_bench_util", "deno_console", "deno_core", + "deno_error", "deno_webidl", "thiserror 2.0.3", "urlpattern", @@ -2421,6 +2436,7 @@ dependencies = [ "deno_bench_util", "deno_console", "deno_core", + "deno_error", "deno_permissions", "deno_url", "deno_webidl", @@ -2438,6 +2454,7 @@ name = "deno_webgpu" version = "0.151.0" dependencies = [ "deno_core", + "deno_error", "raw-window-handle", "serde", "thiserror 2.0.3", @@ -2460,6 +2477,7 @@ version = "0.189.0" dependencies = [ "bytes", "deno_core", + "deno_error", "deno_net", "deno_permissions", "deno_tls", @@ -2481,6 +2499,7 @@ name = "deno_webstorage" version = "0.179.0" dependencies = [ "deno_core", + "deno_error", "deno_web", "rusqlite", "thiserror 2.0.3", @@ -2498,13 +2517,13 @@ dependencies = [ [[package]] name = "denokv_proto" -version = "0.8.4" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7ba1f99ed11a9c11e868a8521b1f71a7e1aba785d7f42ea9ecbdc01146c89ec" +checksum = "d5b77de4d3b9215e14624d4f4eb16cb38c0810e3f5860ba3b3fc47d0537f9a4d" dependencies = [ - "anyhow", "async-trait", "chrono", + "deno_error", "futures", "num-bigint", "prost", @@ -2514,15 +2533,15 @@ dependencies = [ [[package]] name = "denokv_remote" -version = "0.8.4" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08ed833073189e8f6d03155fe3b05a024e75e29d8a28a4c2e9ec3b5c925e727b" +checksum = "c6497c28eec268ed99f1e8664f0842935f02d1508529c67d94c57ca5d893d743" dependencies = [ - "anyhow", "async-stream", "async-trait", "bytes", "chrono", + "deno_error", "denokv_proto", "futures", "http 1.1.0", @@ -2531,6 +2550,7 @@ dependencies = [ "rand", "serde", "serde_json", + "thiserror 2.0.3", "tokio", "tokio-util", "url", @@ -2539,14 +2559,14 @@ dependencies = [ [[package]] name = "denokv_sqlite" -version = "0.8.4" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b790f01d1302d53a0c3cbd27de88a06b3abd64ec8ab8673924e490541c7c713" +checksum = "dc0f21a450a35eb85760761401fddf9bfff9840127be07a6ca5c31863127913d" dependencies = [ - "anyhow", "async-stream", "async-trait", "chrono", + "deno_error", "denokv_proto", "futures", "hex", @@ -2555,7 +2575,7 @@ dependencies 
= [ "rand", "rusqlite", "serde_json", - "thiserror 1.0.64", + "thiserror 2.0.3", "tokio", "tokio-stream", "uuid", @@ -4331,16 +4351,18 @@ dependencies = [ [[package]] name = "import_map" -version = "0.20.1" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "351a787decc56f38d65d16d32687265045d6d6a4531b4a0e1b649def3590354e" +checksum = "1215d4d92511fbbdaea50e750e91f2429598ef817f02b579158e92803b52c00a" dependencies = [ + "boxed_error", + "deno_error", "indexmap 2.3.0", "log", "percent-encoding", "serde", "serde_json", - "thiserror 1.0.64", + "thiserror 2.0.3", "url", ] @@ -5106,9 +5128,10 @@ dependencies = [ "anyhow", "async-trait", "boxed_error", + "deno_error", "deno_media_type", "deno_package_json", - "deno_path_util 0.3.0", + "deno_path_util", "futures", "lazy-regex", "once_cell", @@ -6846,14 +6869,15 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.236.0" +version = "0.239.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e23b3abce64010612f88f4ff689a959736f99eb3dc0dbf1c7903434b8bd8cda5" +checksum = "3caa6d882827148e5d9052d9d8d6d1c9d6ad426ed00cab46cafb8c07a0e7126a" dependencies = [ + "deno_error", "num-bigint", "serde", "smallvec", - "thiserror 1.0.64", + "thiserror 2.0.3", "v8", ] @@ -8506,9 +8530,9 @@ dependencies = [ [[package]] name = "v8" -version = "130.0.2" +version = "130.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ee0be58935708fa4d7efb970c6cf9f2d9511d24ee24246481a65b6ee167348d" +checksum = "a511192602f7b435b0a241c1947aa743eb7717f20a9195f4b5e8ed1952e01db1" dependencies = [ "bindgen", "bitflags 2.6.0", @@ -8706,11 +8730,12 @@ dependencies = [ [[package]] name = "wasm_dep_analyzer" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f270206a91783fd90625c8bb0d8fbd459d0b1d1bf209b656f713f01ae7c04b8" +checksum = "2eeee3bdea6257cc36d756fa745a70f9d393571e47d69e0ed97581676a5369ca" dependencies = [ - "thiserror 1.0.64", + "deno_error", + "thiserror 2.0.3", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 8e7d446b01e337..75572162c4cc67 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -48,10 +48,10 @@ repository = "https://github.com/denoland/deno" [workspace.dependencies] deno_ast = { version = "=0.44.0", features = ["transpiling"] } -deno_core = { version = "0.327.0" } +deno_core = { version = "0.330.0" } deno_bench_util = { version = "0.178.0", path = "./bench_util" } -deno_config = { version = "=0.42.0", features = ["workspace", "sync"] } +deno_config = { version = "=0.43.0", features = ["workspace", "sync"] } deno_lockfile = "=0.24.0" deno_media_type = { version = "0.2.3", features = ["module_specifier"] } deno_npm = "=0.27.0" @@ -63,10 +63,10 @@ deno_terminal = "0.2.0" napi_sym = { version = "0.114.0", path = "./ext/napi/sym" } test_util = { package = "test_server", path = "./tests/util/server" } -denokv_proto = "0.8.4" -denokv_remote = "0.8.4" +denokv_proto = "0.9.0" +denokv_remote = "0.9.0" # denokv_sqlite brings in bundled sqlite if we don't disable the default features -denokv_sqlite = { default-features = false, version = "0.8.4" } +denokv_sqlite = { default-features = false, version = "0.9.0" } # exts deno_broadcast_channel = { version = "0.178.0", path = "./ext/broadcast_channel" } @@ -119,7 +119,7 @@ dashmap = "5.5.3" data-encoding = "2.3.3" data-url = "=0.3.1" deno_cache_dir = "=0.16.0" -deno_error = "=0.5.2" +deno_error = "=0.5.3" deno_package_json = { version = "0.4.0", 
default-features = false } deno_unsync = "0.4.2" dlopen2 = "0.6.1" diff --git a/cli/Cargo.toml b/cli/Cargo.toml index a2cbd746ca6b68..b77c904c40f86f 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -62,6 +62,7 @@ serde_json.workspace = true zstd.workspace = true glibc_version = "0.1.2" flate2 = { workspace = true, features = ["default"] } +deno_error.workspace = true [target.'cfg(windows)'.build-dependencies] winapi.workspace = true @@ -72,9 +73,9 @@ deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposa deno_cache_dir.workspace = true deno_config.workspace = true deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } -deno_doc = { version = "=0.161.3", features = ["rust", "comrak"] } +deno_doc = { version = "=0.164.0", features = ["rust", "comrak"] } deno_error.workspace = true -deno_graph = { version = "=0.86.9" } +deno_graph = { version = "=0.87.0" } deno_lint = { version = "=0.68.2", features = ["docs"] } deno_lockfile.workspace = true deno_npm.workspace = true @@ -124,7 +125,7 @@ http.workspace = true http-body.workspace = true http-body-util.workspace = true hyper-util.workspace = true -import_map = { version = "=0.20.1", features = ["ext"] } +import_map = { version = "=0.21.0", features = ["ext"] } indexmap.workspace = true jsonc-parser = { workspace = true, features = ["cst", "serde"] } jupyter_runtime = { package = "runtimelib", version = "=0.19.0", features = ["tokio-runtime"] } diff --git a/cli/args/lockfile.rs b/cli/args/lockfile.rs index bc4c92638a9ed2..976992aac8287b 100644 --- a/cli/args/lockfile.rs +++ b/cli/args/lockfile.rs @@ -10,6 +10,7 @@ use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::parking_lot::MutexGuard; use deno_core::serde_json; +use deno_error::JsErrorBox; use deno_lockfile::Lockfile; use deno_lockfile::WorkspaceMemberConfig; use deno_package_json::PackageJsonDepValue; @@ -59,6 +60,14 @@ impl<'a, T> std::ops::DerefMut for Guard<'a, T> { } } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[error("Failed writing lockfile")] +#[class(inherit)] +struct AtomicWriteFileWithRetriesError { + #[source] + source: std::io::Error, +} + impl CliLockfile { /// Get the inner deno_lockfile::Lockfile. pub fn lock(&self) -> Guard { @@ -78,7 +87,7 @@ impl CliLockfile { self.lockfile.lock().overwrite } - pub fn write_if_changed(&self) -> Result<(), AnyError> { + pub fn write_if_changed(&self) -> Result<(), JsErrorBox> { if self.skip_write { return Ok(()); } @@ -96,7 +105,9 @@ impl CliLockfile { &bytes, cache::CACHE_PERM, ) - .context("Failed writing lockfile.")?; + .map_err(|source| { + JsErrorBox::from_err(AtomicWriteFileWithRetriesError { source }) + })?; lockfile.has_content_changed = false; Ok(()) } @@ -255,7 +266,7 @@ impl CliLockfile { }) } - pub fn error_if_changed(&self) -> Result<(), AnyError> { + pub fn error_if_changed(&self) -> Result<(), JsErrorBox> { if !self.frozen { return Ok(()); } @@ -267,9 +278,7 @@ impl CliLockfile { let diff = crate::util::diff::diff(&contents, &new_contents); // has an extra newline at the end let diff = diff.trim_end(); - Err(deno_core::anyhow::anyhow!( - "The lockfile is out of date. Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}" - )) + Err(JsErrorBox::generic(format!("The lockfile is out of date. 
Run `deno install --frozen=false`, or rerun with `--frozen=false` to update it.\nchanges:\n{diff}"))) } else { Ok(()) } diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 35f79a9c3ee94d..ebd321a20a5228 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -26,6 +26,7 @@ use deno_ast::SourceMapOption; use deno_cache_dir::file_fetcher::CacheSetting; pub use deno_config::deno_json::BenchConfig; pub use deno_config::deno_json::ConfigFile; +use deno_config::deno_json::ConfigFileError; use deno_config::deno_json::FmtConfig; pub use deno_config::deno_json::FmtOptionsConfig; use deno_config::deno_json::LintConfig; @@ -55,6 +56,7 @@ use deno_core::error::AnyError; use deno_core::resolve_url_or_path; use deno_core::serde_json; use deno_core::url::Url; +use deno_error::JsErrorBox; use deno_graph::GraphKind; pub use deno_json::check_warn_tsconfig; use deno_lint::linter::LintConfig as DenoLintConfig; @@ -604,7 +606,8 @@ pub fn create_default_npmrc() -> Arc { }) } -#[derive(Error, Debug, Clone)] +#[derive(Error, Debug, Clone, deno_error::JsError)] +#[class(generic)] pub enum RootCertStoreLoadError { #[error( "Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")" @@ -1104,7 +1107,7 @@ impl CliOptions { pkg_json_dep_resolution, specified_import_map: cli_arg_specified_import_map, }, - |path| Ok(std::fs::read_to_string(path)?), + |path| std::fs::read_to_string(path).map_err(JsErrorBox::from_err), )?) } @@ -1246,11 +1249,14 @@ impl CliOptions { pub fn node_modules_dir( &self, - ) -> Result, AnyError> { + ) -> Result< + Option, + deno_config::deno_json::NodeModulesDirParseError, + > { if let Some(flag) = self.flags.node_modules_dir { return Ok(Some(flag)); } - self.workspace().node_modules_dir().map_err(Into::into) + self.workspace().node_modules_dir() } pub fn vendor_dir_path(&self) -> Option<&PathBuf> { @@ -1260,7 +1266,7 @@ impl CliOptions { pub fn resolve_ts_config_for_emit( &self, config_type: TsConfigType, - ) -> Result { + ) -> Result { self.workspace().resolve_ts_config_for_emit(config_type) } @@ -1289,7 +1295,7 @@ impl CliOptions { pub fn to_compiler_option_types( &self, - ) -> Result, AnyError> { + ) -> Result, serde_json::Error> { self .workspace() .to_compiler_option_types() diff --git a/cli/build.rs b/cli/build.rs index 83290599e6bf06..590fee795d5900 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -13,10 +13,9 @@ mod ts { use std::path::Path; use std::path::PathBuf; - use deno_core::error::custom_error; - use deno_core::error::AnyError; use deno_core::op2; use deno_core::OpState; + use deno_error::JsErrorBox; use serde::Serialize; use super::*; @@ -53,7 +52,7 @@ mod ts { fn op_script_version( _state: &mut OpState, #[string] _arg: &str, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { Ok(Some("1".to_string())) } @@ -72,7 +71,7 @@ mod ts { fn op_load( state: &mut OpState, #[string] load_specifier: &str, - ) -> Result { + ) -> Result { let op_crate_libs = state.borrow::>(); let path_dts = state.borrow::(); let re_asset = lazy_regex::regex!(r"asset:/{3}lib\.(\S+)\.d\.ts"); @@ -93,12 +92,15 @@ mod ts { // if it comes from an op crate, we were supplied with the path to the // file. let path = if let Some(op_crate_lib) = op_crate_libs.get(lib) { - PathBuf::from(op_crate_lib).canonicalize()? + PathBuf::from(op_crate_lib) + .canonicalize() + .map_err(JsErrorBox::from_err)? 
// otherwise we will generate the path ourself } else { path_dts.join(format!("lib.{lib}.d.ts")) }; - let data = std::fs::read_to_string(path)?; + let data = + std::fs::read_to_string(path).map_err(JsErrorBox::from_err)?; Ok(LoadResponse { data, version: "1".to_string(), @@ -106,13 +108,13 @@ mod ts { script_kind: 3, }) } else { - Err(custom_error( + Err(JsErrorBox::new( "InvalidSpecifier", format!("An invalid specifier was requested: {}", load_specifier), )) } } else { - Err(custom_error( + Err(JsErrorBox::new( "InvalidSpecifier", format!("An invalid specifier was requested: {}", load_specifier), )) diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs index fdd8fcf40c2ede..ff9f07fc4ed15b 100644 --- a/cli/cache/mod.rs +++ b/cli/cache/mod.rs @@ -8,7 +8,6 @@ use deno_ast::MediaType; use deno_cache_dir::file_fetcher::CacheSetting; use deno_cache_dir::file_fetcher::FetchNoFollowErrorKind; use deno_cache_dir::file_fetcher::FileOrRedirect; -use deno_core::error::AnyError; use deno_core::futures; use deno_core::futures::FutureExt; use deno_core::ModuleSpecifier; @@ -62,6 +61,7 @@ pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache; pub type LocalHttpCache = deno_cache_dir::LocalHttpCache; pub type LocalLspHttpCache = deno_cache_dir::LocalLspHttpCache; pub use deno_cache_dir::HttpCache; +use deno_error::JsErrorBox; pub struct FetchCacherOptions { pub file_header_overrides: HashMap>, @@ -194,9 +194,9 @@ impl Loader for FetchCacher { LoaderCacheSetting::Use => None, LoaderCacheSetting::Reload => { if matches!(file_fetcher.cache_setting(), CacheSetting::Only) { - return Err(deno_core::anyhow::anyhow!( + return Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::generic( "Could not resolve version constraint using only cached data. Try running again without --cached-only" - )); + )))); } Some(CacheSetting::ReloadAll) } @@ -262,28 +262,27 @@ impl Loader for FetchCacher { FetchNoFollowErrorKind::CacheSave { .. } | FetchNoFollowErrorKind::UnsupportedScheme { .. } | FetchNoFollowErrorKind::RedirectHeaderParse { .. } | - FetchNoFollowErrorKind::InvalidHeader { .. } => Err(AnyError::from(err)), + FetchNoFollowErrorKind::InvalidHeader { .. } => Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(err)))), FetchNoFollowErrorKind::NotCached { .. 
} => { if options.cache_setting == LoaderCacheSetting::Only { Ok(None) } else { - Err(AnyError::from(err)) + Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(err)))) } }, FetchNoFollowErrorKind::ChecksumIntegrity(err) => { // convert to the equivalent deno_graph error so that it // enhances it if this is passed to deno_graph Err( - deno_graph::source::ChecksumIntegrityError { + deno_graph::source::LoadError::ChecksumIntegrity(deno_graph::source::ChecksumIntegrityError { actual: err.actual, expected: err.expected, - } - .into(), + }), ) } } }, - CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(AnyError::from(permission_check_error)), + CliFetchNoFollowErrorKind::PermissionCheck(permission_check_error) => Err(deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::from_err(permission_check_error)))), } }) } diff --git a/cli/emit.rs b/cli/emit.rs index 32a636de3630fb..e9b5a4e2506e5a 100644 --- a/cli/emit.rs +++ b/cli/emit.rs @@ -11,10 +11,12 @@ use deno_ast::SourceRangedForSpanned; use deno_ast::TranspileModuleOptions; use deno_ast::TranspileResult; use deno_core::error::AnyError; +use deno_core::error::CoreError; use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; use deno_core::ModuleSpecifier; +use deno_error::JsErrorBox; use deno_graph::MediaType; use deno_graph::Module; use deno_graph::ModuleGraph; @@ -124,7 +126,7 @@ impl Emitter { let transpiled_source = deno_core::unsync::spawn_blocking({ let specifier = specifier.clone(); let source = source.clone(); - move || -> Result<_, AnyError> { + move || { EmitParsedSourceHelper::transpile( &parsed_source_cache, &specifier, @@ -155,7 +157,7 @@ impl Emitter { media_type: MediaType, module_kind: deno_ast::ModuleKind, source: &Arc, - ) -> Result { + ) -> Result { // Note: keep this in sync with the async version above let helper = EmitParsedSourceHelper(self); match helper.pre_emit_parsed_source(specifier, module_kind, source) { @@ -210,7 +212,7 @@ impl Emitter { pub async fn load_and_emit_for_hmr( &self, specifier: &ModuleSpecifier, - ) -> Result { + ) -> Result { let media_type = MediaType::from_specifier(specifier); let source_code = tokio::fs::read_to_string( ModuleSpecifier::to_file_path(specifier).unwrap(), @@ -225,17 +227,21 @@ impl Emitter { let source_arc: Arc = source_code.into(); let parsed_source = self .parsed_source_cache - .remove_or_parse_module(specifier, source_arc, media_type)?; + .remove_or_parse_module(specifier, source_arc, media_type) + .map_err(JsErrorBox::from_err)?; // HMR doesn't work with embedded source maps for some reason, so set // the option to not use them (though you should test this out because // this statement is probably wrong) let mut options = self.transpile_and_emit_options.1.clone(); options.source_map = SourceMapOption::None; - let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script( - specifier, - media_type, - parsed_source.compute_is_script(), - )?; + let is_cjs = self + .cjs_tracker + .is_cjs_with_known_is_script( + specifier, + media_type, + parsed_source.compute_is_script(), + ) + .map_err(JsErrorBox::from_err)?; let transpiled_source = parsed_source .transpile( &self.transpile_and_emit_options.0, @@ -243,7 +249,8 @@ impl Emitter { module_kind: Some(ModuleKind::from_is_cjs(is_cjs)), }, &options, - )? + ) + .map_err(JsErrorBox::from_err)? 
.into_source(); Ok(transpiled_source.text) } @@ -282,6 +289,19 @@ enum PreEmitResult { NotCached { source_hash: u64 }, } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum EmitParsedSourceHelperError { + #[class(inherit)] + #[error(transparent)] + ParseDiagnostic(#[from] deno_ast::ParseDiagnostic), + #[class(inherit)] + #[error(transparent)] + Transpile(#[from] deno_ast::TranspileError), + #[class(inherit)] + #[error(transparent)] + Other(#[from] JsErrorBox), +} + /// Helper to share code between async and sync emit_parsed_source methods. struct EmitParsedSourceHelper<'a>(&'a Emitter); @@ -311,7 +331,7 @@ impl<'a> EmitParsedSourceHelper<'a> { source: Arc, transpile_options: &deno_ast::TranspileOptions, emit_options: &deno_ast::EmitOptions, - ) -> Result { + ) -> Result { // nothing else needs the parsed source at this point, so remove from // the cache in order to not transpile owned let parsed_source = parsed_source_cache @@ -351,7 +371,7 @@ impl<'a> EmitParsedSourceHelper<'a> { // todo(dsherret): this is a temporary measure until we have swc erroring for this fn ensure_no_import_assertion( parsed_source: &deno_ast::ParsedSource, -) -> Result<(), AnyError> { +) -> Result<(), JsErrorBox> { fn has_import_assertion(text: &str) -> bool { // good enough text.contains(" assert ") && !text.contains(" with ") @@ -360,7 +380,7 @@ fn ensure_no_import_assertion( fn create_err( parsed_source: &deno_ast::ParsedSource, range: SourceRange, - ) -> AnyError { + ) -> JsErrorBox { let text_info = parsed_source.text_info_lazy(); let loc = text_info.line_and_column_display(range.start); let mut msg = "Import assertions are deprecated. Use `with` keyword, instead of 'assert' keyword.".to_string(); @@ -373,7 +393,7 @@ fn ensure_no_import_assertion( loc.line_number, loc.column_number, )); - deno_core::anyhow::anyhow!("{}", msg) + JsErrorBox::generic(msg) } let deno_ast::ProgramRef::Module(module) = parsed_source.program_ref() else { diff --git a/cli/errors.rs b/cli/errors.rs deleted file mode 100644 index 6500efec503d03..00000000000000 --- a/cli/errors.rs +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2018-2025 the Deno authors. MIT license. - -//! There are many types of errors in Deno: -//! - AnyError: a generic wrapper that can encapsulate any type of error. -//! - JsError: a container for the error message and stack trace for exceptions -//! thrown in JavaScript code. We use this to pretty-print stack traces. -//! - Diagnostic: these are errors that originate in TypeScript's compiler. -//! They're similar to JsError, in that they have line numbers. But -//! Diagnostics are compile-time type errors, whereas JsErrors are runtime -//! exceptions. 
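The `cli/emit.rs` hunk above is representative of how this PR replaces `AnyError`: each fallible path gets a concrete error enum deriving both `thiserror::Error` and `deno_error::JsError`, with a `#[class(...)]` attribute per variant choosing the JavaScript-facing error class. A minimal sketch of that shape, with hypothetical names, assuming the `deno_error` derive and `JsErrorBox` behave as the hunks above use them:

```rust
use deno_error::JsErrorBox;

// Hypothetical error type, not part of the PR; it only mirrors the pattern.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ExampleEmitError {
  // `inherit` keeps whatever class the wrapped error already reports
  // (the lockfile error above relies on this for std::io::Error).
  #[class(inherit)]
  #[error(transparent)]
  Io(#[from] std::io::Error),
  #[class(inherit)]
  #[error(transparent)]
  Other(#[from] JsErrorBox),
  // `generic` maps the variant to a plain JS `Error`.
  #[class(generic)]
  #[error("import assertions are not supported here")]
  ImportAssertion,
}

fn read_module(path: &std::path::Path) -> Result<String, ExampleEmitError> {
  // The `#[from]` conversion lets `?` build the typed variant directly,
  // which is what replaces the old ad-hoc `AnyError` plumbing.
  Ok(std::fs::read_to_string(path)?)
}
```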
- -use deno_ast::ParseDiagnostic; -use deno_core::error::AnyError; -use deno_graph::source::ResolveError; -use deno_graph::ModuleError; -use deno_graph::ModuleGraphError; -use deno_graph::ModuleLoadError; -use deno_graph::ResolutionError; -use import_map::ImportMapError; - -fn get_import_map_error_class(_: &ImportMapError) -> &'static str { - "URIError" -} - -fn get_diagnostic_class(_: &ParseDiagnostic) -> &'static str { - "SyntaxError" -} - -pub fn get_module_graph_error_class(err: &ModuleGraphError) -> &'static str { - match err { - ModuleGraphError::ResolutionError(err) - | ModuleGraphError::TypesResolutionError(err) => { - get_resolution_error_class(err) - } - ModuleGraphError::ModuleError(err) => get_module_error_class(err), - } -} - -pub fn get_module_error_class(err: &ModuleError) -> &'static str { - use deno_graph::JsrLoadError; - use deno_graph::NpmLoadError; - - match err { - ModuleError::InvalidTypeAssertion { .. } => "SyntaxError", - ModuleError::ParseErr(_, diagnostic) => get_diagnostic_class(diagnostic), - ModuleError::WasmParseErr(..) => "SyntaxError", - ModuleError::UnsupportedMediaType { .. } - | ModuleError::UnsupportedImportAttributeType { .. } => "TypeError", - ModuleError::Missing(_, _) | ModuleError::MissingDynamic(_, _) => { - "NotFound" - } - ModuleError::LoadingErr(_, _, err) => match err { - ModuleLoadError::Loader(err) => get_error_class_name(err.as_ref()), - ModuleLoadError::HttpsChecksumIntegrity(_) - | ModuleLoadError::TooManyRedirects => "Error", - ModuleLoadError::NodeUnknownBuiltinModule(_) => "NotFound", - ModuleLoadError::Decode(_) => "TypeError", - ModuleLoadError::Npm(err) => match err { - NpmLoadError::NotSupportedEnvironment - | NpmLoadError::PackageReqResolution(_) - | NpmLoadError::RegistryInfo(_) => "Error", - NpmLoadError::PackageReqReferenceParse(_) => "TypeError", - }, - ModuleLoadError::Jsr(err) => match err { - JsrLoadError::UnsupportedManifestChecksum - | JsrLoadError::PackageFormat(_) => "TypeError", - JsrLoadError::ContentLoadExternalSpecifier - | JsrLoadError::ContentLoad(_) - | JsrLoadError::ContentChecksumIntegrity(_) - | JsrLoadError::PackageManifestLoad(_, _) - | JsrLoadError::PackageVersionManifestChecksumIntegrity(..) - | JsrLoadError::PackageVersionManifestLoad(_, _) - | JsrLoadError::RedirectInPackage(_) => "Error", - JsrLoadError::PackageNotFound(_) - | JsrLoadError::PackageReqNotFound(_) - | JsrLoadError::PackageVersionNotFound(_) - | JsrLoadError::UnknownExport { .. } => "NotFound", - }, - }, - } -} - -fn get_resolution_error_class(err: &ResolutionError) -> &'static str { - match err { - ResolutionError::ResolverError { error, .. 
} => { - use ResolveError::*; - match error.as_ref() { - Specifier(_) => "TypeError", - Other(e) => get_error_class_name(e), - } - } - _ => "TypeError", - } -} - -fn get_try_from_int_error_class(_: &std::num::TryFromIntError) -> &'static str { - "TypeError" -} - -pub fn get_error_class_name(e: &AnyError) -> &'static str { - deno_runtime::errors::get_error_class_name(e) - .or_else(|| { - e.downcast_ref::() - .map(get_import_map_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_diagnostic_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_module_graph_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_resolution_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_try_from_int_error_class) - }) - .unwrap_or("Error") -} diff --git a/cli/factory.rs b/cli/factory.rs index 4ae1d94ea8e9fa..86902dfc3bdf85 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -10,6 +10,7 @@ use deno_config::workspace::WorkspaceResolver; use deno_core::error::AnyError; use deno_core::futures::FutureExt; use deno_core::FeatureChecker; +use deno_error::JsErrorBox; use deno_resolver::cjs::IsCjsResolutionMode; use deno_resolver::npm::NpmReqResolverOptions; use deno_resolver::DenoResolverOptions; @@ -118,7 +119,7 @@ impl CliRootCertStoreProvider { } impl RootCertStoreProvider for CliRootCertStoreProvider { - fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> { + fn get_or_try_init(&self) -> Result<&RootCertStore, JsErrorBox> { self .cell .get_or_try_init(|| { @@ -128,7 +129,7 @@ impl RootCertStoreProvider for CliRootCertStoreProvider { self.maybe_ca_data.clone(), ) }) - .map_err(|e| e.into()) + .map_err(JsErrorBox::from_err) } } diff --git a/cli/graph_util.rs b/cli/graph_util.rs index ac9e75cff0bf95..f32dae8a07910e 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -2,17 +2,18 @@ use std::collections::HashSet; use std::error::Error; -use std::ops::Deref; use std::path::PathBuf; use std::sync::Arc; +use deno_config::deno_json; use deno_config::deno_json::JsxImportSourceConfig; use deno_config::workspace::JsrPackageConfig; -use deno_core::anyhow::bail; -use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; +use deno_core::serde_json; use deno_core::ModuleSpecifier; +use deno_error::JsErrorBox; +use deno_error::JsErrorClass; use deno_graph::source::Loader; use deno_graph::source::LoaderChecksum; use deno_graph::source::ResolutionKind; @@ -49,8 +50,6 @@ use crate::cache::GlobalHttpCache; use crate::cache::ModuleInfoCache; use crate::cache::ParsedSourceCache; use crate::colors; -use crate::errors::get_error_class_name; -use crate::errors::get_module_graph_error_class; use crate::file_fetcher::CliFileFetcher; use crate::npm::CliNpmResolver; use crate::resolver::CjsTracker; @@ -59,6 +58,7 @@ use crate::resolver::CliSloppyImportsResolver; use crate::resolver::SloppyImportsCachedFs; use crate::sys::CliSys; use crate::tools::check; +use crate::tools::check::CheckError; use crate::tools::check::TypeChecker; use crate::util::file_watcher::WatcherCommunicator; use crate::util::fs::canonicalize_path; @@ -85,7 +85,7 @@ pub fn graph_valid( sys: &CliSys, roots: &[ModuleSpecifier], options: GraphValidOptions, -) -> Result<(), AnyError> { +) -> Result<(), JsErrorBox> { if options.exit_integrity_errors { graph_exit_integrity_errors(graph); } @@ -104,9 +104,9 @@ pub fn graph_valid( } else { // finally surface the npm resolution result if let Err(err) = &graph.npm_dep_graph_result { - return Err(custom_error( - 
get_error_class_name(err), - format_deno_graph_error(err.as_ref().deref()), + return Err(JsErrorBox::new( + err.get_class(), + format_deno_graph_error(err), )); } Ok(()) @@ -145,7 +145,7 @@ pub fn graph_walk_errors<'a>( sys: &'a CliSys, roots: &'a [ModuleSpecifier], options: GraphWalkErrorsOptions, -) -> impl Iterator + 'a { +) -> impl Iterator + 'a { graph .walk( roots.iter(), @@ -197,7 +197,7 @@ pub fn graph_walk_errors<'a>( return None; } - Some(custom_error(get_module_graph_error_class(&error), message)) + Some(JsErrorBox::new(error.get_class(), message)) }) } @@ -437,14 +437,14 @@ impl ModuleGraphCreator { } } - pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), AnyError> { + pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), JsErrorBox> { self.module_graph_builder.graph_valid(graph) } async fn type_check_graph( &self, graph: ModuleGraph, - ) -> Result, AnyError> { + ) -> Result, CheckError> { self .type_checker .check( @@ -467,6 +467,27 @@ pub struct BuildFastCheckGraphOptions<'a> { pub workspace_fast_check: deno_graph::WorkspaceFastCheckOption<'a>, } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum BuildGraphWithNpmResolutionError { + #[class(inherit)] + #[error(transparent)] + SerdeJson(#[from] serde_json::Error), + #[class(inherit)] + #[error(transparent)] + ToMaybeJsxImportSourceConfig( + #[from] deno_json::ToMaybeJsxImportSourceConfigError, + ), + #[class(inherit)] + #[error(transparent)] + NodeModulesDirParse(#[from] deno_json::NodeModulesDirParseError), + #[class(inherit)] + #[error(transparent)] + Other(#[from] JsErrorBox), + #[class(generic)] + #[error("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead")] + UnsupportedNpmSpecifierEntrypointResolutionWay, +} + pub struct ModuleGraphBuilder { caches: Arc, cjs_tracker: Arc, @@ -524,7 +545,7 @@ impl ModuleGraphBuilder { &self, graph: &mut ModuleGraph, options: CreateGraphOptions<'a>, - ) -> Result<(), AnyError> { + ) -> Result<(), BuildGraphWithNpmResolutionError> { enum MutLoaderRef<'a> { Borrowed(&'a mut dyn Loader), Owned(cache::FetchCacher), @@ -652,7 +673,7 @@ impl ModuleGraphBuilder { loader: &'a mut dyn deno_graph::source::Loader, options: deno_graph::BuildOptions<'a>, npm_caching: NpmCachingStrategy, - ) -> Result<(), AnyError> { + ) -> Result<(), BuildGraphWithNpmResolutionError> { // ensure an "npm install" is done if the user has explicitly // opted into using a node_modules directory if self @@ -689,7 +710,7 @@ impl ModuleGraphBuilder { if roots.iter().any(|r| r.scheme() == "npm") && self.npm_resolver.as_byonm().is_some() { - bail!("Resolving npm specifier entrypoints this way is currently not supported with \"nodeModules\": \"manual\". In the meantime, try with --node-modules-dir=auto instead"); + return Err(BuildGraphWithNpmResolutionError::UnsupportedNpmSpecifierEntrypointResolutionWay); } graph.build(roots, loader, options).await; @@ -740,7 +761,7 @@ impl ModuleGraphBuilder { &self, graph: &mut ModuleGraph, options: BuildFastCheckGraphOptions, - ) -> Result<(), AnyError> { + ) -> Result<(), deno_json::ToMaybeJsxImportSourceConfigError> { if !graph.graph_kind().include_types() { return Ok(()); } @@ -804,7 +825,7 @@ impl ModuleGraphBuilder { /// Check if `roots` and their deps are available. Returns `Ok(())` if /// so. Returns `Err(_)` if there is a known module graph or resolution /// error statically reachable from `roots` and not a dynamic import. 
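With classes attached to the error types themselves, the hand-written mapping in the deleted `cli/errors.rs` is no longer needed; call sites such as `graph_valid` above simply ask the error for its class via `deno_error::JsErrorClass` and re-wrap it. A small illustrative helper, not from the PR, assuming `get_class()` and `JsErrorBox::new` accept what the hunk above passes them:

```rust
use deno_error::JsErrorBox;
use deno_error::JsErrorClass;

// Re-wrap an error with extra context while keeping the class the inner
// error already reports, instead of consulting a mapping table.
fn rewrap_with_context<E>(err: &E, context: &str) -> JsErrorBox
where
  E: JsErrorClass + std::fmt::Display,
{
  JsErrorBox::new(err.get_class(), format!("{err}\n{context}"))
}
```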
- pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), AnyError> { + pub fn graph_valid(&self, graph: &ModuleGraph) -> Result<(), JsErrorBox> { self.graph_roots_valid( graph, &graph.roots.iter().cloned().collect::>(), @@ -815,7 +836,7 @@ impl ModuleGraphBuilder { &self, graph: &ModuleGraph, roots: &[ModuleSpecifier], - ) -> Result<(), AnyError> { + ) -> Result<(), JsErrorBox> { graph_valid( graph, &self.sys, @@ -832,7 +853,10 @@ impl ModuleGraphBuilder { ) } - fn create_graph_resolver(&self) -> Result { + fn create_graph_resolver( + &self, + ) -> Result + { let jsx_import_source_config = self .cli_options .workspace() @@ -1001,8 +1025,13 @@ fn get_resolution_error_bare_specifier( Some(specifier.as_str()) } else if let ResolutionError::ResolverError { error, .. } = error { if let ResolveError::Other(error) = (*error).as_ref() { - if let Some(ImportMapError::UnmappedBareSpecifier(specifier, _)) = - error.downcast_ref::() + if let Some(import_map::ImportMapErrorKind::UnmappedBareSpecifier( + specifier, + _, + )) = error + .as_any() + .downcast_ref::() + .map(|e| &**e) { Some(specifier.as_str()) } else { @@ -1039,11 +1068,12 @@ fn get_import_prefix_missing_error(error: &ResolutionError) -> Option<&str> { ResolveError::Other(other_error) => { if let Some(SpecifierError::ImportPrefixMissing { specifier, .. - }) = other_error.downcast_ref::() + }) = other_error.as_any().downcast_ref::() { maybe_specifier = Some(specifier); } } + ResolveError::ImportMap(_) => {} } } } @@ -1294,7 +1324,7 @@ mod test { let specifier = ModuleSpecifier::parse("file:///file.ts").unwrap(); let err = import_map.resolve(input, &specifier).err().unwrap(); let err = ResolutionError::ResolverError { - error: Arc::new(ResolveError::Other(err.into())), + error: Arc::new(ResolveError::Other(JsErrorBox::from_err(err))), specifier: input.to_string(), range: Range { specifier, diff --git a/cli/http_util.rs b/cli/http_util.rs index af6709c5d0ab0f..19d9071833cca8 100644 --- a/cli/http_util.rs +++ b/cli/http_util.rs @@ -6,13 +6,14 @@ use std::thread::ThreadId; use boxed_error::Boxed; use deno_cache_dir::file_fetcher::RedirectHeaderParseError; -use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::futures::StreamExt; use deno_core::parking_lot::Mutex; use deno_core::serde; use deno_core::serde_json; use deno_core::url::Url; +use deno_error::JsError; +use deno_error::JsErrorBox; use deno_runtime::deno_fetch; use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::CreateHttpClientOptions; @@ -94,34 +95,49 @@ impl HttpClientProvider { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] +#[class(type)] #[error("Bad response: {:?}{}", .status_code, .response_text.as_ref().map(|s| format!("\n\n{}", s)).unwrap_or_else(String::new))] pub struct BadResponseError { pub status_code: StatusCode, pub response_text: Option, } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct DownloadError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum DownloadErrorKind { + #[class(inherit)] #[error(transparent)] - Fetch(AnyError), + Fetch(deno_fetch::ClientSendError), + #[class(inherit)] #[error(transparent)] UrlParse(#[from] deno_core::url::ParseError), + #[class(generic)] #[error(transparent)] HttpParse(#[from] http::Error), + #[class(inherit)] #[error(transparent)] Json(#[from] serde_json::Error), + #[class(generic)] #[error(transparent)] ToStr(#[from] http::header::ToStrError), + #[class(inherit)] #[error(transparent)] 
RedirectHeaderParse(RedirectHeaderParseError), + #[class(type)] #[error("Too many redirects.")] TooManyRedirects, + #[class(inherit)] #[error(transparent)] BadResponse(#[from] BadResponseError), + #[class("Http")] + #[error("Not Found.")] + NotFound, + #[class(inherit)] + #[error(transparent)] + Other(JsErrorBox), } #[derive(Debug)] @@ -208,11 +224,11 @@ impl HttpClient { Ok(String::from_utf8(bytes)?) } - pub async fn download(&self, url: Url) -> Result, AnyError> { + pub async fn download(&self, url: Url) -> Result, DownloadError> { let maybe_bytes = self.download_inner(url, None, None).await?; match maybe_bytes { Some(bytes) => Ok(bytes), - None => Err(custom_error("Http", "Not found.")), + None => Err(DownloadErrorKind::NotFound.into_box()), } } @@ -276,7 +292,7 @@ impl HttpClient { get_response_body_with_progress(response, progress_guard) .await .map(|(_, body)| Some(body)) - .map_err(|err| DownloadErrorKind::Fetch(err).into_box()) + .map_err(|err| DownloadErrorKind::Other(err).into_box()) } async fn get_redirected_response( @@ -293,7 +309,7 @@ impl HttpClient { .clone() .send(req) .await - .map_err(|e| DownloadErrorKind::Fetch(e.into()).into_box())?; + .map_err(|e| DownloadErrorKind::Fetch(e).into_box())?; let status = response.status(); if status.is_redirection() { for _ in 0..5 { @@ -313,7 +329,7 @@ impl HttpClient { .clone() .send(req) .await - .map_err(|e| DownloadErrorKind::Fetch(e.into()).into_box())?; + .map_err(|e| DownloadErrorKind::Fetch(e).into_box())?; let status = new_response.status(); if status.is_redirection() { response = new_response; @@ -332,7 +348,7 @@ impl HttpClient { pub async fn get_response_body_with_progress( response: http::Response, progress_guard: Option<&UpdateGuard>, -) -> Result<(HeaderMap, Vec), AnyError> { +) -> Result<(HeaderMap, Vec), JsErrorBox> { use http_body::Body as _; if let Some(progress_guard) = progress_guard { let mut total_size = response.body().size_hint().exact(); diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index 968875e48a3821..f7b487f055c117 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -10,13 +10,13 @@ use deno_ast::SourceRange; use deno_ast::SourceRangedForSpanned; use deno_ast::SourceTextInfo; use deno_config::workspace::MappedResolution; -use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::serde::Deserialize; use deno_core::serde::Serialize; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::ModuleSpecifier; +use deno_error::JsErrorBox; use deno_lint::diagnostic::LintDiagnosticRange; use deno_path_util::url_to_file_path; use deno_runtime::deno_node::PathClean; @@ -1070,10 +1070,13 @@ impl CodeActionCollection { // we wrap tsc, we can't handle the asynchronous response, so it is // actually easier to return errors if we ever encounter one of these, // which we really wouldn't expect from the Deno lsp. 
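`cli/http_util.rs` above also shows the `boxed_error::Boxed` pattern: the public error is a one-field newtype around a boxed kind enum, so the `Result` stays pointer-sized while each kind variant still declares its own `#[class(...)]`. A sketch with made-up names, assuming `Boxed` generates `into_box()` as the `DownloadErrorKind::NotFound.into_box()` call above relies on:

```rust
use boxed_error::Boxed;
use deno_error::JsError;
use deno_error::JsErrorBox;

// Hypothetical names; mirrors the DownloadError / DownloadErrorKind shape.
#[derive(Debug, Boxed, JsError)]
pub struct FetchThingError(pub Box<FetchThingErrorKind>);

#[derive(Debug, thiserror::Error, JsError)]
pub enum FetchThingErrorKind {
  // A string class names the JS error class directly.
  #[class("Http")]
  #[error("Not found.")]
  NotFound,
  #[class(inherit)]
  #[error(transparent)]
  Other(JsErrorBox),
}

fn not_found() -> FetchThingError {
  // `into_box()` wraps the kind in the single-field newtype.
  FetchThingErrorKind::NotFound.into_box()
}
```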
- return Err(custom_error( - "UnsupportedFix", - "The action returned from TypeScript is unsupported.", - )); + return Err( + JsErrorBox::new( + "UnsupportedFix", + "The action returned from TypeScript is unsupported.", + ) + .into(), + ); } let Some(action) = fix_ts_import_action(specifier, resolution_mode, action, language_server) diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index 2aaba928ca42f2..0cd846815341fb 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -41,6 +41,7 @@ use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::url::Url; use deno_core::ModuleSpecifier; +use deno_error::JsErrorBox; use deno_lint::linter::LintConfig as DenoLintConfig; use deno_npm::npm_rc::ResolvedNpmRc; use deno_package_json::PackageJsonCache; @@ -1575,7 +1576,7 @@ impl ConfigData { pkg_json_dep_resolution, specified_import_map, }, - |path| Ok(std::fs::read_to_string(path)?), + |path| std::fs::read_to_string(path).map_err(JsErrorBox::from_err), ) .inspect_err(|err| { lsp_warn!( diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index cc9cca7f2357b5..0982ff5ceb01ac 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -34,7 +34,7 @@ use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; use import_map::ImportMap; -use import_map::ImportMapError; +use import_map::ImportMapErrorKind; use log::error; use tokio::sync::mpsc; use tokio::sync::Mutex; @@ -1297,8 +1297,8 @@ impl DenoDiagnostic { let mut message; message = enhanced_resolution_error_message(err); if let deno_graph::ResolutionError::ResolverError {error, ..} = err{ - if let ResolveError::Other(resolve_error, ..) = (*error).as_ref() { - if let Some(ImportMapError::UnmappedBareSpecifier(specifier, _)) = resolve_error.downcast_ref::() { + if let ResolveError::ImportMap(importmap) = (*error).as_ref() { + if let ImportMapErrorKind::UnmappedBareSpecifier(specifier, _) = &**importmap { if specifier.chars().next().unwrap_or('\0') == '@'{ let hint = format!("\nHint: Use [deno add {}] to add the dependency.", specifier); message.push_str(hint.as_str()); diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 39408e25899923..f31353d4368b3b 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -18,13 +18,13 @@ use deno_ast::swc::visit::VisitWith; use deno_ast::MediaType; use deno_ast::ParsedSource; use deno_ast::SourceTextInfo; -use deno_core::error::custom_error; use deno_core::error::AnyError; use deno_core::futures::future; use deno_core::futures::future::Shared; use deno_core::futures::FutureExt; use deno_core::parking_lot::Mutex; use deno_core::ModuleSpecifier; +use deno_error::JsErrorBox; use deno_graph::Resolution; use deno_path_util::url_to_file_path; use deno_runtime::deno_node; @@ -1081,7 +1081,7 @@ impl Documents { .or_else(|| self.file_system_docs.remove_document(specifier)) .map(Ok) .unwrap_or_else(|| { - Err(custom_error( + Err(JsErrorBox::new( "NotFound", format!("The specifier \"{specifier}\" was not found."), )) diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index 6861d9dd636327..35f5374efee692 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -122,7 +122,7 @@ use crate::util::sync::AsyncFlag; struct LspRootCertStoreProvider(RootCertStore); impl RootCertStoreProvider for LspRootCertStoreProvider { - fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> { + fn get_or_try_init(&self) -> Result<&RootCertStore, deno_error::JsErrorBox> { 
Ok(&self.0) } } diff --git a/cli/lsp/text.rs b/cli/lsp/text.rs index a9a5f0753af25b..efb9a072a49ff6 100644 --- a/cli/lsp/text.rs +++ b/cli/lsp/text.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; -use deno_core::error::custom_error; use deno_core::error::AnyError; +use deno_error::JsErrorBox; use dissimilar::diff; use dissimilar::Chunk; use text_size::TextRange; @@ -137,7 +137,7 @@ impl LineIndex { if let Some(line_offset) = self.utf8_offsets.get(position.line as usize) { Ok(line_offset + col) } else { - Err(custom_error("OutOfRange", "The position is out of range.")) + Err(JsErrorBox::new("OutOfRange", "The position is out of range.").into()) } } @@ -157,7 +157,7 @@ impl LineIndex { if let Some(line_offset) = self.utf16_offsets.get(position.line as usize) { Ok(line_offset + TextSize::from(position.character)) } else { - Err(custom_error("OutOfRange", "The position is out of range.")) + Err(JsErrorBox::new("OutOfRange", "The position is out of range.").into()) } } diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index d2b501a539a1dc..826021a2883c55 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -20,7 +20,6 @@ use deno_core::anyhow::Context as _; use deno_core::convert::Smi; use deno_core::convert::ToV8; use deno_core::error::AnyError; -use deno_core::error::StdAnyError; use deno_core::futures::stream::FuturesOrdered; use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; @@ -4331,7 +4330,7 @@ impl TscSpecifierMap { pub fn normalize>( &self, specifier: S, - ) -> Result { + ) -> Result { let original = specifier.as_ref(); if let Some(specifier) = self.normalized_specifiers.get(original) { return Ok(specifier.clone()); @@ -4339,7 +4338,7 @@ impl TscSpecifierMap { let specifier_str = original.replace(".d.ts.d.ts", ".d.ts"); let specifier = match ModuleSpecifier::parse(&specifier_str) { Ok(s) => s, - Err(err) => return Err(err.into()), + Err(err) => return Err(err), }; if specifier.as_str() != original { self @@ -4437,6 +4436,16 @@ fn op_is_node_file(state: &mut OpState, #[string] path: String) -> bool { r } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +enum LoadError { + #[error("{0}")] + #[class(inherit)] + UrlParse(#[from] deno_core::url::ParseError), + #[error("{0}")] + #[class(inherit)] + SerdeV8(#[from] serde_v8::Error), +} + #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] struct LoadResponse { @@ -4451,7 +4460,7 @@ fn op_load<'s>( scope: &'s mut v8::HandleScope, state: &mut OpState, #[string] specifier: &str, -) -> Result, AnyError> { +) -> Result, LoadError> { let state = state.borrow_mut::(); let mark = state .performance @@ -4482,7 +4491,7 @@ fn op_load<'s>( fn op_release( state: &mut OpState, #[string] specifier: &str, -) -> Result<(), AnyError> { +) -> Result<(), deno_core::url::ParseError> { let state = state.borrow_mut::(); let mark = state .performance @@ -4499,7 +4508,7 @@ fn op_resolve( state: &mut OpState, #[string] base: String, #[serde] specifiers: Vec<(bool, String)>, -) -> Result>, AnyError> { +) -> Result>, deno_core::url::ParseError> { op_resolve_inner(state, ResolveArgs { base, specifiers }) } @@ -4511,7 +4520,7 @@ struct TscRequestArray { } impl<'a> ToV8<'a> for TscRequestArray { - type Error = StdAnyError; + type Error = serde_v8::Error; fn to_v8( self, @@ -4526,9 +4535,7 @@ impl<'a> ToV8<'a> for TscRequestArray { .unwrap() .into(); let args = args.unwrap_or_else(|| v8::Array::new(scope, 0).into()); - let scope_url = serde_v8::to_v8(scope, self.scope) - .map_err(AnyError::from) - .map_err(StdAnyError::from)?; + let 
scope_url = serde_v8::to_v8(scope, self.scope)?; let change = self.change.to_v8(scope).unwrap_infallible(); @@ -4586,7 +4593,7 @@ async fn op_poll_requests( fn op_resolve_inner( state: &mut OpState, args: ResolveArgs, -) -> Result>, AnyError> { +) -> Result>, deno_core::url::ParseError> { let state = state.borrow_mut::(); let mark = state.performance.mark_with_args("tsc.op.op_resolve", &args); let referrer = state.specifier_map.normalize(&args.base)?; @@ -4743,7 +4750,7 @@ fn op_script_names(state: &mut OpState) -> ScriptNames { fn op_script_version( state: &mut OpState, #[string] specifier: &str, -) -> Result, AnyError> { +) -> Result, deno_core::url::ParseError> { let state = state.borrow_mut::(); let mark = state.performance.mark("tsc.op.op_script_version"); let specifier = state.specifier_map.normalize(specifier)?; @@ -5398,7 +5405,8 @@ impl TscRequest { fn to_server_request<'s>( &self, scope: &mut v8::HandleScope<'s>, - ) -> Result<(&'static str, Option>), AnyError> { + ) -> Result<(&'static str, Option>), serde_v8::Error> + { let args = match self { TscRequest::GetDiagnostics(args) => { ("$getDiagnostics", Some(serde_v8::to_v8(scope, args)?)) diff --git a/cli/main.rs b/cli/main.rs index 7db471932d00e8..6bbefcf956a63c 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -4,7 +4,6 @@ mod args; mod cache; mod cdp; mod emit; -mod errors; mod factory; mod file_fetcher; mod graph_container; @@ -38,7 +37,7 @@ use std::sync::Arc; use args::TaskFlags; use deno_core::anyhow::Context; use deno_core::error::AnyError; -use deno_core::error::JsError; +use deno_core::error::CoreError; use deno_core::futures::FutureExt; use deno_core::unsync::JoinHandle; use deno_npm::resolution::SnapshotFromLockfileError; @@ -202,7 +201,7 @@ async fn run_subcommand(flags: Arc) -> Result { match result { Ok(v) => Ok(v), Err(script_err) => { - if let Some(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_))) = script_err.downcast_ref::() { + if let Some(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_))) = util::result::any_and_jserrorbox_downcast_ref::(&script_err) { if flags.node_modules_dir.is_none() { let mut flags = flags.deref().clone(); let watch = match &flags.subcommand { @@ -373,10 +372,14 @@ fn exit_for_error(error: AnyError) -> ! 
{ let mut error_string = format!("{error:?}"); let mut error_code = 1; - if let Some(e) = error.downcast_ref::() { + if let Some(CoreError::Js(e)) = + util::result::any_and_jserrorbox_downcast_ref::(&error) + { error_string = format_js_error(e); } else if let Some(SnapshotFromLockfileError::IntegrityCheckFailed(e)) = - error.downcast_ref::() + util::result::any_and_jserrorbox_downcast_ref::( + &error, + ) { error_string = e.to_string(); error_code = 10; diff --git a/cli/mainrt.rs b/cli/mainrt.rs index 1279554514f4bb..8eea3f85ed5ae4 100644 --- a/cli/mainrt.rs +++ b/cli/mainrt.rs @@ -10,7 +10,6 @@ mod standalone; mod args; mod cache; mod emit; -mod errors; mod file_fetcher; mod http_util; mod js; @@ -30,8 +29,8 @@ use std::env; use std::env::current_exe; use std::sync::Arc; -use deno_core::error::generic_error; use deno_core::error::AnyError; +use deno_core::error::CoreError; use deno_core::error::JsError; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics; @@ -41,6 +40,7 @@ use indexmap::IndexMap; use standalone::DenoCompileFileSystem; use crate::args::Flags; +use crate::util::result::any_and_jserrorbox_downcast_ref; pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { log::error!( @@ -65,8 +65,10 @@ fn unwrap_or_exit(result: Result) -> T { Err(error) => { let mut error_string = format!("{:?}", error); - if let Some(e) = error.downcast_ref::() { - error_string = format_js_error(e); + if let Some(CoreError::Js(js_error)) = + any_and_jserrorbox_downcast_ref::(&error) + { + error_string = format_js_error(js_error); } exit_with_message(&error_string, 1); diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 446397cad19766..ba53077a3c8d1d 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -14,11 +14,9 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleKind; use deno_core::anyhow::anyhow; -use deno_core::anyhow::bail; use deno_core::anyhow::Context; -use deno_core::error::custom_error; -use deno_core::error::generic_error; use deno_core::error::AnyError; +use deno_core::error::ModuleLoaderError; use deno_core::futures::future::FutureExt; use deno_core::futures::Future; use deno_core::parking_lot::Mutex; @@ -31,6 +29,8 @@ use deno_core::ModuleSpecifier; use deno_core::ModuleType; use deno_core::RequestedModuleType; use deno_core::SourceCodeCacheInfo; +use deno_error::JsErrorBox; +use deno_error::JsErrorClass; use deno_graph::GraphKind; use deno_graph::JsModule; use deno_graph::JsonModule; @@ -59,7 +59,6 @@ use crate::cache::CodeCache; use crate::cache::FastInsecureHasher; use crate::cache::ParsedSourceCache; use crate::emit::Emitter; -use crate::errors::get_module_error_class; use crate::graph_container::MainModuleGraphContainer; use crate::graph_container::ModuleGraphContainer; use crate::graph_container::ModuleGraphUpdatePermit; @@ -79,6 +78,7 @@ use crate::resolver::NotSupportedKindInNpmError; use crate::resolver::NpmModuleLoader; use crate::sys::CliSys; use crate::tools::check; +use crate::tools::check::CheckError; use crate::tools::check::TypeChecker; use crate::util::progress_bar::ProgressBar; use crate::util::text_encoding::code_without_source_map; @@ -86,6 +86,21 @@ use crate::util::text_encoding::source_map_from_code; use crate::worker::CreateModuleLoaderResult; use crate::worker::ModuleLoaderFactory; +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum PrepareModuleLoadError { + #[class(inherit)] + #[error(transparent)] + BuildGraphWithNpmResolution( 
+ #[from] crate::graph_util::BuildGraphWithNpmResolutionError, + ), + #[class(inherit)] + #[error(transparent)] + Check(#[from] CheckError), + #[class(inherit)] + #[error(transparent)] + Other(#[from] JsErrorBox), +} + pub struct ModuleLoadPreparer { options: Arc, lockfile: Option>, @@ -125,7 +140,7 @@ impl ModuleLoadPreparer { lib: TsTypeLib, permissions: PermissionsContainer, ext_overwrite: Option<&String>, - ) -> Result<(), AnyError> { + ) -> Result<(), PrepareModuleLoadError> { log::debug!("Preparing module load."); let _pb_clear_guard = self.progress_bar.clear_guard(); @@ -206,7 +221,7 @@ impl ModuleLoadPreparer { &self, graph: &ModuleGraph, roots: &[ModuleSpecifier], - ) -> Result<(), AnyError> { + ) -> Result<(), JsErrorBox> { self.module_graph_builder.graph_roots_valid(graph, roots) } } @@ -423,7 +438,7 @@ impl specifier: &ModuleSpecifier, maybe_referrer: Option<&ModuleSpecifier>, requested_module_type: RequestedModuleType, - ) -> Result { + ) -> Result { let code_source = self.load_code_source(specifier, maybe_referrer).await?; let code = if self.shared.is_inspecting || code_source.media_type == MediaType::Wasm @@ -446,7 +461,7 @@ impl if module_type == ModuleType::Json && requested_module_type != RequestedModuleType::Json { - return Err(generic_error("Attempted to load JSON module without specifying \"type\": \"json\" attribute in the import statement.")); + return Err(JsErrorBox::generic("Attempted to load JSON module without specifying \"type\": \"json\" attribute in the import statement.").into()); } let code_cache = if module_type == ModuleType::JavaScript { @@ -507,7 +522,7 @@ impl fn resolve_referrer( &self, referrer: &str, - ) -> Result { + ) -> Result { let referrer = if referrer.is_empty() && self.shared.is_repl { // FIXME(bartlomieju): this is a hacky way to provide compatibility with REPL // and `Deno.core.evalContext` API. 
Ideally we should always have a referrer filled @@ -533,7 +548,7 @@ impl &self, raw_specifier: &str, referrer: &ModuleSpecifier, - ) -> Result { + ) -> Result { let graph = self.graph_container.graph(); let resolution = match graph.get(referrer) { Some(Module::Js(module)) => module @@ -547,19 +562,25 @@ impl let specifier = match resolution { Resolution::Ok(resolved) => Cow::Borrowed(&resolved.specifier), Resolution::Err(err) => { - return Err(custom_error( - "TypeError", - format!("{}\n", err.to_string_with_range()), - )); + return Err( + JsErrorBox::type_error(format!("{}\n", err.to_string_with_range())) + .into(), + ); } - Resolution::None => Cow::Owned(self.shared.resolver.resolve( - raw_specifier, - referrer, - deno_graph::Position::zeroed(), - // if we're here, that means it's resolving a dynamic import - ResolutionMode::Import, - NodeResolutionKind::Execution, - )?), + Resolution::None => Cow::Owned( + self + .shared + .resolver + .resolve( + raw_specifier, + referrer, + deno_graph::Position::zeroed(), + // if we're here, that means it's resolving a dynamic import + ResolutionMode::Import, + NodeResolutionKind::Execution, + ) + .map_err(JsErrorBox::from_err)?, + ), }; if self.shared.is_repl { @@ -574,7 +595,7 @@ impl ResolutionMode::Import, NodeResolutionKind::Execution, ) - .map_err(AnyError::from); + .map_err(|e| JsErrorBox::from_err(e).into()); } } @@ -585,7 +606,8 @@ impl .npm_resolver .as_managed() .unwrap() // byonm won't create a Module::Npm - .resolve_pkg_folder_from_deno_module(module.nv_reference.nv())?; + .resolve_pkg_folder_from_deno_module(module.nv_reference.nv()) + .map_err(JsErrorBox::from_err)?; self .shared .node_resolver @@ -701,7 +723,7 @@ impl &self, graph: &'graph ModuleGraph, specifier: &ModuleSpecifier, - ) -> Result>, AnyError> { + ) -> Result>, JsErrorBox> { if specifier.scheme() == "node" { // Node built-in modules should be handled internally. unreachable!("Deno bug. {} was misconfigured internally.", specifier); @@ -710,8 +732,8 @@ impl let maybe_module = match graph.try_get(specifier) { Ok(module) => module, Err(err) => { - return Err(custom_error( - get_module_error_class(err), + return Err(JsErrorBox::new( + err.get_class(), enhance_graph_error( &self.shared.sys, &ModuleGraphError::ModuleError(err.clone()), @@ -739,11 +761,12 @@ impl is_script, .. })) => { - if self.shared.cjs_tracker.is_cjs_with_known_is_script( - specifier, - *media_type, - *is_script, - )? { + if self + .shared + .cjs_tracker + .is_cjs_with_known_is_script(specifier, *media_type, *is_script) + .map_err(JsErrorBox::from_err)? + { return Ok(Some(CodeOrDeferredEmit::Cjs { specifier, media_type: *media_type, @@ -875,16 +898,16 @@ impl ModuleLoader specifier: &str, referrer: &str, _kind: deno_core::ResolutionKind, - ) -> Result { + ) -> Result { fn ensure_not_jsr_non_jsr_remote_import( specifier: &ModuleSpecifier, referrer: &ModuleSpecifier, - ) -> Result<(), AnyError> { + ) -> Result<(), JsErrorBox> { if referrer.as_str().starts_with(jsr_url().as_str()) && !specifier.as_str().starts_with(jsr_url().as_str()) && matches!(specifier.scheme(), "http" | "https") { - bail!("Importing {} blocked. JSR packages cannot import non-JSR remote modules for security reasons.", specifier); + return Err(JsErrorBox::generic(format!("Importing {} blocked. 
JSR packages cannot import non-JSR remote modules for security reasons.", specifier))); } Ok(()) } @@ -937,7 +960,7 @@ impl ModuleLoader specifier: &ModuleSpecifier, _maybe_referrer: Option, is_dynamic: bool, - ) -> Pin>>> { + ) -> Pin>>> { self.0.shared.in_flight_loads_tracker.increase(); if self.0.shared.in_npm_pkg_checker.in_npm_package(specifier) { return Box::pin(deno_core::futures::future::ready(Ok(()))); @@ -986,7 +1009,8 @@ impl ModuleLoader permissions, None, ) - .await?; + .await + .map_err(JsErrorBox::from_err)?; update_permit.commit(); Ok(()) } @@ -1130,35 +1154,37 @@ impl NodeRequireLoader &self, permissions: &mut dyn deno_runtime::deno_node::NodePermissions, path: &'a Path, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { if let Ok(url) = deno_path_util::url_from_file_path(path) { // allow reading if it's in the module graph if self.graph_container.graph().get(&url).is_some() { - return Ok(std::borrow::Cow::Borrowed(path)); + return Ok(Cow::Borrowed(path)); } } self .npm_registry_permission_checker .ensure_read_permission(permissions, path) + .map_err(JsErrorBox::from_err) } fn load_text_file_lossy( &self, path: &Path, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { // todo(dsherret): use the preloaded module from the graph if available? let media_type = MediaType::from_path(path); - let text = self.sys.fs_read_to_string_lossy(path)?; + let text = self + .sys + .fs_read_to_string_lossy(path) + .map_err(JsErrorBox::from_err)?; if media_type.is_emittable() { - let specifier = deno_path_util::url_from_file_path(path)?; + let specifier = deno_path_util::url_from_file_path(path) + .map_err(JsErrorBox::from_err)?; if self.in_npm_pkg_checker.in_npm_package(&specifier) { - return Err( - NotSupportedKindInNpmError { - media_type, - specifier, - } - .into(), - ); + return Err(JsErrorBox::from_err(NotSupportedKindInNpmError { + media_type, + specifier, + })); } self .emitter @@ -1172,6 +1198,7 @@ impl NodeRequireLoader &text.into(), ) .map(Cow::Owned) + .map_err(JsErrorBox::from_err) } else { Ok(text) } diff --git a/cli/npm/managed/mod.rs b/cli/npm/managed/mod.rs index 55421f41e8c39e..831b0b0ba81344 100644 --- a/cli/npm/managed/mod.rs +++ b/cli/npm/managed/mod.rs @@ -11,6 +11,7 @@ use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::url::Url; +use deno_error::JsErrorBox; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmRegistryApi; @@ -322,6 +323,28 @@ impl std::fmt::Debug for ManagedCliNpmResolver { } } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum ResolvePkgFolderFromPkgIdError { + #[class(inherit)] + #[error("{0}")] + NpmPackageFsResolverPackageFolder( + #[from] resolvers::NpmPackageFsResolverPackageFolderError, + ), + #[class(inherit)] + #[error("{0}")] + Io(#[from] std::io::Error), +} + +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum ResolvePkgFolderFromDenoModuleError { + #[class(inherit)] + #[error("{0}")] + PackageNvNotFound(#[from] deno_npm::resolution::PackageNvNotFoundError), + #[class(inherit)] + #[error("{0}")] + ResolvePkgFolderFromPkgId(#[from] ResolvePkgFolderFromPkgIdError), +} + impl ManagedCliNpmResolver { #[allow(clippy::too_many_arguments)] pub fn new( @@ -356,7 +379,7 @@ impl ManagedCliNpmResolver { pub fn resolve_pkg_folder_from_pkg_id( &self, pkg_id: &NpmPackageId, - ) -> Result { + ) -> Result { let path = self.fs_resolver.package_folder(pkg_id)?; let path = 
canonicalize_path_maybe_not_exists(&self.sys, &path)?; log::debug!( @@ -423,7 +446,7 @@ impl ManagedCliNpmResolver { pub async fn add_and_cache_package_reqs( &self, packages: &[PackageReq], - ) -> Result<(), AnyError> { + ) -> Result<(), JsErrorBox> { self .add_package_reqs_raw( packages, @@ -436,7 +459,7 @@ impl ManagedCliNpmResolver { pub async fn add_package_reqs_no_cache( &self, packages: &[PackageReq], - ) -> Result<(), AnyError> { + ) -> Result<(), JsErrorBox> { self .add_package_reqs_raw(packages, None) .await @@ -447,7 +470,7 @@ impl ManagedCliNpmResolver { &self, packages: &[PackageReq], caching: PackageCaching<'_>, - ) -> Result<(), AnyError> { + ) -> Result<(), JsErrorBox> { self .add_package_reqs_raw(packages, Some(caching)) .await @@ -517,7 +540,7 @@ impl ManagedCliNpmResolver { pub async fn inject_synthetic_types_node_package( &self, - ) -> Result<(), AnyError> { + ) -> Result<(), JsErrorBox> { let reqs = &[PackageReq::from_str("@types/node").unwrap()]; // add and ensure this isn't added to the lockfile self @@ -530,16 +553,16 @@ impl ManagedCliNpmResolver { pub async fn cache_packages( &self, caching: PackageCaching<'_>, - ) -> Result<(), AnyError> { + ) -> Result<(), JsErrorBox> { self.fs_resolver.cache_packages(caching).await } pub fn resolve_pkg_folder_from_deno_module( &self, nv: &PackageNv, - ) -> Result { + ) -> Result { let pkg_id = self.resolution.resolve_pkg_id_from_deno_module(nv)?; - self.resolve_pkg_folder_from_pkg_id(&pkg_id) + Ok(self.resolve_pkg_folder_from_pkg_id(&pkg_id)?) } pub fn resolve_pkg_id_from_pkg_req( @@ -580,7 +603,7 @@ impl ManagedCliNpmResolver { /// return value of `false` means that new packages were added to the NPM resolution. pub async fn ensure_top_level_package_json_install( &self, - ) -> Result { + ) -> Result { if !self.top_level_install_flag.raise() { return Ok(true); // already did this } @@ -687,12 +710,12 @@ impl CliNpmReqResolver for ManagedCliNpmResolver { req: &PackageReq, _referrer: &ModuleSpecifier, ) -> Result { - let pkg_id = self - .resolve_pkg_id_from_pkg_req(req) - .map_err(|err| ResolvePkgFolderFromDenoReqError::Managed(err.into()))?; + let pkg_id = self.resolve_pkg_id_from_pkg_req(req).map_err(|err| { + ResolvePkgFolderFromDenoReqError::Managed(Box::new(err)) + })?; self .resolve_pkg_folder_from_pkg_id(&pkg_id) - .map_err(ResolvePkgFolderFromDenoReqError::Managed) + .map_err(|err| ResolvePkgFolderFromDenoReqError::Managed(Box::new(err))) } } diff --git a/cli/npm/managed/resolution.rs b/cli/npm/managed/resolution.rs index 12f7b17565810e..8259062c058465 100644 --- a/cli/npm/managed/resolution.rs +++ b/cli/npm/managed/resolution.rs @@ -6,6 +6,7 @@ use std::sync::Arc; use capacity_builder::StringBuilder; use deno_core::error::AnyError; +use deno_error::JsErrorBox; use deno_lockfile::NpmPackageDependencyLockfileInfo; use deno_lockfile::NpmPackageLockfileInfo; use deno_npm::registry::NpmRegistryApi; @@ -39,7 +40,7 @@ pub struct AddPkgReqsResult { /// package requirements. pub results: Vec>, /// The final result of resolving and caching all the package requirements. 
- pub dependencies_result: Result<(), AnyError>, + pub dependencies_result: Result<(), JsErrorBox>, } /// Handles updating and storing npm resolution in memory where the underlying @@ -106,7 +107,7 @@ impl NpmResolution { *snapshot_lock.write() = snapshot; Ok(()) } - Err(err) => Err(err.into()), + Err(err) => Err(JsErrorBox::from_err(err)), }, } } diff --git a/cli/npm/managed/resolvers/common.rs b/cli/npm/managed/resolvers/common.rs index 66d991bd496100..0d5fab10d346bd 100644 --- a/cli/npm/managed/resolvers/common.rs +++ b/cli/npm/managed/resolvers/common.rs @@ -8,13 +8,18 @@ use std::path::PathBuf; use async_trait::async_trait; use deno_ast::ModuleSpecifier; -use deno_core::error::AnyError; +use deno_error::JsErrorBox; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use node_resolver::errors::PackageFolderResolveError; use super::super::PackageCaching; +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(generic)] +#[error("Package folder not found for '{0}'")] +pub struct NpmPackageFsResolverPackageFolderError(deno_semver::StackString); + /// Part of the resolution that interacts with the file system. #[async_trait(?Send)] pub trait NpmPackageFsResolver: Send + Sync { @@ -26,12 +31,9 @@ pub trait NpmPackageFsResolver: Send + Sync { fn package_folder( &self, package_id: &NpmPackageId, - ) -> Result { + ) -> Result { self.maybe_package_folder(package_id).ok_or_else(|| { - deno_core::anyhow::anyhow!( - "Package folder not found for '{}'", - package_id.as_serialized() - ) + NpmPackageFsResolverPackageFolderError(package_id.as_serialized()) }) } @@ -44,10 +46,10 @@ pub trait NpmPackageFsResolver: Send + Sync { fn resolve_package_cache_folder_id_from_specifier( &self, specifier: &ModuleSpecifier, - ) -> Result, AnyError>; + ) -> Result, std::io::Error>; async fn cache_packages<'a>( &self, caching: PackageCaching<'a>, - ) -> Result<(), AnyError>; + ) -> Result<(), JsErrorBox>; } diff --git a/cli/npm/managed/resolvers/common/bin_entries.rs b/cli/npm/managed/resolvers/common/bin_entries.rs index 32ebd687467e4f..bc69786b6c4f02 100644 --- a/cli/npm/managed/resolvers/common/bin_entries.rs +++ b/cli/npm/managed/resolvers/common/bin_entries.rs @@ -6,8 +6,6 @@ use std::collections::VecDeque; use std::path::Path; use std::path::PathBuf; -use deno_core::anyhow::Context; -use deno_core::error::AnyError; use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::NpmPackageId; @@ -50,6 +48,48 @@ pub fn warn_missing_entrypoint( ); } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum BinEntriesError { + #[class(inherit)] + #[error("Creating '{path}'")] + Creating { + path: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, + #[cfg(unix)] + #[class(inherit)] + #[error("Setting permissions on '{path}'")] + Permissions { + path: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(inherit)] + #[error("Can't set up '{name}' bin at {path}")] + SetUpBin { + name: String, + path: PathBuf, + #[source] + #[inherit] + source: Box, + }, + #[cfg(unix)] + #[class(inherit)] + #[error("Setting permissions on '{path}'")] + RemoveBinSymlink { + path: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(inherit)] + #[error(transparent)] + Io(#[from] std::io::Error), +} + impl<'a> BinEntries<'a> { pub fn new() -> Self { Self::default() @@ -92,15 +132,15 @@ impl<'a> BinEntries<'a> { mut already_seen: impl FnMut( &Path, &str, // bin script - ) -> Result<(), AnyError>, + ) -> Result<(), BinEntriesError>, mut new: impl 
FnMut( &NpmResolutionPackage, &Path, &str, // bin name &str, // bin script - ) -> Result<(), AnyError>, + ) -> Result<(), BinEntriesError>, mut filter: impl FnMut(&NpmResolutionPackage) -> bool, - ) -> Result<(), AnyError> { + ) -> Result<(), BinEntriesError> { if !self.collisions.is_empty() && !self.sorted { // walking the dependency tree to find out the depth of each package // is sort of expensive, so we only do it if there's a collision @@ -168,11 +208,14 @@ impl<'a> BinEntries<'a> { bin_node_modules_dir_path: &Path, filter: impl FnMut(&NpmResolutionPackage) -> bool, mut handler: impl FnMut(&EntrySetupOutcome<'_>), - ) -> Result<(), AnyError> { + ) -> Result<(), BinEntriesError> { if !self.entries.is_empty() && !bin_node_modules_dir_path.exists() { - std::fs::create_dir_all(bin_node_modules_dir_path).with_context( - || format!("Creating '{}'", bin_node_modules_dir_path.display()), - )?; + std::fs::create_dir_all(bin_node_modules_dir_path).map_err(|source| { + BinEntriesError::Creating { + path: bin_node_modules_dir_path.to_path_buf(), + source, + } + })?; } self.for_each_entry( @@ -209,7 +252,7 @@ impl<'a> BinEntries<'a> { snapshot: &NpmResolutionSnapshot, bin_node_modules_dir_path: &Path, handler: impl FnMut(&EntrySetupOutcome<'_>), - ) -> Result<(), AnyError> { + ) -> Result<(), BinEntriesError> { self.set_up_entries_filtered( snapshot, bin_node_modules_dir_path, @@ -226,7 +269,7 @@ impl<'a> BinEntries<'a> { bin_node_modules_dir_path: &Path, handler: impl FnMut(&EntrySetupOutcome<'_>), only: &HashSet<&NpmPackageId>, - ) -> Result<(), AnyError> { + ) -> Result<(), BinEntriesError> { self.set_up_entries_filtered( snapshot, bin_node_modules_dir_path, @@ -301,7 +344,7 @@ pub fn set_up_bin_entry<'a>( #[allow(unused_variables)] bin_script: &str, #[allow(unused_variables)] package_path: &'a Path, bin_node_modules_dir_path: &Path, -) -> Result, AnyError> { +) -> Result, BinEntriesError> { #[cfg(windows)] { set_up_bin_shim(package, bin_name, bin_node_modules_dir_path)?; @@ -324,14 +367,16 @@ fn set_up_bin_shim( package: &NpmResolutionPackage, bin_name: &str, bin_node_modules_dir_path: &Path, -) -> Result<(), AnyError> { +) -> Result<(), BinEntriesError> { use std::fs; let mut cmd_shim = bin_node_modules_dir_path.join(bin_name); cmd_shim.set_extension("cmd"); let shim = format!("@deno run -A npm:{}/{bin_name} %*", package.id.nv); - fs::write(&cmd_shim, shim).with_context(|| { - format!("Can't set up '{}' bin at {}", bin_name, cmd_shim.display()) + fs::write(&cmd_shim, shim).map_err(|err| BinEntriesError::SetUpBin { + name: bin_name.to_string(), + path: cmd_shim.clone(), + source: Box::new(err.into()), })?; Ok(()) @@ -340,7 +385,7 @@ fn set_up_bin_shim( #[cfg(unix)] /// Make the file at `path` executable if it exists. /// Returns `true` if the file exists, `false` otherwise. 
-fn make_executable_if_exists(path: &Path) -> Result { +fn make_executable_if_exists(path: &Path) -> Result { use std::io; use std::os::unix::fs::PermissionsExt; let mut perms = match std::fs::metadata(path) { @@ -355,8 +400,11 @@ fn make_executable_if_exists(path: &Path) -> Result { if perms.mode() & 0o111 == 0 { // if the original file is not executable, make it executable perms.set_mode(perms.mode() | 0o111); - std::fs::set_permissions(path, perms).with_context(|| { - format!("Setting permissions on '{}'", path.display()) + std::fs::set_permissions(path, perms).map_err(|source| { + BinEntriesError::Permissions { + path: path.to_path_buf(), + source, + } })?; } @@ -395,14 +443,18 @@ fn symlink_bin_entry<'a>( bin_script: &str, package_path: &'a Path, bin_node_modules_dir_path: &Path, -) -> Result, AnyError> { +) -> Result, BinEntriesError> { use std::io; use std::os::unix::fs::symlink; let link = bin_node_modules_dir_path.join(bin_name); let original = package_path.join(bin_script); - let found = make_executable_if_exists(&original).with_context(|| { - format!("Can't set up '{}' bin at {}", bin_name, original.display()) + let found = make_executable_if_exists(&original).map_err(|source| { + BinEntriesError::SetUpBin { + name: bin_name.to_string(), + path: original.to_path_buf(), + source: Box::new(source), + } })?; if !found { return Ok(EntrySetupOutcome::MissingEntrypoint { @@ -420,27 +472,25 @@ fn symlink_bin_entry<'a>( if let Err(err) = symlink(&original_relative, &link) { if err.kind() == io::ErrorKind::AlreadyExists { // remove and retry - std::fs::remove_file(&link).with_context(|| { - format!( - "Failed to remove existing bin symlink at {}", - link.display() - ) + std::fs::remove_file(&link).map_err(|source| { + BinEntriesError::RemoveBinSymlink { + path: link.clone(), + source, + } })?; - symlink(&original_relative, &link).with_context(|| { - format!( - "Can't set up '{}' bin at {}", - bin_name, - original_relative.display() - ) + symlink(&original_relative, &link).map_err(|source| { + BinEntriesError::SetUpBin { + name: bin_name.to_string(), + path: original_relative.to_path_buf(), + source: Box::new(source.into()), + } })?; return Ok(EntrySetupOutcome::Success); } - return Err(err).with_context(|| { - format!( - "Can't set up '{}' bin at {}", - bin_name, - original_relative.display() - ) + return Err(BinEntriesError::SetUpBin { + name: bin_name.to_string(), + path: original_relative.to_path_buf(), + source: Box::new(err.into()), }); } diff --git a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs index 738326ad22dc46..a0d821cdfc88b1 100644 --- a/cli/npm/managed/resolvers/common/lifecycle_scripts.rs +++ b/cli/npm/managed/resolvers/common/lifecycle_scripts.rs @@ -6,7 +6,6 @@ use std::path::Path; use std::path::PathBuf; use std::rc::Rc; -use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::NpmResolutionPackage; @@ -29,7 +28,7 @@ pub trait LifecycleScriptsStrategy { fn warn_on_scripts_not_run( &self, packages: &[(&NpmResolutionPackage, PathBuf)], - ) -> Result<(), AnyError>; + ) -> Result<(), std::io::Error>; fn has_warned(&self, package: &NpmResolutionPackage) -> bool; @@ -38,7 +37,7 @@ pub trait LifecycleScriptsStrategy { fn did_run_scripts( &self, package: &NpmResolutionPackage, - ) -> Result<(), AnyError>; + ) -> Result<(), std::io::Error>; } pub struct LifecycleScripts<'a> { @@ -84,6 +83,27 @@ fn is_broken_default_install_script(script: &str, 
package_path: &Path) -> bool { script == "node-gyp rebuild" && !package_path.join("binding.gyp").exists() } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum LifecycleScriptsError { + #[class(inherit)] + #[error(transparent)] + Io(#[from] std::io::Error), + #[class(inherit)] + #[error(transparent)] + BinEntries(#[from] super::bin_entries::BinEntriesError), + #[class(inherit)] + #[error( + "failed to create npm process state tempfile for running lifecycle scripts" + )] + CreateNpmProcessState(#[source] std::io::Error), + #[class(generic)] + #[error(transparent)] + Task(AnyError), + #[class(generic)] + #[error("failed to run scripts for packages: {}", .0.join(", "))] + RunScripts(Vec), +} + impl<'a> LifecycleScripts<'a> { pub fn can_run_scripts(&self, package_nv: &PackageNv) -> bool { if !self.strategy.can_run_scripts() { @@ -141,7 +161,7 @@ impl<'a> LifecycleScripts<'a> { } } - pub fn warn_not_run_scripts(&self) -> Result<(), AnyError> { + pub fn warn_not_run_scripts(&self) -> Result<(), std::io::Error> { if !self.packages_with_scripts_not_run.is_empty() { self .strategy @@ -156,7 +176,7 @@ impl<'a> LifecycleScripts<'a> { packages: &[NpmResolutionPackage], root_node_modules_dir_path: &Path, progress_bar: &ProgressBar, - ) -> Result<(), AnyError> { + ) -> Result<(), LifecycleScriptsError> { let kill_signal = KillSignal::default(); let _drop_signal = kill_signal.clone().drop_guard(); // we don't run with signals forwarded because once signals @@ -179,7 +199,7 @@ impl<'a> LifecycleScripts<'a> { root_node_modules_dir_path: &Path, progress_bar: &ProgressBar, kill_signal: KillSignal, - ) -> Result<(), AnyError> { + ) -> Result<(), LifecycleScriptsError> { self.warn_not_run_scripts()?; let get_package_path = |p: &NpmResolutionPackage| self.strategy.package_path(p); @@ -198,7 +218,7 @@ impl<'a> LifecycleScripts<'a> { snapshot, packages, get_package_path, - )?; + ); let init_cwd = &self.config.initial_cwd; let process_state = crate::npm::managed::npm_process_state( snapshot.as_valid_serialized(), @@ -222,7 +242,8 @@ impl<'a> LifecycleScripts<'a> { let temp_file_fd = deno_runtime::ops::process::npm_process_state_tempfile( process_state.as_bytes(), - ).context("failed to create npm process state tempfile for running lifecycle scripts")?; + ) + .map_err(LifecycleScriptsError::CreateNpmProcessState)?; // SAFETY: fd/handle is valid let _temp_file = unsafe { std::fs::File::from_raw_io_handle(temp_file_fd) }; // make sure the file gets closed @@ -240,7 +261,7 @@ impl<'a> LifecycleScripts<'a> { package, snapshot, get_package_path, - )?; + ); for script_name in ["preinstall", "install", "postinstall"] { if let Some(script) = package.scripts.get(script_name) { if script_name == "install" @@ -273,7 +294,8 @@ impl<'a> LifecycleScripts<'a> { kill_signal: kill_signal.clone(), }, ) - .await?; + .await + .map_err(LifecycleScriptsError::Task)?; let stdout = stdout.unwrap(); let stderr = stderr.unwrap(); if exit_code != 0 { @@ -322,14 +344,12 @@ impl<'a> LifecycleScripts<'a> { if failed_packages.is_empty() { Ok(()) } else { - Err(AnyError::msg(format!( - "failed to run scripts for packages: {}", + Err(LifecycleScriptsError::RunScripts( failed_packages .iter() .map(|p| p.to_string()) - .collect::>() - .join(", ") - ))) + .collect::>(), + )) } } } @@ -349,7 +369,7 @@ fn resolve_baseline_custom_commands<'a>( snapshot: &'a NpmResolutionSnapshot, packages: &'a [NpmResolutionPackage], get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf, -) -> Result { +) -> crate::task_runner::TaskCustomCommands { 
let mut custom_commands = crate::task_runner::TaskCustomCommands::new(); custom_commands .insert("npx".to_string(), Rc::new(crate::task_runner::NpxCommand)); @@ -390,7 +410,7 @@ fn resolve_custom_commands_from_packages< snapshot: &'a NpmResolutionSnapshot, packages: P, get_package_path: impl Fn(&'a NpmResolutionPackage) -> PathBuf, -) -> Result { +) -> crate::task_runner::TaskCustomCommands { for package in packages { let package_path = get_package_path(package); @@ -409,7 +429,7 @@ fn resolve_custom_commands_from_packages< ); } - Ok(commands) + commands } // resolves the custom commands from the dependencies of a package @@ -420,7 +440,7 @@ fn resolve_custom_commands_from_deps( package: &NpmResolutionPackage, snapshot: &NpmResolutionSnapshot, get_package_path: impl Fn(&NpmResolutionPackage) -> PathBuf, -) -> Result { +) -> crate::task_runner::TaskCustomCommands { let mut bin_entries = BinEntries::new(); resolve_custom_commands_from_packages( &mut bin_entries, diff --git a/cli/npm/managed/resolvers/global.rs b/cli/npm/managed/resolvers/global.rs index 417345cefebd2e..9af35b169d6b2a 100644 --- a/cli/npm/managed/resolvers/global.rs +++ b/cli/npm/managed/resolvers/global.rs @@ -9,9 +9,9 @@ use std::sync::Arc; use async_trait::async_trait; use deno_ast::ModuleSpecifier; -use deno_core::error::AnyError; use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::StreamExt; +use deno_error::JsErrorBox; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; use deno_npm::NpmResolutionPackage; @@ -134,7 +134,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver { fn resolve_package_cache_folder_id_from_specifier( &self, specifier: &ModuleSpecifier, - ) -> Result, AnyError> { + ) -> Result, std::io::Error> { Ok( self .cache @@ -145,7 +145,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver { async fn cache_packages<'a>( &self, caching: PackageCaching<'a>, - ) -> Result<(), AnyError> { + ) -> Result<(), JsErrorBox> { let package_partitions = match caching { PackageCaching::All => self .resolution @@ -155,13 +155,16 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver { .subset(&reqs) .all_system_packages_partitioned(&self.system_info), }; - cache_packages(&package_partitions.packages, &self.tarball_cache).await?; + cache_packages(&package_partitions.packages, &self.tarball_cache) + .await + .map_err(JsErrorBox::from_err)?; // create the copy package folders for copy in package_partitions.copy_packages { self .cache - .ensure_copy_package(©.get_package_cache_folder_id())?; + .ensure_copy_package(©.get_package_cache_folder_id()) + .map_err(JsErrorBox::from_err)?; } let mut lifecycle_scripts = @@ -174,7 +177,9 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver { lifecycle_scripts.add(package, Cow::Borrowed(&package_folder)); } - lifecycle_scripts.warn_not_run_scripts()?; + lifecycle_scripts + .warn_not_run_scripts() + .map_err(JsErrorBox::from_err)?; Ok(()) } @@ -183,7 +188,7 @@ impl NpmPackageFsResolver for GlobalNpmPackageResolver { async fn cache_packages( packages: &[NpmResolutionPackage], tarball_cache: &Arc, -) -> Result<(), AnyError> { +) -> Result<(), deno_npm_cache::EnsurePackageError> { let mut futures_unordered = FuturesUnordered::new(); for package in packages { futures_unordered.push(async move { @@ -235,7 +240,7 @@ impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy fn warn_on_scripts_not_run( &self, packages: &[(&NpmResolutionPackage, PathBuf)], - ) -> std::result::Result<(), deno_core::anyhow::Error> { + ) -> 
std::result::Result<(), std::io::Error> { log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall")); for (package, _) in packages { log::warn!("┠─ {}", colors::gray(format!("npm:{}", package.id.nv))); @@ -261,7 +266,7 @@ impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy fn did_run_scripts( &self, _package: &NpmResolutionPackage, - ) -> std::result::Result<(), deno_core::anyhow::Error> { + ) -> Result<(), std::io::Error> { Ok(()) } diff --git a/cli/npm/managed/resolvers/local.rs b/cli/npm/managed/resolvers/local.rs index 1a4ec57a69e85c..e1ac3df43bd1a3 100644 --- a/cli/npm/managed/resolvers/local.rs +++ b/cli/npm/managed/resolvers/local.rs @@ -19,12 +19,11 @@ use std::sync::Arc; use async_trait::async_trait; use deno_ast::ModuleSpecifier; use deno_cache_dir::npm::mixed_case_package_name_decode; -use deno_core::anyhow::Context; -use deno_core::error::AnyError; use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::StreamExt; use deno_core::parking_lot::Mutex; use deno_core::url::Url; +use deno_error::JsErrorBox; use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::NpmPackageCacheFolderId; use deno_npm::NpmPackageId; @@ -140,7 +139,7 @@ impl LocalNpmPackageResolver { fn resolve_package_folder_from_specifier( &self, specifier: &ModuleSpecifier, - ) -> Result, AnyError> { + ) -> Result, std::io::Error> { let Some(local_path) = self.resolve_folder_for_specifier(specifier)? else { return Ok(None); }; @@ -225,7 +224,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver { fn resolve_package_cache_folder_id_from_specifier( &self, specifier: &ModuleSpecifier, - ) -> Result, AnyError> { + ) -> Result, std::io::Error> { let Some(folder_path) = self.resolve_package_folder_from_specifier(specifier)? else { @@ -251,7 +250,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver { async fn cache_packages<'a>( &self, caching: PackageCaching<'a>, - ) -> Result<(), AnyError> { + ) -> Result<(), JsErrorBox> { let snapshot = match caching { PackageCaching::All => self.resolution.snapshot(), PackageCaching::Only(reqs) => self.resolution.subset(&reqs), @@ -267,6 +266,7 @@ impl NpmPackageFsResolver for LocalNpmPackageResolver { &self.lifecycle_scripts, ) .await + .map_err(JsErrorBox::from_err) } } @@ -285,6 +285,38 @@ fn local_node_modules_package_contents_path( .join(&package.id.nv.name) } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum SyncResolutionWithFsError { + #[class(inherit)] + #[error("Creating '{path}'")] + Creating { + path: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(inherit)] + #[error(transparent)] + CopyDirRecursive(#[from] crate::util::fs::CopyDirRecursiveError), + #[class(inherit)] + #[error(transparent)] + SymlinkPackageDir(#[from] SymlinkPackageDirError), + #[class(inherit)] + #[error(transparent)] + BinEntries(#[from] bin_entries::BinEntriesError), + #[class(inherit)] + #[error(transparent)] + LifecycleScripts( + #[from] super::common::lifecycle_scripts::LifecycleScriptsError, + ), + #[class(inherit)] + #[error(transparent)] + Io(#[from] std::io::Error), + #[class(inherit)] + #[error(transparent)] + Other(#[from] JsErrorBox), +} + /// Creates a pnpm style folder structure. 
#[allow(clippy::too_many_arguments)] async fn sync_resolution_with_fs( @@ -296,7 +328,7 @@ async fn sync_resolution_with_fs( root_node_modules_dir_path: &Path, system_info: &NpmSystemInfo, lifecycle_scripts: &LifecycleScriptsConfig, -) -> Result<(), AnyError> { +) -> Result<(), SyncResolutionWithFsError> { if snapshot.is_empty() && npm_install_deps_provider.workspace_pkgs().is_empty() { @@ -311,12 +343,18 @@ async fn sync_resolution_with_fs( let deno_local_registry_dir = root_node_modules_dir_path.join(".deno"); let deno_node_modules_dir = deno_local_registry_dir.join("node_modules"); - fs::create_dir_all(&deno_node_modules_dir).with_context(|| { - format!("Creating '{}'", deno_local_registry_dir.display()) + fs::create_dir_all(&deno_node_modules_dir).map_err(|source| { + SyncResolutionWithFsError::Creating { + path: deno_local_registry_dir.to_path_buf(), + source, + } })?; let bin_node_modules_dir_path = root_node_modules_dir_path.join(".bin"); - fs::create_dir_all(&bin_node_modules_dir_path).with_context(|| { - format!("Creating '{}'", bin_node_modules_dir_path.display()) + fs::create_dir_all(&bin_node_modules_dir_path).map_err(|source| { + SyncResolutionWithFsError::Creating { + path: deno_local_registry_dir.to_path_buf(), + source, + } })?; let single_process_lock = LaxSingleProcessFsFlag::lock( @@ -420,7 +458,8 @@ async fn sync_resolution_with_fs( cache_futures.push(async move { tarball_cache .ensure_package(&package.id.nv, &package.dist) - .await?; + .await + .map_err(JsErrorBox::from_err)?; let pb_guard = progress_bar.update_with_prompt( ProgressMessagePrompt::Initialize, &package.id.nv.to_string(), @@ -441,10 +480,12 @@ async fn sync_resolution_with_fs( // write out a file that indicates this folder has been initialized fs::write(initialized_file, tags)?; - Ok::<_, AnyError>(()) + Ok::<_, SyncResolutionWithFsError>(()) } }) - .await??; + .await + .map_err(JsErrorBox::from_err)? 
+ .map_err(JsErrorBox::from_err)?; if package.bin.is_some() { bin_entries_to_setup.borrow_mut().add(package, package_path); @@ -458,7 +499,7 @@ async fn sync_resolution_with_fs( // finally stop showing the progress bar drop(pb_guard); // explicit for clarity - Ok::<_, AnyError>(()) + Ok::<_, JsErrorBox>(()) }); } else if matches!(package_state, PackageFolderState::TagsOutdated) { fs::write(initialized_file, tags)?; @@ -597,8 +638,11 @@ async fn sync_resolution_with_fs( // symlink the dep into the package's child node_modules folder let dest_node_modules = remote.base_dir.join("node_modules"); if !existing_child_node_modules_dirs.contains(&dest_node_modules) { - fs::create_dir_all(&dest_node_modules).with_context(|| { - format!("Creating '{}'", dest_node_modules.display()) + fs::create_dir_all(&dest_node_modules).map_err(|source| { + SyncResolutionWithFsError::Creating { + path: dest_node_modules.clone(), + source, + } })?; existing_child_node_modules_dirs.insert(dest_node_modules.clone()); } @@ -813,7 +857,7 @@ impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy fn did_run_scripts( &self, package: &NpmResolutionPackage, - ) -> std::result::Result<(), deno_core::anyhow::Error> { + ) -> std::result::Result<(), std::io::Error> { std::fs::write(self.ran_scripts_file(package), "")?; Ok(()) } @@ -821,7 +865,7 @@ impl<'a> super::common::lifecycle_scripts::LifecycleScriptsStrategy fn warn_on_scripts_not_run( &self, packages: &[(&NpmResolutionPackage, std::path::PathBuf)], - ) -> Result<(), AnyError> { + ) -> Result<(), std::io::Error> { if !packages.is_empty() { log::warn!("{} The following packages contained npm lifecycle scripts ({}) that were not executed:", colors::yellow("Warning"), colors::gray("preinstall/install/postinstall")); @@ -1041,15 +1085,42 @@ fn get_package_folder_id_from_folder_name( }) } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum SymlinkPackageDirError { + #[class(inherit)] + #[error("Creating '{parent}'")] + Creating { + parent: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(inherit)] + #[error(transparent)] + Other(#[from] std::io::Error), + #[cfg(windows)] + #[class(inherit)] + #[error("Creating junction in node_modules folder")] + FailedCreatingJunction { + #[source] + #[inherit] + source: std::io::Error, + }, +} + fn symlink_package_dir( old_path: &Path, new_path: &Path, -) -> Result<(), AnyError> { +) -> Result<(), SymlinkPackageDirError> { let new_parent = new_path.parent().unwrap(); if new_parent.file_name().unwrap() != "node_modules" { // create the parent folder that will contain the symlink - fs::create_dir_all(new_parent) - .with_context(|| format!("Creating '{}'", new_parent.display()))?; + fs::create_dir_all(new_parent).map_err(|source| { + SymlinkPackageDirError::Creating { + parent: new_parent.to_path_buf(), + source, + } + })?; } // need to delete the previous symlink before creating a new one @@ -1075,7 +1146,7 @@ fn junction_or_symlink_dir( old_path_relative: &Path, old_path: &Path, new_path: &Path, -) -> Result<(), AnyError> { +) -> Result<(), SymlinkPackageDirError> { static USE_JUNCTIONS: std::sync::atomic::AtomicBool = std::sync::atomic::AtomicBool::new(false); @@ -1084,8 +1155,9 @@ fn junction_or_symlink_dir( // needing to elevate privileges on Windows. // Note: junctions don't support relative paths, so we need to use the // absolute path here. 
- return junction::create(old_path, new_path) - .context("Failed creating junction in node_modules folder"); + return junction::create(old_path, new_path).map_err(|source| { + SymlinkPackageDirError::FailedCreatingJunction { source } + }); } match symlink_dir(&crate::sys::CliSys::default(), old_path_relative, new_path) @@ -1095,8 +1167,9 @@ fn junction_or_symlink_dir( if symlink_err.kind() == std::io::ErrorKind::PermissionDenied => { USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed); - junction::create(old_path, new_path) - .context("Failed creating junction in node_modules folder") + junction::create(old_path, new_path).map_err(|source| { + SymlinkPackageDirError::FailedCreatingJunction { source } + }) } Err(symlink_err) => { log::warn!( @@ -1104,8 +1177,9 @@ fn junction_or_symlink_dir( colors::yellow("Warning") ); USE_JUNCTIONS.store(true, std::sync::atomic::Ordering::Relaxed); - junction::create(old_path, new_path) - .context("Failed creating junction in node_modules folder") + junction::create(old_path, new_path).map_err(|source| { + SymlinkPackageDirError::FailedCreatingJunction { source } + }) } } } diff --git a/cli/npm/managed/resolvers/mod.rs b/cli/npm/managed/resolvers/mod.rs index cc4c735c7c013e..77d00a896e716a 100644 --- a/cli/npm/managed/resolvers/mod.rs +++ b/cli/npm/managed/resolvers/mod.rs @@ -10,6 +10,7 @@ use std::sync::Arc; use deno_npm::NpmSystemInfo; pub use self::common::NpmPackageFsResolver; +pub use self::common::NpmPackageFsResolverPackageFolderError; use self::global::GlobalNpmPackageResolver; use self::local::LocalNpmPackageResolver; use super::resolution::NpmResolution; diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs index 710c24f98d61e3..d66b3e618fc804 100644 --- a/cli/npm/mod.rs +++ b/cli/npm/mod.rs @@ -33,6 +33,7 @@ pub use self::managed::CliManagedNpmResolverCreateOptions; pub use self::managed::CliNpmResolverManagedSnapshotOption; pub use self::managed::ManagedCliNpmResolver; pub use self::managed::PackageCaching; +pub use self::managed::ResolvePkgFolderFromDenoModuleError; pub use self::permission_checker::NpmRegistryReadPermissionChecker; pub use self::permission_checker::NpmRegistryReadPermissionCheckerMode; use crate::file_fetcher::CliFileFetcher; @@ -90,7 +91,9 @@ impl deno_npm_cache::NpmCacheHttpClient for CliNpmCacheHttpClient { | Json { .. } | ToStr { .. } | RedirectHeaderParse { .. 
} - | TooManyRedirects => None, + | TooManyRedirects + | NotFound + | Other(_) => None, BadResponse(bad_response_error) => { Some(bad_response_error.status_code) } diff --git a/cli/npm/permission_checker.rs b/cli/npm/permission_checker.rs index 01fed08954f6ea..53031b5bd40b44 100644 --- a/cli/npm/permission_checker.rs +++ b/cli/npm/permission_checker.rs @@ -6,9 +6,8 @@ use std::io::ErrorKind; use std::path::Path; use std::path::PathBuf; -use deno_core::anyhow::Context; -use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; +use deno_error::JsErrorBox; use deno_runtime::deno_node::NodePermissions; use sys_traits::FsCanonicalize; @@ -28,6 +27,16 @@ pub struct NpmRegistryReadPermissionChecker { mode: NpmRegistryReadPermissionCheckerMode, } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(inherit)] +#[error("failed canonicalizing '{path}'")] +struct EnsureRegistryReadPermissionError { + path: PathBuf, + #[source] + #[inherit] + source: std::io::Error, +} + impl NpmRegistryReadPermissionChecker { pub fn new(sys: CliSys, mode: NpmRegistryReadPermissionCheckerMode) -> Self { Self { @@ -42,7 +51,7 @@ impl NpmRegistryReadPermissionChecker { &self, permissions: &mut dyn NodePermissions, path: &'a Path, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { if permissions.query_read_all() { return Ok(Cow::Borrowed(path)); // skip permissions checks below } @@ -52,7 +61,9 @@ impl NpmRegistryReadPermissionChecker { if path.components().any(|c| c.as_os_str() == "node_modules") { Ok(Cow::Borrowed(path)) } else { - permissions.check_read_path(path).map_err(Into::into) + permissions + .check_read_path(path) + .map_err(JsErrorBox::from_err) } } NpmRegistryReadPermissionCheckerMode::Global(registry_path) @@ -66,7 +77,7 @@ impl NpmRegistryReadPermissionChecker { if is_path_in_node_modules { let mut cache = self.cache.lock(); let mut canonicalize = - |path: &Path| -> Result, AnyError> { + |path: &Path| -> Result, JsErrorBox> { match cache.get(path) { Some(canon) => Ok(Some(canon.clone())), None => match self.sys.fs_canonicalize(path) { @@ -78,9 +89,12 @@ impl NpmRegistryReadPermissionChecker { if e.kind() == ErrorKind::NotFound { return Ok(None); } - Err(AnyError::from(e)).with_context(|| { - format!("failed canonicalizing '{}'", path.display()) - }) + Err(JsErrorBox::from_err( + EnsureRegistryReadPermissionError { + path: path.to_path_buf(), + source: e, + }, + )) } }, } @@ -98,7 +112,9 @@ impl NpmRegistryReadPermissionChecker { } } - permissions.check_read_path(path).map_err(Into::into) + permissions + .check_read_path(path) + .map_err(JsErrorBox::from_err) } } } diff --git a/cli/ops/bench.rs b/cli/ops/bench.rs index c6eca9216f1171..a06182fbd0f41f 100644 --- a/cli/ops/bench.rs +++ b/cli/ops/bench.rs @@ -3,13 +3,11 @@ use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; -use deno_core::error::generic_error; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::v8; use deno_core::ModuleSpecifier; use deno_core::OpState; +use deno_error::JsErrorBox; use deno_runtime::deno_permissions::ChildPermissionsArg; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_web::StartTime; @@ -78,7 +76,7 @@ pub fn op_pledge_test_permissions( pub fn op_restore_test_permissions( state: &mut OpState, #[serde] token: Uuid, -) -> Result<(), AnyError> { +) -> Result<(), JsErrorBox> { if let Some(permissions_holder) = state.try_take::() { if token != permissions_holder.0 { panic!("restore test permissions token 
does not match the stored token"); @@ -88,7 +86,7 @@ pub fn op_restore_test_permissions( state.put::(permissions); Ok(()) } else { - Err(generic_error("no permissions to restore")) + Err(JsErrorBox::generic("no permissions to restore")) } } @@ -106,9 +104,9 @@ fn op_register_bench( only: bool, warmup: bool, #[buffer] ret_buf: &mut [u8], -) -> Result<(), AnyError> { +) -> Result<(), JsErrorBox> { if ret_buf.len() != 4 { - return Err(type_error(format!( + return Err(JsErrorBox::type_error(format!( "Invalid ret_buf length: {}", ret_buf.len() ))); diff --git a/cli/ops/jupyter.rs b/cli/ops/jupyter.rs index 6a9252c35ac1ff..3160f991bf2d6e 100644 --- a/cli/ops/jupyter.rs +++ b/cli/ops/jupyter.rs @@ -94,10 +94,12 @@ pub fn op_jupyter_input( None } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum JupyterBroadcastError { + #[class(inherit)] #[error(transparent)] SerdeJson(serde_json::Error), + #[class(generic)] #[error(transparent)] ZeroMq(AnyError), } diff --git a/cli/ops/lint.rs b/cli/ops/lint.rs index 0a444e942c0505..c13cb21a53d43f 100644 --- a/cli/ops/lint.rs +++ b/cli/ops/lint.rs @@ -2,25 +2,36 @@ use deno_ast::MediaType; use deno_ast::ModuleSpecifier; -use deno_core::error::generic_error; -use deno_core::error::AnyError; +use deno_ast::ParseDiagnostic; use deno_core::op2; use crate::tools::lint; deno_core::extension!(deno_lint, ops = [op_lint_create_serialized_ast,],); +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum LintError { + #[class(inherit)] + #[error(transparent)] + Io(#[from] std::io::Error), + #[class(inherit)] + #[error(transparent)] + ParseDiagnostic(#[from] ParseDiagnostic), + #[class(type)] + #[error("Failed to parse path as URL: {0}")] + PathParse(std::path::PathBuf), +} + #[op2] #[buffer] fn op_lint_create_serialized_ast( #[string] file_name: &str, #[string] source: String, -) -> Result, AnyError> { +) -> Result, LintError> { let file_text = deno_ast::strip_bom(source); let path = std::env::current_dir()?.join(file_name); - let specifier = ModuleSpecifier::from_file_path(&path).map_err(|_| { - generic_error(format!("Failed to parse path as URL: {}", path.display())) - })?; + let specifier = ModuleSpecifier::from_file_path(&path) + .map_err(|_| LintError::PathParse(path))?; let media_type = MediaType::from_specifier(&specifier); let parsed_source = deno_ast::parse_program(deno_ast::ParseParams { specifier, diff --git a/cli/ops/testing.rs b/cli/ops/testing.rs index 84e9aff83be4d2..c00ab949be2c4b 100644 --- a/cli/ops/testing.rs +++ b/cli/ops/testing.rs @@ -3,13 +3,11 @@ use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; -use deno_core::error::generic_error; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::v8; use deno_core::ModuleSpecifier; use deno_core::OpState; +use deno_error::JsErrorBox; use deno_runtime::deno_permissions::ChildPermissionsArg; use deno_runtime::deno_permissions::PermissionsContainer; use uuid::Uuid; @@ -73,7 +71,7 @@ pub fn op_pledge_test_permissions( pub fn op_restore_test_permissions( state: &mut OpState, #[serde] token: Uuid, -) -> Result<(), AnyError> { +) -> Result<(), JsErrorBox> { if let Some(permissions_holder) = state.try_take::() { if token != permissions_holder.0 { panic!("restore test permissions token does not match the stored token"); @@ -83,7 +81,7 @@ pub fn op_restore_test_permissions( state.put::(permissions); Ok(()) } else { - Err(generic_error("no permissions to restore")) + 
Err(JsErrorBox::generic("no permissions to restore")) } } @@ -103,9 +101,9 @@ fn op_register_test( #[smi] line_number: u32, #[smi] column_number: u32, #[buffer] ret_buf: &mut [u8], -) -> Result<(), AnyError> { +) -> Result<(), JsErrorBox> { if ret_buf.len() != 4 { - return Err(type_error(format!( + return Err(JsErrorBox::type_error(format!( "Invalid ret_buf length: {}", ret_buf.len() ))); diff --git a/cli/resolver.rs b/cli/resolver.rs index 661685c08166cf..7873a9cce0e2f1 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -11,12 +11,12 @@ use dashmap::DashSet; use deno_ast::MediaType; use deno_config::workspace::MappedResolutionDiagnostic; use deno_config::workspace::MappedResolutionError; -use deno_core::anyhow::anyhow; use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::url::Url; use deno_core::ModuleSourceCode; use deno_core::ModuleSpecifier; +use deno_error::JsErrorBox; use deno_graph::source::ResolveError; use deno_graph::source::UnknownBuiltInNodeModuleError; use deno_graph::NpmLoadError; @@ -61,7 +61,8 @@ pub struct ModuleCodeStringSource { pub media_type: MediaType, } -#[derive(Debug, Error)] +#[derive(Debug, Error, deno_error::JsError)] +#[class(type)] #[error("{media_type} files are not supported in npm packages: {specifier}")] pub struct NotSupportedKindInNpmError { pub media_type: MediaType, @@ -225,10 +226,12 @@ impl CliResolver { ) => match mapped_resolution_error { MappedResolutionError::Specifier(e) => ResolveError::Specifier(e), // deno_graph checks specifically for an ImportMapError - MappedResolutionError::ImportMap(e) => ResolveError::Other(e.into()), - err => ResolveError::Other(err.into()), + MappedResolutionError::ImportMap(e) => ResolveError::ImportMap(e), + MappedResolutionError::Workspace(e) => { + ResolveError::Other(JsErrorBox::from_err(e)) + } }, - err => ResolveError::Other(err.into()), + err => ResolveError::Other(JsErrorBox::from_err(err)), })?; if resolution.found_package_json_dep { @@ -356,26 +359,28 @@ impl<'a> deno_graph::source::NpmResolver for WorkerCliNpmGraphResolver<'a> { .map(|r| { r.map_err(|err| match err { NpmResolutionError::Registry(e) => { - NpmLoadError::RegistryInfo(Arc::new(e.into())) + NpmLoadError::RegistryInfo(Arc::new(e)) } NpmResolutionError::Resolution(e) => { - NpmLoadError::PackageReqResolution(Arc::new(e.into())) + NpmLoadError::PackageReqResolution(Arc::new(e)) } NpmResolutionError::DependencyEntry(e) => { - NpmLoadError::PackageReqResolution(Arc::new(e.into())) + NpmLoadError::PackageReqResolution(Arc::new(e)) } }) }) .collect(), dep_graph_result: match top_level_result { - Ok(()) => result.dependencies_result.map_err(Arc::new), + Ok(()) => result + .dependencies_result + .map_err(|e| Arc::new(e) as Arc), Err(err) => Err(Arc::new(err)), }, } } None => { - let err = Arc::new(anyhow!( - "npm specifiers were requested; but --no-npm is specified" + let err = Arc::new(JsErrorBox::generic( + "npm specifiers were requested; but --no-npm is specified", )); NpmResolvePkgReqsResult { results: package_reqs diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index 1e21e69eb9eb85..30e939cb7ad801 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -21,9 +21,8 @@ use deno_config::workspace::MappedResolutionError; use deno_config::workspace::ResolverWorkspaceJsrPackage; use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow::Context; -use deno_core::error::generic_error; -use deno_core::error::type_error; use deno_core::error::AnyError; +use deno_core::error::ModuleLoaderError; use 
deno_core::futures::future::LocalBoxFuture; use deno_core::futures::FutureExt; use deno_core::v8_set_flags; @@ -36,6 +35,7 @@ use deno_core::ModuleType; use deno_core::RequestedModuleType; use deno_core::ResolutionKind; use deno_core::SourceCodeCacheInfo; +use deno_error::JsErrorBox; use deno_npm::npm_rc::ResolvedNpmRc; use deno_package_json::PackageJsonDepValue; use deno_resolver::cjs::IsCjsResolutionMode; @@ -182,25 +182,32 @@ impl ModuleLoader for EmbeddedModuleLoader { raw_specifier: &str, referrer: &str, kind: ResolutionKind, - ) -> Result { + ) -> Result { let referrer = if referrer == "." { if kind != ResolutionKind::MainModule { - return Err(generic_error(format!( - "Expected to resolve main module, got {:?} instead.", - kind - ))); + return Err( + JsErrorBox::generic(format!( + "Expected to resolve main module, got {:?} instead.", + kind + )) + .into(), + ); } let current_dir = std::env::current_dir().unwrap(); deno_core::resolve_path(".", ¤t_dir)? } else { ModuleSpecifier::parse(referrer).map_err(|err| { - type_error(format!("Referrer uses invalid specifier: {}", err)) + JsErrorBox::type_error(format!( + "Referrer uses invalid specifier: {}", + err + )) })? }; let referrer_kind = if self .shared .cjs_tracker - .is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer))? + .is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer)) + .map_err(JsErrorBox::from_err)? { ResolutionMode::Require } else { @@ -217,7 +224,8 @@ impl ModuleLoader for EmbeddedModuleLoader { &referrer, referrer_kind, NodeResolutionKind::Execution, - )? + ) + .map_err(JsErrorBox::from_err)? .into_url(), ); } @@ -245,14 +253,18 @@ impl ModuleLoader for EmbeddedModuleLoader { Some(&referrer), referrer_kind, NodeResolutionKind::Execution, - )?, + ) + .map_err(JsErrorBox::from_err)?, ), Ok(MappedResolution::PackageJson { dep_result, sub_path, alias, .. - }) => match dep_result.as_ref().map_err(|e| AnyError::from(e.clone()))? { + }) => match dep_result + .as_ref() + .map_err(|e| JsErrorBox::from_err(e.clone()))? 
+ { PackageJsonDepValue::Req(req) => self .shared .npm_req_resolver @@ -263,7 +275,7 @@ impl ModuleLoader for EmbeddedModuleLoader { referrer_kind, NodeResolutionKind::Execution, ) - .map_err(AnyError::from), + .map_err(|e| JsErrorBox::from_err(e).into()), PackageJsonDepValue::Workspace(version_req) => { let pkg_folder = self .shared @@ -271,7 +283,8 @@ impl ModuleLoader for EmbeddedModuleLoader { .resolve_workspace_pkg_json_folder_for_pkg_json_dep( alias, version_req, - )?; + ) + .map_err(JsErrorBox::from_err)?; Ok( self .shared @@ -282,7 +295,8 @@ impl ModuleLoader for EmbeddedModuleLoader { Some(&referrer), referrer_kind, NodeResolutionKind::Execution, - )?, + ) + .map_err(JsErrorBox::from_err)?, ) } }, @@ -291,12 +305,18 @@ impl ModuleLoader for EmbeddedModuleLoader { if let Ok(reference) = NpmPackageReqReference::from_specifier(&specifier) { - return Ok(self.shared.npm_req_resolver.resolve_req_reference( - &reference, - &referrer, - referrer_kind, - NodeResolutionKind::Execution, - )?); + return Ok( + self + .shared + .npm_req_resolver + .resolve_req_reference( + &reference, + &referrer, + referrer_kind, + NodeResolutionKind::Execution, + ) + .map_err(JsErrorBox::from_err)?, + ); } if specifier.scheme() == "jsr" { @@ -318,18 +338,22 @@ impl ModuleLoader for EmbeddedModuleLoader { Err(err) if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" => { - let maybe_res = self.shared.npm_req_resolver.resolve_if_for_npm_pkg( - raw_specifier, - &referrer, - referrer_kind, - NodeResolutionKind::Execution, - )?; + let maybe_res = self + .shared + .npm_req_resolver + .resolve_if_for_npm_pkg( + raw_specifier, + &referrer, + referrer_kind, + NodeResolutionKind::Execution, + ) + .map_err(JsErrorBox::from_err)?; if let Some(res) = maybe_res { return Ok(res.into_url()); } - Err(err.into()) + Err(JsErrorBox::from_err(err).into()) } - Err(err) => Err(err.into()), + Err(err) => Err(JsErrorBox::from_err(err).into()), } } @@ -360,9 +384,9 @@ impl ModuleLoader for EmbeddedModuleLoader { { Ok(response) => response, Err(err) => { - return deno_core::ModuleLoadResponse::Sync(Err(type_error( - format!("{:#}", err), - ))); + return deno_core::ModuleLoadResponse::Sync(Err( + JsErrorBox::type_error(format!("{:#}", err)).into(), + )); } }; return deno_core::ModuleLoadResponse::Sync(Ok( @@ -420,9 +444,9 @@ impl ModuleLoader for EmbeddedModuleLoader { { Ok(is_maybe_cjs) => is_maybe_cjs, Err(err) => { - return deno_core::ModuleLoadResponse::Sync(Err(type_error( - format!("{:?}", err), - ))); + return deno_core::ModuleLoadResponse::Sync(Err( + JsErrorBox::type_error(format!("{:?}", err)).into(), + )); } }; if is_maybe_cjs { @@ -482,12 +506,16 @@ impl ModuleLoader for EmbeddedModuleLoader { )) } } - Ok(None) => deno_core::ModuleLoadResponse::Sync(Err(type_error( - format!("{MODULE_NOT_FOUND}: {}", original_specifier), - ))), - Err(err) => deno_core::ModuleLoadResponse::Sync(Err(type_error( - format!("{:?}", err), - ))), + Ok(None) => deno_core::ModuleLoadResponse::Sync(Err( + JsErrorBox::type_error(format!( + "{MODULE_NOT_FOUND}: {}", + original_specifier + )) + .into(), + )), + Err(err) => deno_core::ModuleLoadResponse::Sync(Err( + JsErrorBox::type_error(format!("{:?}", err)).into(), + )), } } @@ -553,7 +581,7 @@ impl NodeRequireLoader for EmbeddedModuleLoader { &self, permissions: &mut dyn deno_runtime::deno_node::NodePermissions, path: &'a std::path::Path, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { if self.shared.modules.has_file(path) { // allow reading if the file is in the snapshot return 
Ok(Cow::Borrowed(path)); @@ -563,17 +591,23 @@ impl NodeRequireLoader for EmbeddedModuleLoader { .shared .npm_registry_permission_checker .ensure_read_permission(permissions, path) + .map_err(JsErrorBox::from_err) } fn load_text_file_lossy( &self, path: &std::path::Path, - ) -> Result, AnyError> { - let file_entry = self.shared.vfs.file_entry(path)?; + ) -> Result, JsErrorBox> { + let file_entry = self + .shared + .vfs + .file_entry(path) + .map_err(JsErrorBox::from_err)?; let file_bytes = self .shared .vfs - .read_file_all(file_entry, VfsFileSubDataKind::ModuleGraph)?; + .read_file_all(file_entry, VfsFileSubDataKind::ModuleGraph) + .map_err(JsErrorBox::from_err)?; Ok(from_utf8_lossy_cow(file_bytes)) } @@ -626,10 +660,10 @@ struct StandaloneRootCertStoreProvider { } impl RootCertStoreProvider for StandaloneRootCertStoreProvider { - fn get_or_try_init(&self) -> Result<&RootCertStore, AnyError> { + fn get_or_try_init(&self) -> Result<&RootCertStore, JsErrorBox> { self.cell.get_or_try_init(|| { get_root_cert_store(None, self.ca_stores.clone(), self.ca_data.clone()) - .map_err(|err| err.into()) + .map_err(JsErrorBox::from_err) }) } } diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs index 42895089ef689a..6a57c4ce6ca7d4 100644 --- a/cli/tools/bench/mod.rs +++ b/cli/tools/bench/mod.rs @@ -6,8 +6,9 @@ use std::sync::Arc; use std::time::Duration; use deno_config::glob::WalkEntry; -use deno_core::error::generic_error; +use deno_core::anyhow::anyhow; use deno_core::error::AnyError; +use deno_core::error::CoreError; use deno_core::error::JsError; use deno_core::futures::future; use deno_core::futures::stream; @@ -18,6 +19,7 @@ use deno_core::unsync::spawn_blocking; use deno_core::v8; use deno_core::ModuleSpecifier; use deno_core::PollEventLoopOptions; +use deno_error::JsErrorBox; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::permissions::RuntimePermissionDescriptorParser; @@ -162,17 +164,14 @@ async fn bench_specifier( .await { Ok(()) => Ok(()), - Err(error) => { - if error.is::() { - sender.send(BenchEvent::UncaughtError( - specifier.to_string(), - Box::new(error.downcast::().unwrap()), - ))?; - Ok(()) - } else { - Err(error) - } + Err(CoreError::Js(error)) => { + sender.send(BenchEvent::UncaughtError( + specifier.to_string(), + Box::new(error), + ))?; + Ok(()) } + Err(e) => Err(e.into()), } } @@ -183,7 +182,7 @@ async fn bench_specifier_inner( specifier: ModuleSpecifier, sender: &UnboundedSender, filter: TestFilter, -) -> Result<(), AnyError> { +) -> Result<(), CoreError> { let mut worker = worker_factory .create_custom_worker( WorkerExecutionMode::Bench, @@ -230,14 +229,18 @@ async fn bench_specifier_inner( .partial_cmp(&groups.get_index_of(&d2.group).unwrap()) .unwrap() }); - sender.send(BenchEvent::Plan(BenchPlan { - origin: specifier.to_string(), - total: benchmarks.len(), - used_only, - names: benchmarks.iter().map(|(d, _)| d.name.clone()).collect(), - }))?; + sender + .send(BenchEvent::Plan(BenchPlan { + origin: specifier.to_string(), + total: benchmarks.len(), + used_only, + names: benchmarks.iter().map(|(d, _)| d.name.clone()).collect(), + })) + .map_err(JsErrorBox::from_err)?; for (desc, function) in benchmarks { - sender.send(BenchEvent::Wait(desc.id))?; + sender + .send(BenchEvent::Wait(desc.id)) + .map_err(JsErrorBox::from_err)?; let call = worker.js_runtime.call(&function); let result = worker .js_runtime @@ -245,8 +248,11 @@ async fn bench_specifier_inner( .await?; let scope = &mut 
worker.js_runtime.handle_scope(); let result = v8::Local::new(scope, result); - let result = serde_v8::from_v8::(scope, result)?; - sender.send(BenchEvent::Result(desc.id, result))?; + let result = serde_v8::from_v8::(scope, result) + .map_err(JsErrorBox::from_err)?; + sender + .send(BenchEvent::Result(desc.id, result)) + .map_err(JsErrorBox::from_err)?; } // Ignore `defaultPrevented` of the `beforeunload` event. We don't allow the @@ -356,13 +362,13 @@ async fn bench_specifiers( reporter.report_end(&report); if used_only { - return Err(generic_error( + return Err(anyhow!( "Bench failed because the \"only\" option was used", )); } if report.failed > 0 { - return Err(generic_error("Bench failed")); + return Err(anyhow!("Bench failed")); } Ok(()) @@ -440,7 +446,7 @@ pub async fn run_benchmarks( .collect::>(); if specifiers.is_empty() { - return Err(generic_error("No bench modules found")); + return Err(anyhow!("No bench modules found")); } let main_graph_container = factory.main_module_graph_container().await?; diff --git a/cli/tools/check.rs b/cli/tools/check.rs index 1ee3f1782b4d45..53fd5c5db914de 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -6,7 +6,9 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; +use deno_config::deno_json; use deno_core::error::AnyError; +use deno_error::JsErrorBox; use deno_graph::Module; use deno_graph::ModuleError; use deno_graph::ModuleGraph; @@ -112,6 +114,27 @@ pub struct TypeChecker { sys: CliSys, } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum CheckError { + #[class(inherit)] + #[error(transparent)] + Diagnostics(#[from] Diagnostics), + #[class(inherit)] + #[error(transparent)] + ConfigFile(#[from] deno_json::ConfigFileError), + #[class(inherit)] + #[error(transparent)] + ToMaybeJsxImportSourceConfig( + #[from] deno_json::ToMaybeJsxImportSourceConfigError, + ), + #[class(inherit)] + #[error(transparent)] + TscExec(#[from] tsc::ExecError), + #[class(inherit)] + #[error(transparent)] + Other(#[from] JsErrorBox), +} + impl TypeChecker { pub fn new( caches: Arc, @@ -141,7 +164,7 @@ impl TypeChecker { &self, graph: ModuleGraph, options: CheckOptions, - ) -> Result, AnyError> { + ) -> Result, CheckError> { let (graph, mut diagnostics) = self.check_diagnostics(graph, options).await?; diagnostics.emit_warnings(); @@ -160,7 +183,7 @@ impl TypeChecker { &self, mut graph: ModuleGraph, options: CheckOptions, - ) -> Result<(Arc, Diagnostics), AnyError> { + ) -> Result<(Arc, Diagnostics), CheckError> { if !options.type_check_mode.is_true() || graph.roots.is_empty() { return Ok((graph.into(), Default::default())); } diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs index b148c9a0b18344..96dd6798f53202 100644 --- a/cli/tools/compile.rs +++ b/cli/tools/compile.rs @@ -8,9 +8,9 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; +use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; -use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::resolve_url_or_path; use deno_graph::GraphKind; @@ -330,7 +330,7 @@ async fn resolve_compile_executable_output_path( .map(PathBuf::from) } - output_path.ok_or_else(|| generic_error( + output_path.ok_or_else(|| anyhow!( "An executable name was not provided. One could not be inferred from the URL. 
Aborting.", )).map(|output_path| { get_os_specific_filepath(output_path, &compile_flags.target) diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 522d7d75bee8f5..06afa5fac2d892 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -17,7 +17,6 @@ use deno_config::glob::PathOrPattern; use deno_config::glob::PathOrPatternSet; use deno_core::anyhow::anyhow; use deno_core::anyhow::Context; -use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::sourcemap::SourceMap; @@ -430,7 +429,7 @@ fn collect_coverages( .ignore_git_folder() .ignore_node_modules() .set_vendor_folder(cli_options.vendor_dir_path().map(ToOwned::to_owned)) - .collect_file_patterns(&CliSys::default(), file_patterns)?; + .collect_file_patterns(&CliSys::default(), file_patterns); let coverage_patterns = FilePatterns { base: initial_cwd.to_path_buf(), @@ -505,7 +504,7 @@ pub fn cover_files( coverage_flags: CoverageFlags, ) -> Result<(), AnyError> { if coverage_flags.files.include.is_empty() { - return Err(generic_error("No matching coverage profiles found")); + return Err(anyhow!("No matching coverage profiles found")); } let factory = CliFactory::from_flags(flags); @@ -527,7 +526,7 @@ pub fn cover_files( cli_options.initial_cwd(), )?; if script_coverages.is_empty() { - return Err(generic_error("No coverage files found")); + return Err(anyhow!("No coverage files found")); } let script_coverages = filter_coverages( script_coverages, @@ -536,7 +535,7 @@ pub fn cover_files( in_npm_pkg_checker.as_ref(), ); if script_coverages.is_empty() { - return Err(generic_error("No covered files included in the report")); + return Err(anyhow!("No covered files included in the report")); } let proc_coverages: Vec<_> = script_coverages diff --git a/cli/tools/doc.rs b/cli/tools/doc.rs index 2fa944b362c9ba..114c8f958a8e9b 100644 --- a/cli/tools/doc.rs +++ b/cli/tools/doc.rs @@ -62,10 +62,11 @@ async fn generate_doc_nodes_for_builtin_types( )], Vec::new(), ); + let roots = vec![source_file_specifier.clone()]; let mut graph = deno_graph::ModuleGraph::new(GraphKind::TypesOnly); graph .build( - vec![source_file_specifier.clone()], + roots.clone(), &loader, deno_graph::BuildOptions { imports: Vec::new(), @@ -85,14 +86,13 @@ async fn generate_doc_nodes_for_builtin_types( let doc_parser = doc::DocParser::new( &graph, parser, + &roots, doc::DocParserOptions { diagnostics: false, private: doc_flags.private, }, )?; - let nodes = doc_parser.parse_module(&source_file_specifier)?.definitions; - - Ok(IndexMap::from([(source_file_specifier, nodes)])) + Ok(doc_parser.parse()?) 
} pub async fn doc( @@ -158,19 +158,13 @@ pub async fn doc( let doc_parser = doc::DocParser::new( &graph, &capturing_parser, + &module_specifiers, doc::DocParserOptions { private: doc_flags.private, diagnostics: doc_flags.lint, }, )?; - - let mut doc_nodes_by_url = - IndexMap::with_capacity(module_specifiers.len()); - - for module_specifier in module_specifiers { - let nodes = doc_parser.parse_with_reexports(&module_specifier)?; - doc_nodes_by_url.insert(module_specifier, nodes); - } + let doc_nodes_by_url = doc_parser.parse()?; if doc_flags.lint { let diagnostics = doc_parser.take_diagnostics(); @@ -191,29 +185,9 @@ pub async fn doc( .await?; let (_, deno_ns) = deno_ns.into_iter().next().unwrap(); - let short_path = Rc::new(ShortPath::new( - ModuleSpecifier::parse("file:///lib.deno.d.ts").unwrap(), - None, - None, - None, - )); - - deno_doc::html::compute_namespaced_symbols( - &deno_ns - .into_iter() - .map(|node| deno_doc::html::DocNodeWithContext { - origin: short_path.clone(), - ns_qualifiers: Rc::new([]), - kind_with_drilldown: - deno_doc::html::DocNodeKindWithDrilldown::Other(node.kind()), - inner: Rc::new(node), - drilldown_name: None, - parent: None, - }) - .collect::>(), - ) + Some(deno_ns) } else { - Default::default() + None }; let mut main_entrypoint = None; @@ -393,7 +367,7 @@ impl UsageComposer for DocComposer { fn generate_docs_directory( doc_nodes_by_url: IndexMap>, html_options: &DocHtmlFlag, - deno_ns: std::collections::HashMap, Option>>, + built_in_types: Option>, rewrite_map: Option>, main_entrypoint: Option, ) -> Result<(), AnyError> { @@ -426,12 +400,12 @@ fn generate_docs_directory( None }; - let options = deno_doc::html::GenerateOptions { + let mut options = deno_doc::html::GenerateOptions { package_name: html_options.name.clone(), main_entrypoint, rewrite_map, href_resolver: Rc::new(DocResolver { - deno_ns, + deno_ns: Default::default(), strip_trailing_html: html_options.strip_trailing_html, }), usage_composer: Rc::new(DocComposer), @@ -451,7 +425,58 @@ fn generate_docs_directory( })), }; - let mut files = deno_doc::html::generate(options, doc_nodes_by_url) + if let Some(built_in_types) = built_in_types { + let ctx = deno_doc::html::GenerateCtx::create_basic( + deno_doc::html::GenerateOptions { + package_name: None, + main_entrypoint: Some( + ModuleSpecifier::parse("file:///lib.deno.d.ts").unwrap(), + ), + href_resolver: Rc::new(DocResolver { + deno_ns: Default::default(), + strip_trailing_html: false, + }), + usage_composer: Rc::new(DocComposer), + rewrite_map: Default::default(), + category_docs: Default::default(), + disable_search: Default::default(), + symbol_redirect_map: Default::default(), + default_symbol_map: Default::default(), + markdown_renderer: deno_doc::html::comrak::create_renderer( + None, None, None, + ), + markdown_stripper: Rc::new(deno_doc::html::comrak::strip), + head_inject: None, + }, + IndexMap::from([( + ModuleSpecifier::parse("file:///lib.deno.d.ts").unwrap(), + built_in_types, + )]), + )?; + + let deno_ns = deno_doc::html::compute_namespaced_symbols( + &ctx, + Box::new( + ctx + .doc_nodes + .values() + .next() + .unwrap() + .iter() + .map(std::borrow::Cow::Borrowed), + ), + ); + + options.href_resolver = Rc::new(DocResolver { + deno_ns, + strip_trailing_html: html_options.strip_trailing_html, + }); + } + + let ctx = + deno_doc::html::GenerateCtx::create_basic(options, doc_nodes_by_url)?; + + let mut files = deno_doc::html::generate(ctx) .context("Failed to generate HTML documentation")?; files.insert("prism.js".to_string(), 
PRISM_JS.to_string()); diff --git a/cli/tools/fmt.rs b/cli/tools/fmt.rs index 3411d557bf9bcb..d0948fd4f7c3e2 100644 --- a/cli/tools/fmt.rs +++ b/cli/tools/fmt.rs @@ -26,7 +26,6 @@ use deno_config::glob::FilePatterns; use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; -use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures; use deno_core::parking_lot::Mutex; @@ -167,7 +166,7 @@ fn resolve_paths_with_options_batches( Vec::with_capacity(members_fmt_options.len()); for (_ctx, member_fmt_options) in members_fmt_options { let files = - collect_fmt_files(cli_options, member_fmt_options.files.clone())?; + collect_fmt_files(cli_options, member_fmt_options.files.clone()); if !files.is_empty() { paths_with_options_batches.push(PathsWithOptions { base: member_fmt_options.files.base.clone(), @@ -177,7 +176,7 @@ fn resolve_paths_with_options_batches( } } if paths_with_options_batches.is_empty() { - return Err(generic_error("No target files found.")); + return Err(anyhow!("No target files found.")); } Ok(paths_with_options_batches) } @@ -224,7 +223,7 @@ async fn format_files( fn collect_fmt_files( cli_options: &CliOptions, files: FilePatterns, -) -> Result, AnyError> { +) -> Vec { FileCollector::new(|e| { is_supported_ext_fmt(e.path) || (e.path.extension().is_none() && cli_options.ext_flag().is_some()) @@ -484,7 +483,7 @@ pub fn format_html( } if let Some(error_msg) = inner(&error, file_path) { - AnyError::from(generic_error(error_msg)) + AnyError::msg(error_msg) } else { AnyError::from(error) } @@ -732,9 +731,9 @@ impl Formatter for CheckFormatter { Ok(()) } else { let not_formatted_files_str = files_str(not_formatted_files_count); - Err(generic_error(format!( + Err(anyhow!( "Found {not_formatted_files_count} not formatted {not_formatted_files_str} in {checked_files_str}", - ))) + )) } } } diff --git a/cli/tools/info.rs b/cli/tools/info.rs index 74d65911249861..81b0af0a66b078 100644 --- a/cli/tools/info.rs +++ b/cli/tools/info.rs @@ -11,6 +11,7 @@ use deno_core::error::AnyError; use deno_core::resolve_url_or_path; use deno_core::serde_json; use deno_core::url; +use deno_error::JsErrorClass; use deno_graph::Dependency; use deno_graph::GraphKind; use deno_graph::Module; @@ -664,9 +665,10 @@ impl<'a> GraphDisplayContext<'a> { HttpsChecksumIntegrity(_) => "(checksum integrity error)", Decode(_) => "(loading decode error)", Loader(err) => { - match deno_runtime::errors::get_error_class_name(err) { - Some("NotCapable") => "(not capable, requires --allow-import)", - _ => "(loading error)", + if err.get_class() == "NotCapable" { + "(not capable, requires --allow-import)" + } else { + "(loading error)" } } Jsr(_) => "(loading error)", diff --git a/cli/tools/installer.rs b/cli/tools/installer.rs index 6dc065171a68e0..ff5744c07a7d47 100644 --- a/cli/tools/installer.rs +++ b/cli/tools/installer.rs @@ -12,9 +12,9 @@ use std::path::PathBuf; use std::sync::Arc; use deno_cache_dir::file_fetcher::CacheSetting; +use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; -use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::resolve_url_or_path; use deno_core::url::Url; @@ -54,9 +54,7 @@ fn validate_name(exec_name: &str) -> Result<(), AnyError> { if EXEC_NAME_RE.is_match(exec_name) { Ok(()) } else { - Err(generic_error(format!( - "Invalid executable name: {exec_name}" - ))) + Err(anyhow!("Invalid executable name: {exec_name}")) } } @@ -223,7 +221,7 @@ pub async fn uninstall( // 
ensure directory exists if let Ok(metadata) = fs::metadata(&installation_dir) { if !metadata.is_dir() { - return Err(generic_error("Installation path is not a directory")); + return Err(anyhow!("Installation path is not a directory")); } } @@ -247,10 +245,10 @@ pub async fn uninstall( } if !removed { - return Err(generic_error(format!( + return Err(anyhow!( "No installation found for {}", uninstall_flags.name - ))); + )); } // There might be some extra files to delete @@ -423,14 +421,14 @@ async fn create_install_shim( // ensure directory exists if let Ok(metadata) = fs::metadata(&shim_data.installation_dir) { if !metadata.is_dir() { - return Err(generic_error("Installation path is not a directory")); + return Err(anyhow!("Installation path is not a directory")); } } else { fs::create_dir_all(&shim_data.installation_dir)?; }; if shim_data.file_path.exists() && !install_flags_global.force { - return Err(generic_error( + return Err(anyhow!( "Existing installation found. Aborting (Use -f to overwrite).", )); }; @@ -492,7 +490,7 @@ async fn resolve_shim_data( let name = match name { Some(name) => name, - None => return Err(generic_error( + None => return Err(anyhow!( "An executable name was not provided. One could not be inferred from the URL. Aborting.", )), }; @@ -524,9 +522,7 @@ async fn resolve_shim_data( let log_level = match log_level { Level::Debug => "debug", Level::Info => "info", - _ => { - return Err(generic_error(format!("invalid log level {log_level}"))) - } + _ => return Err(anyhow!(format!("invalid log level {log_level}"))), }; executable_args.push(log_level.to_string()); } diff --git a/cli/tools/jupyter/mod.rs b/cli/tools/jupyter/mod.rs index bb39528f3e3cf5..78b7675420c084 100644 --- a/cli/tools/jupyter/mod.rs +++ b/cli/tools/jupyter/mod.rs @@ -2,9 +2,9 @@ use std::sync::Arc; +use deno_core::anyhow::anyhow; use deno_core::anyhow::bail; use deno_core::anyhow::Context; -use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures::FutureExt; use deno_core::located_script_name; @@ -137,10 +137,10 @@ pub async fn kernel( } let cwd_url = Url::from_directory_path(cli_options.initial_cwd()).map_err(|_| { - generic_error(format!( + anyhow!( "Unable to construct URL from the path of cwd: {}", cli_options.initial_cwd().to_string_lossy(), - )) + ) })?; repl_session.set_test_reporter_factory(Box::new(move || { Box::new( diff --git a/cli/tools/lint/mod.rs b/cli/tools/lint/mod.rs index e7b16f028317e2..36ba85f61300d9 100644 --- a/cli/tools/lint/mod.rs +++ b/cli/tools/lint/mod.rs @@ -18,7 +18,6 @@ use deno_config::glob::FileCollector; use deno_config::glob::FilePatterns; use deno_config::workspace::WorkspaceDirectory; use deno_core::anyhow::anyhow; -use deno_core::error::generic_error; use deno_core::error::AnyError; use deno_core::futures::future::LocalBoxFuture; use deno_core::futures::FutureExt; @@ -77,9 +76,7 @@ pub async fn lint( ) -> Result<(), AnyError> { if lint_flags.watch.is_some() { if lint_flags.is_stdin() { - return Err(generic_error( - "Lint watch on standard input is not supported.", - )); + return Err(anyhow!("Lint watch on standard input is not supported.",)); } return lint_with_watch(flags, lint_flags).await; @@ -223,7 +220,7 @@ fn resolve_paths_with_options_batches( let mut paths_with_options_batches = Vec::with_capacity(members_lint_options.len()); for (dir, lint_options) in members_lint_options { - let files = collect_lint_files(cli_options, lint_options.files.clone())?; + let files = collect_lint_files(cli_options, 
lint_options.files.clone()); if !files.is_empty() { paths_with_options_batches.push(PathsWithOptions { dir, @@ -233,7 +230,7 @@ fn resolve_paths_with_options_batches( } } if paths_with_options_batches.is_empty() { - return Err(generic_error("No target files found.")); + return Err(anyhow!("No target files found.")); } Ok(paths_with_options_batches) } @@ -446,7 +443,7 @@ impl WorkspaceLinter { fn collect_lint_files( cli_options: &CliOptions, files: FilePatterns, -) -> Result, AnyError> { +) -> Vec { FileCollector::new(|e| { is_script_ext(e.path) || (e.path.extension().is_none() && cli_options.ext_flag().is_some()) @@ -534,7 +531,7 @@ fn lint_stdin( } let mut source_code = String::new(); if stdin().read_to_string(&mut source_code).is_err() { - return Err(generic_error("Failed to read from stdin")); + return Err(anyhow!("Failed to read from stdin")); } let linter = CliLinter::new(CliLinterOptions { diff --git a/cli/tools/lint/rules/no_sloppy_imports.rs b/cli/tools/lint/rules/no_sloppy_imports.rs index 33d3090fe32192..825835f3b5bb36 100644 --- a/cli/tools/lint/rules/no_sloppy_imports.rs +++ b/cli/tools/lint/rules/no_sloppy_imports.rs @@ -7,7 +7,7 @@ use std::sync::Arc; use deno_ast::SourceRange; use deno_config::workspace::WorkspaceResolver; -use deno_core::anyhow::anyhow; +use deno_error::JsErrorBox; use deno_graph::source::ResolutionKind; use deno_graph::source::ResolveError; use deno_graph::Range; @@ -187,7 +187,7 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> { let resolution = self .workspace_resolver .resolve(specifier_text, &referrer_range.specifier) - .map_err(|err| ResolveError::Other(err.into()))?; + .map_err(|err| ResolveError::Other(JsErrorBox::from_err(err)))?; match resolution { deno_config::workspace::MappedResolution::Normal { @@ -220,7 +220,7 @@ impl<'a> deno_graph::source::Resolver for SloppyImportCaptureResolver<'a> { } | deno_config::workspace::MappedResolution::PackageJson { .. 
} => { // this error is ignored - Err(ResolveError::Other(anyhow!(""))) + Err(ResolveError::Other(JsErrorBox::generic(""))) } } } diff --git a/cli/tools/registry/paths.rs b/cli/tools/registry/paths.rs index ef988389132237..563b8412801b57 100644 --- a/cli/tools/registry/paths.rs +++ b/cli/tools/registry/paths.rs @@ -233,7 +233,7 @@ pub fn collect_publish_paths( ) -> Result, AnyError> { let diagnostics_collector = opts.diagnostics_collector; let publish_paths = - collect_paths(opts.cli_options, diagnostics_collector, opts.file_patterns)?; + collect_paths(opts.cli_options, diagnostics_collector, opts.file_patterns); let publish_paths_set = publish_paths.iter().cloned().collect::>(); let capacity = publish_paths.len() + opts.force_include_paths.len(); let mut paths = HashSet::with_capacity(capacity); @@ -321,7 +321,7 @@ fn collect_paths( cli_options: &CliOptions, diagnostics_collector: &PublishDiagnosticsCollector, file_patterns: FilePatterns, -) -> Result, AnyError> { +) -> Vec { FileCollector::new(|e| { if !e.metadata.file_type().is_file() { if let Ok(specifier) = ModuleSpecifier::from_file_path(e.path) { diff --git a/cli/tools/repl/channel.rs b/cli/tools/repl/channel.rs index 4ac28b9d521c80..dbafacebc92363 100644 --- a/cli/tools/repl/channel.rs +++ b/cli/tools/repl/channel.rs @@ -4,8 +4,10 @@ use std::cell::RefCell; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; +use deno_core::error::CoreError; use deno_core::serde_json; use deno_core::serde_json::Value; +use deno_error::JsErrorBox; use tokio::sync::mpsc::channel; use tokio::sync::mpsc::unbounded_channel; use tokio::sync::mpsc::Receiver; @@ -47,7 +49,7 @@ pub enum RustylineSyncMessage { } pub enum RustylineSyncResponse { - PostMessage(Result), + PostMessage(Result), LspCompletions(Vec), } @@ -61,7 +63,7 @@ impl RustylineSyncMessageSender { &self, method: &str, params: Option, - ) -> Result { + ) -> Result { if let Err(err) = self .message_tx @@ -69,10 +71,11 @@ impl RustylineSyncMessageSender { method: method.to_string(), params: params .map(|params| serde_json::to_value(params)) - .transpose()?, + .transpose() + .map_err(JsErrorBox::from_err)?, }) { - Err(anyhow!("{}", err)) + Err(JsErrorBox::from_err(err).into()) } else { match self.response_rx.borrow_mut().blocking_recv().unwrap() { RustylineSyncResponse::PostMessage(result) => result, diff --git a/cli/tools/repl/session.rs b/cli/tools/repl/session.rs index 7b20717649d57c..ed9dd61e2d579a 100644 --- a/cli/tools/repl/session.rs +++ b/cli/tools/repl/session.rs @@ -16,8 +16,9 @@ use deno_ast::ParsedSource; use deno_ast::SourcePos; use deno_ast::SourceRangedForSpanned; use deno_ast::SourceTextInfo; -use deno_core::error::generic_error; +use deno_core::anyhow::anyhow; use deno_core::error::AnyError; +use deno_core::error::CoreError; use deno_core::futures::channel::mpsc::UnboundedReceiver; use deno_core::futures::FutureExt; use deno_core::futures::StreamExt; @@ -27,6 +28,7 @@ use deno_core::unsync::spawn; use deno_core::url::Url; use deno_core::LocalInspectorSession; use deno_core::PollEventLoopOptions; +use deno_error::JsErrorBox; use deno_graph::Position; use deno_graph::PositionRange; use deno_graph::SpecifierWithRange; @@ -250,10 +252,10 @@ impl ReplSession { let cwd_url = Url::from_directory_path(cli_options.initial_cwd()).map_err(|_| { - generic_error(format!( + anyhow!( "Unable to construct URL from the path of cwd: {}", cli_options.initial_cwd().to_string_lossy(), - )) + ) })?; let ts_config_for_emit = cli_options 
.resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?; @@ -322,7 +324,7 @@ impl ReplSession { &mut self, method: &str, params: Option, - ) -> Result { + ) -> Result { self .worker .js_runtime @@ -339,7 +341,7 @@ impl ReplSession { .await } - pub async fn run_event_loop(&mut self) -> Result<(), AnyError> { + pub async fn run_event_loop(&mut self) -> Result<(), CoreError> { self.worker.run_event_loop(true).await } @@ -400,21 +402,29 @@ impl ReplSession { } Err(err) => { // handle a parsing diagnostic - match err.downcast_ref::() { + match crate::util::result::any_and_jserrorbox_downcast_ref::< + deno_ast::ParseDiagnostic, + >(&err) + { Some(diagnostic) => { Ok(EvaluationOutput::Error(format_diagnostic(diagnostic))) } - None => match err.downcast_ref::() { - Some(diagnostics) => Ok(EvaluationOutput::Error( - diagnostics - .0 - .iter() - .map(format_diagnostic) - .collect::>() - .join("\n\n"), - )), - None => Err(err), - }, + None => { + match crate::util::result::any_and_jserrorbox_downcast_ref::< + ParseDiagnosticsError, + >(&err) + { + Some(diagnostics) => Ok(EvaluationOutput::Error( + diagnostics + .0 + .iter() + .map(format_diagnostic) + .collect::>() + .join("\n\n"), + )), + None => Err(err), + } + } } } } @@ -742,7 +752,7 @@ impl ReplSession { async fn evaluate_expression( &mut self, expression: &str, - ) -> Result { + ) -> Result { self .post_message_with_event_loop( "Runtime.evaluate", @@ -765,7 +775,9 @@ impl ReplSession { }), ) .await - .and_then(|res| serde_json::from_value(res).map_err(|e| e.into())) + .and_then(|res| { + serde_json::from_value(res).map_err(|e| JsErrorBox::from_err(e).into()) + }) } } diff --git a/cli/tools/run/hmr.rs b/cli/tools/run/hmr.rs index 9f19aea0158306..913e119689029b 100644 --- a/cli/tools/run/hmr.rs +++ b/cli/tools/run/hmr.rs @@ -4,13 +4,13 @@ use std::collections::HashMap; use std::path::PathBuf; use std::sync::Arc; -use deno_core::error::generic_error; -use deno_core::error::AnyError; +use deno_core::error::CoreError; use deno_core::futures::StreamExt; use deno_core::serde_json::json; use deno_core::serde_json::{self}; use deno_core::url::Url; use deno_core::LocalInspectorSession; +use deno_error::JsErrorBox; use deno_terminal::colors; use tokio::select; @@ -66,19 +66,19 @@ pub struct HmrRunner { #[async_trait::async_trait(?Send)] impl crate::worker::HmrRunner for HmrRunner { // TODO(bartlomieju): this code is duplicated in `cli/tools/coverage/mod.rs` - async fn start(&mut self) -> Result<(), AnyError> { + async fn start(&mut self) -> Result<(), CoreError> { self.enable_debugger().await } // TODO(bartlomieju): this code is duplicated in `cli/tools/coverage/mod.rs` - async fn stop(&mut self) -> Result<(), AnyError> { + async fn stop(&mut self) -> Result<(), CoreError> { self .watcher_communicator .change_restart_mode(WatcherRestartMode::Automatic); self.disable_debugger().await } - async fn run(&mut self) -> Result<(), AnyError> { + async fn run(&mut self) -> Result<(), CoreError> { self .watcher_communicator .change_restart_mode(WatcherRestartMode::Manual); @@ -87,13 +87,13 @@ impl crate::worker::HmrRunner for HmrRunner { select! 
{ biased; Some(notification) = session_rx.next() => { - let notification = serde_json::from_value::(notification)?; + let notification = serde_json::from_value::(notification).map_err(JsErrorBox::from_err)?; if notification.method == "Runtime.exceptionThrown" { - let exception_thrown = serde_json::from_value::(notification.params)?; + let exception_thrown = serde_json::from_value::(notification.params).map_err(JsErrorBox::from_err)?; let (message, description) = exception_thrown.exception_details.get_message_and_description(); - break Err(generic_error(format!("{} {}", message, description))); + break Err(JsErrorBox::generic(format!("{} {}", message, description)).into()); } else if notification.method == "Debugger.scriptParsed" { - let params = serde_json::from_value::(notification.params)?; + let params = serde_json::from_value::(notification.params).map_err(JsErrorBox::from_err)?; if params.url.starts_with("file://") { let file_url = Url::parse(¶ms.url).unwrap(); let file_path = file_url.to_file_path().unwrap(); @@ -105,7 +105,7 @@ impl crate::worker::HmrRunner for HmrRunner { } } changed_paths = self.watcher_communicator.watch_for_changed_paths() => { - let changed_paths = changed_paths?; + let changed_paths = changed_paths.map_err(JsErrorBox::from_err)?; let Some(changed_paths) = changed_paths else { let _ = self.watcher_communicator.force_restart(); @@ -187,7 +187,7 @@ impl HmrRunner { } // TODO(bartlomieju): this code is duplicated in `cli/tools/coverage/mod.rs` - async fn enable_debugger(&mut self) -> Result<(), AnyError> { + async fn enable_debugger(&mut self) -> Result<(), CoreError> { self .session .post_message::<()>("Debugger.enable", None) @@ -200,7 +200,7 @@ impl HmrRunner { } // TODO(bartlomieju): this code is duplicated in `cli/tools/coverage/mod.rs` - async fn disable_debugger(&mut self) -> Result<(), AnyError> { + async fn disable_debugger(&mut self) -> Result<(), CoreError> { self .session .post_message::<()>("Debugger.disable", None) @@ -216,7 +216,7 @@ impl HmrRunner { &mut self, script_id: &str, source: &str, - ) -> Result { + ) -> Result { let result = self .session .post_message( @@ -229,15 +229,16 @@ impl HmrRunner { ) .await?; - Ok(serde_json::from_value::( - result, - )?) + Ok( + serde_json::from_value::(result) + .map_err(JsErrorBox::from_err)?, + ) } async fn dispatch_hmr_event( &mut self, script_id: &str, - ) -> Result<(), AnyError> { + ) -> Result<(), CoreError> { let expr = format!( "dispatchEvent(new CustomEvent(\"hmr\", {{ detail: {{ path: \"{}\" }} }}));", script_id diff --git a/cli/tools/serve.rs b/cli/tools/serve.rs index 18eac1b3e119ac..c2c53c1b69f7f2 100644 --- a/cli/tools/serve.rs +++ b/cli/tools/serve.rs @@ -73,7 +73,7 @@ async fn do_serve( ) .await?; let worker_count = match worker_count { - None | Some(1) => return worker.run().await, + None | Some(1) => return worker.run().await.map_err(Into::into), Some(c) => c, }; @@ -133,7 +133,7 @@ async fn run_worker( worker.run_for_watcher().await?; Ok(0) } else { - worker.run().await + worker.run().await.map_err(Into::into) } } diff --git a/cli/tools/test/channel.rs b/cli/tools/test/channel.rs index 29f24d65f144f9..68633b17e62b18 100644 --- a/cli/tools/test/channel.rs +++ b/cli/tools/test/channel.rs @@ -37,7 +37,8 @@ const HALF_SYNC_MARKER: &[u8; 4] = &[226, 128, 139, 0]; const BUFFER_SIZE: usize = 4096; /// The test channel has been closed and cannot be used to send further messages. 
-#[derive(Debug, Copy, Clone, Eq, PartialEq)] +#[derive(Debug, Copy, Clone, Eq, PartialEq, deno_error::JsError)] +#[class(generic)] pub struct ChannelClosedError; impl std::error::Error for ChannelClosedError {} diff --git a/cli/tools/test/mod.rs b/cli/tools/test/mod.rs index 2bd9cd3d7cbc38..21ee7e152d159b 100644 --- a/cli/tools/test/mod.rs +++ b/cli/tools/test/mod.rs @@ -25,10 +25,9 @@ use deno_cache_dir::file_fetcher::File; use deno_config::glob::FilePatterns; use deno_config::glob::WalkEntry; use deno_core::anyhow; -use deno_core::anyhow::bail; -use deno_core::anyhow::Context as _; -use deno_core::error::generic_error; +use deno_core::anyhow::anyhow; use deno_core::error::AnyError; +use deno_core::error::CoreError; use deno_core::error::JsError; use deno_core::futures::future; use deno_core::futures::stream; @@ -49,6 +48,7 @@ use deno_core::v8; use deno_core::ModuleSpecifier; use deno_core::OpState; use deno_core::PollEventLoopOptions; +use deno_error::JsErrorBox; use deno_runtime::deno_io::Stdio; use deno_runtime::deno_io::StdioPipe; use deno_runtime::deno_permissions::Permissions; @@ -106,6 +106,8 @@ use reporters::PrettyTestReporter; use reporters::TapTestReporter; use reporters::TestReporter; +use crate::tools::test::channel::ChannelClosedError; + /// How many times we're allowed to spin the event loop before considering something a leak. const MAX_SANITIZER_LOOP_SPINS: usize = 16; @@ -612,7 +614,7 @@ async fn configure_main_worker( permissions_container: PermissionsContainer, worker_sender: TestEventWorkerSender, options: &TestSpecifierOptions, -) -> Result<(Option>, MainWorker), anyhow::Error> { +) -> Result<(Option>, MainWorker), CoreError> { let mut worker = worker_factory .create_custom_worker( WorkerExecutionMode::Test, @@ -640,21 +642,15 @@ async fn configure_main_worker( let mut worker = worker.into_main_worker(); match res { Ok(()) => Ok(()), - Err(error) => { - // TODO(mmastrac): It would be nice to avoid having this error pattern repeated - if error.is::() { - send_test_event( - &worker.js_runtime.op_state(), - TestEvent::UncaughtError( - specifier.to_string(), - Box::new(error.downcast::().unwrap()), - ), - )?; - Ok(()) - } else { - Err(error) - } + Err(CoreError::Js(err)) => { + send_test_event( + &worker.js_runtime.op_state(), + TestEvent::UncaughtError(specifier.to_string(), Box::new(err)), + ) + .map_err(JsErrorBox::from_err)?; + Ok(()) } + Err(err) => Err(err), }?; Ok((coverage_collector, worker)) } @@ -691,21 +687,14 @@ pub async fn test_specifier( .await { Ok(()) => Ok(()), - Err(error) => { - // TODO(mmastrac): It would be nice to avoid having this error pattern repeated - if error.is::() { - send_test_event( - &worker.js_runtime.op_state(), - TestEvent::UncaughtError( - specifier.to_string(), - Box::new(error.downcast::().unwrap()), - ), - )?; - Ok(()) - } else { - Err(error) - } + Err(CoreError::Js(err)) => { + send_test_event( + &worker.js_runtime.op_state(), + TestEvent::UncaughtError(specifier.to_string(), Box::new(err)), + )?; + Ok(()) } + Err(e) => Err(e.into()), } } @@ -718,7 +707,7 @@ async fn test_specifier_inner( specifier: ModuleSpecifier, fail_fast_tracker: FailFastTracker, options: TestSpecifierOptions, -) -> Result<(), AnyError> { +) -> Result<(), CoreError> { // Ensure that there are no pending exceptions before we start running tests worker.run_up_to_duration(Duration::from_millis(0)).await?; @@ -765,7 +754,7 @@ pub fn worker_has_tests(worker: &mut MainWorker) -> bool { /// Yields to tokio to allow async work to process, and then polls /// the 
event loop once. #[must_use = "The event loop result should be checked"] -pub async fn poll_event_loop(worker: &mut MainWorker) -> Result<(), AnyError> { +pub async fn poll_event_loop(worker: &mut MainWorker) -> Result<(), CoreError> { // Allow any ops that to do work in the tokio event loop to do so tokio::task::yield_now().await; // Spin the event loop once @@ -784,13 +773,11 @@ pub async fn poll_event_loop(worker: &mut MainWorker) -> Result<(), AnyError> { pub fn send_test_event( op_state: &RefCell, event: TestEvent, -) -> Result<(), AnyError> { - Ok( - op_state - .borrow_mut() - .borrow_mut::() - .send(event)?, - ) +) -> Result<(), ChannelClosedError> { + op_state + .borrow_mut() + .borrow_mut::() + .send(event) } pub async fn run_tests_for_worker( @@ -986,13 +973,10 @@ async fn run_tests_for_worker_inner( let result = match result { Ok(r) => r, Err(error) => { - if error.is::() { + if let CoreError::Js(js_error) = error { send_test_event( &state_rc, - TestEvent::UncaughtError( - specifier.to_string(), - Box::new(error.downcast::().unwrap()), - ), + TestEvent::UncaughtError(specifier.to_string(), Box::new(js_error)), )?; fail_fast_tracker.add_failure(); send_test_event( @@ -1002,7 +986,7 @@ async fn run_tests_for_worker_inner( had_uncaught_error = true; continue; } else { - return Err(error); + return Err(error.into()); } } }; @@ -1374,25 +1358,20 @@ pub async fn report_tests( reporter.report_summary(&elapsed, &tests, &test_steps); if let Err(err) = reporter.flush_report(&elapsed, &tests, &test_steps) { return ( - Err(generic_error(format!( - "Test reporter failed to flush: {}", - err - ))), + Err(anyhow!("Test reporter failed to flush: {}", err)), receiver, ); } if used_only { return ( - Err(generic_error( - "Test failed because the \"only\" option was used", - )), + Err(anyhow!("Test failed because the \"only\" option was used",)), receiver, ); } if failed { - return (Err(generic_error("Test failed")), receiver); + return (Err(anyhow!("Test failed")), receiver); } (Ok(()), receiver) @@ -1575,7 +1554,7 @@ pub async fn run_tests( if !workspace_test_options.permit_no_files && specifiers_with_mode.is_empty() { - return Err(generic_error("No test modules found")); + return Err(anyhow!("No test modules found")); } let doc_tests = get_doc_tests(&specifiers_with_mode, file_fetcher).await?; @@ -1611,10 +1590,10 @@ pub async fn run_tests( TestSpecifiersOptions { cwd: Url::from_directory_path(cli_options.initial_cwd()).map_err( |_| { - generic_error(format!( + anyhow!( "Unable to construct URL from the path of cwd: {}", cli_options.initial_cwd().to_string_lossy(), - )) + ) }, )?, concurrent_jobs: workspace_test_options.concurrent_jobs, @@ -1793,10 +1772,10 @@ pub async fn run_tests_with_watch( TestSpecifiersOptions { cwd: Url::from_directory_path(cli_options.initial_cwd()).map_err( |_| { - generic_error(format!( + anyhow!( "Unable to construct URL from the path of cwd: {}", cli_options.initial_cwd().to_string_lossy(), - )) + ) }, )?, concurrent_jobs: workspace_test_options.concurrent_jobs, diff --git a/cli/tools/test/reporters/compound.rs b/cli/tools/test/reporters/compound.rs index e381dd09778748..3c4409ecaa64f9 100644 --- a/cli/tools/test/reporters/compound.rs +++ b/cli/tools/test/reporters/compound.rs @@ -129,7 +129,7 @@ impl TestReporter for CompoundTestReporter { if errors.is_empty() { Ok(()) } else { - bail!( + anyhow::bail!( "error in one or more wrapped reporters:\n{}", errors .iter() diff --git a/cli/tools/test/reporters/junit.rs b/cli/tools/test/reporters/junit.rs index 
42ced0760060ad..9418ac9fb2b985 100644 --- a/cli/tools/test/reporters/junit.rs +++ b/cli/tools/test/reporters/junit.rs @@ -3,6 +3,8 @@ use std::collections::VecDeque; use std::path::PathBuf; +use deno_core::anyhow::Context; + use super::fmt::to_relative_path_or_remote_url; use super::*; diff --git a/cli/tsc/diagnostics.rs b/cli/tsc/diagnostics.rs index ac93c8575d784b..3780f65e771079 100644 --- a/cli/tsc/diagnostics.rs +++ b/cli/tsc/diagnostics.rs @@ -320,7 +320,8 @@ impl fmt::Display for Diagnostic { } } -#[derive(Clone, Debug, Default, Eq, PartialEq)] +#[derive(Clone, Debug, Default, Eq, PartialEq, deno_error::JsError)] +#[class(generic)] pub struct Diagnostics(Vec); impl Diagnostics { diff --git a/cli/tsc/mod.rs b/cli/tsc/mod.rs index 3176c50d5c7e70..1473b8a8d91a7e 100644 --- a/cli/tsc/mod.rs +++ b/cli/tsc/mod.rs @@ -3,12 +3,10 @@ use std::borrow::Cow; use std::collections::HashMap; use std::fmt; -use std::path::Path; use std::path::PathBuf; use std::sync::Arc; use deno_ast::MediaType; -use deno_core::anyhow::anyhow; use deno_core::anyhow::Context; use deno_core::ascii_str; use deno_core::error::AnyError; @@ -454,13 +452,6 @@ impl State { } } -fn normalize_specifier( - specifier: &str, - current_dir: &Path, -) -> Result { - resolve_url_or_path(specifier, current_dir).map_err(|err| err.into()) -} - #[op2] #[string] fn op_create_hash(s: &mut OpState, #[string] text: &str) -> String { @@ -531,6 +522,21 @@ pub fn as_ts_script_kind(media_type: MediaType) -> i32 { pub const MISSING_DEPENDENCY_SPECIFIER: &str = "internal:///missing_dependency.d.ts"; +#[derive(Debug, Error, deno_error::JsError)] +pub enum LoadError { + #[class(generic)] + #[error("Unable to load {path}: {error}")] + LoadFromNodeModule { path: String, error: std::io::Error }, + #[class(inherit)] + #[error( + "Error converting a string module specifier for \"op_resolve\": {0}" + )] + ModuleResolution(#[from] deno_core::ModuleResolutionError), + #[class(inherit)] + #[error("{0}")] + ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError), +} + #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] struct LoadResponse { @@ -545,24 +551,28 @@ struct LoadResponse { fn op_load( state: &mut OpState, #[string] load_specifier: &str, -) -> Result, AnyError> { +) -> Result, LoadError> { op_load_inner(state, load_specifier) } fn op_load_inner( state: &mut OpState, load_specifier: &str, -) -> Result, AnyError> { +) -> Result, LoadError> { fn load_from_node_modules( specifier: &ModuleSpecifier, npm_state: Option<&RequestNpmState>, media_type: &mut MediaType, is_cjs: &mut bool, - ) -> Result { + ) -> Result { *media_type = MediaType::from_specifier(specifier); let file_path = specifier.to_file_path().unwrap(); - let code = std::fs::read_to_string(&file_path) - .with_context(|| format!("Unable to load {}", file_path.display()))?; + let code = std::fs::read_to_string(&file_path).map_err(|err| { + LoadError::LoadFromNodeModule { + path: file_path.display().to_string(), + error: err, + } + })?; let code: Arc = code.into(); *is_cjs = npm_state .map(|npm_state| { @@ -575,8 +585,7 @@ fn op_load_inner( let state = state.borrow_mut::(); - let specifier = normalize_specifier(load_specifier, &state.current_dir) - .context("Error converting a string module specifier for \"op_load\".")?; + let specifier = resolve_url_or_path(load_specifier, &state.current_dir)?; let mut hash: Option = None; let mut media_type = MediaType::Unknown; @@ -688,6 +697,26 @@ fn op_load_inner( })) } +#[derive(Debug, Error, deno_error::JsError)] +pub enum 
ResolveError { + #[class(inherit)] + #[error( + "Error converting a string module specifier for \"op_resolve\": {0}" + )] + ModuleResolution(#[from] deno_core::ModuleResolutionError), + #[class(inherit)] + #[error("{0}")] + PackageSubpathResolve(PackageSubpathResolveError), + #[class(inherit)] + #[error("{0}")] + ResolvePkgFolderFromDenoModule( + #[from] crate::npm::ResolvePkgFolderFromDenoModuleError, + ), + #[class(inherit)] + #[error("{0}")] + ResolveNonGraphSpecifierTypes(#[from] ResolveNonGraphSpecifierTypesError), +} + #[derive(Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct ResolveArgs { @@ -717,7 +746,7 @@ fn op_resolve( state: &mut OpState, #[string] base: String, #[serde] specifiers: Vec<(bool, String)>, -) -> Result, AnyError> { +) -> Result, ResolveError> { op_resolve_inner(state, ResolveArgs { base, specifiers }) } @@ -725,7 +754,7 @@ fn op_resolve( fn op_resolve_inner( state: &mut OpState, args: ResolveArgs, -) -> Result, AnyError> { +) -> Result, ResolveError> { let state = state.borrow_mut::(); let mut resolved: Vec<(String, &'static str)> = Vec::with_capacity(args.specifiers.len()); @@ -734,9 +763,7 @@ fn op_resolve_inner( { remapped_specifier.clone() } else { - normalize_specifier(&args.base, &state.current_dir).context( - "Error converting a string module specifier for \"op_resolve\".", - )? + resolve_url_or_path(&args.base, &state.current_dir)? }; let referrer_module = state.graph.get(&referrer); for (is_cjs, specifier) in args.specifiers { @@ -852,7 +879,7 @@ fn resolve_graph_specifier_types( referrer: &ModuleSpecifier, resolution_mode: ResolutionMode, state: &State, -) -> Result, AnyError> { +) -> Result, ResolveError> { let graph = &state.graph; let maybe_module = match graph.try_get(specifier) { Ok(Some(module)) => Some(module), @@ -914,7 +941,7 @@ fn resolve_graph_specifier_types( Err(err) => match err.code() { NodeJsErrorCode::ERR_TYPES_NOT_FOUND | NodeJsErrorCode::ERR_MODULE_NOT_FOUND => None, - _ => return Err(err.into()), + _ => return Err(ResolveError::PackageSubpathResolve(err)), }, }; Ok(Some(into_specifier_and_media_type(maybe_url))) @@ -936,10 +963,12 @@ fn resolve_graph_specifier_types( } } -#[derive(Debug, Error)] -enum ResolveNonGraphSpecifierTypesError { +#[derive(Debug, Error, deno_error::JsError)] +pub enum ResolveNonGraphSpecifierTypesError { + #[class(inherit)] #[error(transparent)] ResolvePkgFolderFromDenoReq(#[from] ResolvePkgFolderFromDenoReqError), + #[class(inherit)] #[error(transparent)] PackageSubpathResolve(#[from] PackageSubpathResolveError), } @@ -1036,10 +1065,20 @@ fn op_respond_inner(state: &mut OpState, args: RespondArgs) { state.maybe_response = Some(args); } +#[derive(Debug, Error, deno_error::JsError)] +pub enum ExecError { + #[class(generic)] + #[error("The response for the exec request was not set.")] + ResponseNotSet, + #[class(inherit)] + #[error(transparent)] + Core(deno_core::error::CoreError), +} + /// Execute a request on the supplied snapshot, returning a response which /// contains information, like any emitted files, diagnostics, statistics and /// optionally an updated TypeScript build info. -pub fn exec(request: Request) -> Result { +pub fn exec(request: Request) -> Result { // tsc cannot handle root specifiers that don't have one of the "acceptable" // extensions. 
Therefore, we have to check the root modules against their // extensions and remap any that are unacceptable to tsc and add them to the @@ -1115,7 +1154,9 @@ pub fn exec(request: Request) -> Result { ..Default::default() }); - runtime.execute_script(located_script_name!(), exec_source)?; + runtime + .execute_script(located_script_name!(), exec_source) + .map_err(ExecError::Core)?; let op_state = runtime.op_state(); let mut op_state = op_state.borrow_mut(); @@ -1132,7 +1173,7 @@ pub fn exec(request: Request) -> Result { stats, }) } else { - Err(anyhow!("The response for the exec request was not set.")) + Err(ExecError::ResponseNotSet) } } @@ -1141,6 +1182,7 @@ mod tests { use deno_core::futures::future; use deno_core::serde_json; use deno_core::OpState; + use deno_error::JsErrorBox; use deno_graph::GraphKind; use deno_graph::ModuleGraph; use test_util::PathRef; @@ -1167,13 +1209,20 @@ mod tests { .replace("://", "_") .replace('/', "-"); let source_path = self.fixtures.join(specifier_text); - let response = source_path.read_to_bytes_if_exists().map(|c| { - Some(deno_graph::source::LoadResponse::Module { - specifier: specifier.clone(), - maybe_headers: None, - content: c.into(), + let response = source_path + .read_to_bytes_if_exists() + .map(|c| { + Some(deno_graph::source::LoadResponse::Module { + specifier: specifier.clone(), + maybe_headers: None, + content: c.into(), + }) }) - }); + .map_err(|e| { + deno_graph::source::LoadError::Other(Arc::new(JsErrorBox::generic( + e.to_string(), + ))) + }); Box::pin(future::ready(response)) } } @@ -1210,7 +1259,7 @@ mod tests { async fn test_exec( specifier: &ModuleSpecifier, - ) -> Result { + ) -> Result { let hash_data = 123; // something random let fixtures = test_util::testdata_path().join("tsc2"); let loader = MockLoader { fixtures }; diff --git a/cli/util/file_watcher.rs b/cli/util/file_watcher.rs index ca769b70fe5349..65963214b9827e 100644 --- a/cli/util/file_watcher.rs +++ b/cli/util/file_watcher.rs @@ -10,7 +10,7 @@ use std::time::Duration; use deno_config::glob::PathOrPatternSet; use deno_core::error::AnyError; -use deno_core::error::JsError; +use deno_core::error::CoreError; use deno_core::futures::Future; use deno_core::futures::FutureExt; use deno_core::parking_lot::Mutex; @@ -23,6 +23,7 @@ use notify::RecommendedWatcher; use notify::RecursiveMode; use notify::Watcher; use tokio::select; +use tokio::sync::broadcast::error::RecvError; use tokio::sync::mpsc; use tokio::sync::mpsc::UnboundedReceiver; use tokio::time::sleep; @@ -80,10 +81,13 @@ where { let result = watch_future.await; if let Err(err) = result { - let error_string = match err.downcast_ref::() { - Some(e) => format_js_error(e), - None => format!("{err:?}"), - }; + let error_string = + match crate::util::result::any_and_jserrorbox_downcast_ref::( + &err, + ) { + Some(CoreError::Js(e)) => format_js_error(e), + _ => format!("{err:?}"), + }; log::error!( "{}: {}", colors::red_bold("error"), @@ -171,9 +175,9 @@ impl WatcherCommunicator { pub async fn watch_for_changed_paths( &self, - ) -> Result>, AnyError> { + ) -> Result>, RecvError> { let mut rx = self.changed_paths_rx.resubscribe(); - rx.recv().await.map_err(AnyError::from) + rx.recv().await } pub fn change_restart_mode(&self, restart_mode: WatcherRestartMode) { diff --git a/cli/util/fs.rs b/cli/util/fs.rs index 61f1786ee98e70..d9cebe10d51de3 100644 --- a/cli/util/fs.rs +++ b/cli/util/fs.rs @@ -13,7 +13,6 @@ use deno_config::glob::PathOrPattern; use deno_config::glob::PathOrPatternSet; use deno_config::glob::WalkEntry; use 
deno_core::anyhow::anyhow; -use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::unsync::spawn_blocking; use deno_core::ModuleSpecifier; @@ -129,7 +128,7 @@ pub fn collect_specifiers( .ignore_git_folder() .ignore_node_modules() .set_vendor_folder(vendor_folder) - .collect_file_patterns(&CliSys::default(), files)?; + .collect_file_patterns(&CliSys::default(), files); let mut collected_files_as_urls = collected_files .iter() .map(|f| specifier_from_file_path(f).unwrap()) @@ -169,7 +168,7 @@ pub fn clone_dir_recursive< sys: &TSys, from: &Path, to: &Path, -) -> Result<(), AnyError> { +) -> Result<(), CopyDirRecursiveError> { if cfg!(target_vendor = "apple") { if let Some(parent) = to.parent() { sys.fs_create_dir_all(parent)?; @@ -200,6 +199,47 @@ pub fn clone_dir_recursive< Ok(()) } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum CopyDirRecursiveError { + #[class(inherit)] + #[error("Creating {path}")] + Creating { + path: PathBuf, + #[source] + #[inherit] + source: Error, + }, + #[class(inherit)] + #[error("Creating {path}")] + Reading { + path: PathBuf, + #[source] + #[inherit] + source: Error, + }, + #[class(inherit)] + #[error("Dir {from} to {to}")] + Dir { + from: PathBuf, + to: PathBuf, + #[source] + #[inherit] + source: Box, + }, + #[class(inherit)] + #[error("Copying {from} to {to}")] + Copying { + from: PathBuf, + to: PathBuf, + #[source] + #[inherit] + source: Error, + }, + #[class(inherit)] + #[error(transparent)] + Other(#[from] Error), +} + /// Copies a directory to another directory. /// /// Note: Does not handle symlinks. @@ -213,13 +253,20 @@ pub fn copy_dir_recursive< sys: &TSys, from: &Path, to: &Path, -) -> Result<(), AnyError> { - sys - .fs_create_dir_all(to) - .with_context(|| format!("Creating {}", to.display()))?; - let read_dir = sys - .fs_read_dir(from) - .with_context(|| format!("Reading {}", from.display()))?; +) -> Result<(), CopyDirRecursiveError> { + sys.fs_create_dir_all(to).map_err(|source| { + CopyDirRecursiveError::Creating { + path: to.to_path_buf(), + source, + } + })?; + let read_dir = + sys + .fs_read_dir(from) + .map_err(|source| CopyDirRecursiveError::Reading { + path: from.to_path_buf(), + source, + })?; for entry in read_dir { let entry = entry?; @@ -228,12 +275,20 @@ pub fn copy_dir_recursive< let new_to = to.join(entry.file_name()); if file_type.is_dir() { - copy_dir_recursive(sys, &new_from, &new_to).with_context(|| { - format!("Dir {} to {}", new_from.display(), new_to.display()) + copy_dir_recursive(sys, &new_from, &new_to).map_err(|source| { + CopyDirRecursiveError::Dir { + from: new_from.to_path_buf(), + to: new_to.to_path_buf(), + source: Box::new(source), + } })?; } else if file_type.is_file() { - sys.fs_copy(&new_from, &new_to).with_context(|| { - format!("Copying {} to {}", new_from.display(), new_to.display()) + sys.fs_copy(&new_from, &new_to).map_err(|source| { + CopyDirRecursiveError::Copying { + from: new_from.to_path_buf(), + to: new_to.to_path_buf(), + source, + } })?; } } diff --git a/cli/util/result.rs b/cli/util/result.rs index 6a67416f269338..0c1a75b1ce0c08 100644 --- a/cli/util/result.rs +++ b/cli/util/result.rs @@ -1,6 +1,13 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
 use std::convert::Infallible;
+use std::fmt::Debug;
+use std::fmt::Display;
+
+use deno_core::error::AnyError;
+use deno_core::error::CoreError;
+use deno_error::JsErrorBox;
+use deno_error::JsErrorClass;
 
 pub trait InfallibleResultExt<T> {
   fn unwrap_infallible(self) -> T;
 }
@@ -14,3 +21,23 @@ impl<T> InfallibleResultExt<T> for Result<T, Infallible> {
     }
   }
 }
+
+pub fn any_and_jserrorbox_downcast_ref<
+  E: Display + Debug + Send + Sync + 'static,
+>(
+  err: &AnyError,
+) -> Option<&E> {
+  err
+    .downcast_ref::<E>()
+    .or_else(|| {
+      err
+        .downcast_ref::<JsErrorBox>()
+        .and_then(|e| e.as_any().downcast_ref::<E>())
+    })
+    .or_else(|| {
+      err.downcast_ref::<CoreError>().and_then(|e| match e {
+        CoreError::JsNative(e) => e.as_any().downcast_ref::<E>(),
+        _ => None,
+      })
+    })
+}
diff --git a/cli/worker.rs b/cli/worker.rs
index 6289d00cf80323..eee89d663c42fd 100644
--- a/cli/worker.rs
+++ b/cli/worker.rs
@@ -8,6 +8,7 @@ use std::sync::Arc;
 use deno_ast::ModuleSpecifier;
 use deno_core::anyhow::bail;
 use deno_core::error::AnyError;
+use deno_core::error::CoreError;
 use deno_core::futures::FutureExt;
 use deno_core::url::Url;
 use deno_core::v8;
@@ -17,6 +18,7 @@ use deno_core::FeatureChecker;
 use deno_core::ModuleLoader;
 use deno_core::PollEventLoopOptions;
 use deno_core::SharedArrayBufferStore;
+use deno_error::JsErrorBox;
 use deno_runtime::code_cache;
 use deno_runtime::deno_broadcast_channel::InMemoryBroadcastChannel;
 use deno_runtime::deno_fs;
@@ -50,7 +52,6 @@ use crate::args::CliLockfile;
 use crate::args::DenoSubcommand;
 use crate::args::NpmCachingStrategy;
 use crate::args::StorageKeyResolver;
-use crate::errors;
 use crate::node::CliNodeResolver;
 use crate::node::CliPackageJsonResolver;
 use crate::npm::CliNpmResolver;
@@ -80,9 +81,9 @@ pub trait ModuleLoaderFactory: Send + Sync {
 
 #[async_trait::async_trait(?Send)]
 pub trait HmrRunner: Send + Sync {
-  async fn start(&mut self) -> Result<(), AnyError>;
-  async fn stop(&mut self) -> Result<(), AnyError>;
-  async fn run(&mut self) -> Result<(), AnyError>;
+  async fn start(&mut self) -> Result<(), CoreError>;
+  async fn stop(&mut self) -> Result<(), CoreError>;
+  async fn run(&mut self) -> Result<(), CoreError>;
 }
 
 pub trait CliCodeCache: code_cache::CodeCache {
@@ -195,7 +196,7 @@ impl CliMainWorker {
     Ok(())
   }
 
-  pub async fn run(&mut self) -> Result<i32, AnyError> {
+  pub async fn run(&mut self) -> Result<i32, CoreError> {
     let mut maybe_coverage_collector =
       self.maybe_setup_coverage_collector().await?;
     let mut maybe_hmr_runner = self.maybe_setup_hmr_runner().await?;
@@ -216,7 +217,7 @@ impl CliMainWorker {
     let result;
     select!
{ hmr_result = hmr_future => { - result = hmr_result; + result = hmr_result.map_err(Into::into); }, event_loop_result = event_loop_future => { result = event_loop_result; @@ -331,12 +332,12 @@ impl CliMainWorker { executor.execute().await } - pub async fn execute_main_module(&mut self) -> Result<(), AnyError> { + pub async fn execute_main_module(&mut self) -> Result<(), CoreError> { let id = self.worker.preload_main_module(&self.main_module).await?; self.worker.evaluate_module(id).await } - pub async fn execute_side_module(&mut self) -> Result<(), AnyError> { + pub async fn execute_side_module(&mut self) -> Result<(), CoreError> { let id = self.worker.preload_side_module(&self.main_module).await?; self.worker.evaluate_module(id).await } @@ -393,7 +394,7 @@ impl CliMainWorker { &mut self, name: &'static str, source_code: &'static str, - ) -> Result, AnyError> { + ) -> Result, CoreError> { self.worker.js_runtime.execute_script(name, source_code) } } @@ -465,7 +466,7 @@ impl CliMainWorkerFactory { &self, mode: WorkerExecutionMode, main_module: ModuleSpecifier, - ) -> Result { + ) -> Result { self .create_custom_worker( mode, @@ -484,7 +485,7 @@ impl CliMainWorkerFactory { permissions: PermissionsContainer, custom_extensions: Vec, stdio: deno_runtime::deno_io::Stdio, - ) -> Result { + ) -> Result { let shared = &self.shared; let CreateModuleLoaderResult { module_loader, @@ -513,16 +514,15 @@ impl CliMainWorkerFactory { } // use a fake referrer that can be used to discover the package.json if necessary - let referrer = - ModuleSpecifier::from_directory_path(self.shared.fs.cwd()?) - .unwrap() - .join("package.json")?; + let referrer = ModuleSpecifier::from_directory_path( + self.shared.fs.cwd().map_err(JsErrorBox::from_err)?, + ) + .unwrap() + .join("package.json")?; let package_folder = shared .npm_resolver - .resolve_pkg_folder_from_deno_module_req( - package_ref.req(), - &referrer, - )?; + .resolve_pkg_folder_from_deno_module_req(package_ref.req(), &referrer) + .map_err(JsErrorBox::from_err)?; let main_module = self .resolve_binary_entrypoint(&package_folder, package_ref.sub_path())?; @@ -633,7 +633,6 @@ impl CliMainWorkerFactory { should_break_on_first_statement: shared.options.inspect_brk, should_wait_for_inspector_session: shared.options.inspect_wait, strace_ops: shared.options.strace_ops.clone(), - get_error_class_fn: Some(&errors::get_error_class_name), cache_storage_dir, origin_storage_dir, stdio, @@ -834,7 +833,6 @@ fn create_web_worker_callback( create_web_worker_cb, format_js_error_fn: Some(Arc::new(format_js_error)), worker_type: args.worker_type, - get_error_class_fn: Some(&errors::get_error_class_name), stdio: stdio.clone(), cache_storage_dir, strace_ops: shared.options.strace_ops.clone(), diff --git a/ext/broadcast_channel/Cargo.toml b/ext/broadcast_channel/Cargo.toml index 48b4127b0e658d..27aa7df702ebd3 100644 --- a/ext/broadcast_channel/Cargo.toml +++ b/ext/broadcast_channel/Cargo.toml @@ -16,6 +16,7 @@ path = "lib.rs" [dependencies] async-trait.workspace = true deno_core.workspace = true +deno_error.workspace = true thiserror.workspace = true tokio.workspace = true uuid.workspace = true diff --git a/ext/broadcast_channel/lib.rs b/ext/broadcast_channel/lib.rs index 4929153bfeac9e..ae709c674c5cc9 100644 --- a/ext/broadcast_channel/lib.rs +++ b/ext/broadcast_channel/lib.rs @@ -12,6 +12,7 @@ use deno_core::JsBuffer; use deno_core::OpState; use deno_core::Resource; use deno_core::ResourceId; +use deno_error::JsErrorBox; pub use in_memory_broadcast_channel::InMemoryBroadcastChannel; 
pub use in_memory_broadcast_channel::InMemoryBroadcastChannelResource; use tokio::sync::broadcast::error::SendError as BroadcastSendError; @@ -19,18 +20,26 @@ use tokio::sync::mpsc::error::SendError as MpscSendError; pub const UNSTABLE_FEATURE_NAME: &str = "broadcast-channel"; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum BroadcastChannelError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource( + #[from] + #[inherit] + deno_core::error::ResourceError, + ), + #[class(generic)] #[error(transparent)] MPSCSendError(MpscSendError>), + #[class(generic)] #[error(transparent)] BroadcastSendError( BroadcastSendError>, ), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Other(#[inherit] JsErrorBox), } impl From> @@ -100,10 +109,7 @@ pub fn op_broadcast_unsubscribe( where BC: BroadcastChannel + 'static, { - let resource = state - .resource_table - .get::(rid) - .map_err(BroadcastChannelError::Resource)?; + let resource = state.resource_table.get::(rid)?; let bc = state.borrow::(); bc.unsubscribe(&resource) } @@ -118,11 +124,7 @@ pub async fn op_broadcast_send( where BC: BroadcastChannel + 'static, { - let resource = state - .borrow() - .resource_table - .get::(rid) - .map_err(BroadcastChannelError::Resource)?; + let resource = state.borrow().resource_table.get::(rid)?; let bc = state.borrow().borrow::().clone(); bc.send(&resource, name, buf.to_vec()).await } @@ -136,11 +138,7 @@ pub async fn op_broadcast_recv( where BC: BroadcastChannel + 'static, { - let resource = state - .borrow() - .resource_table - .get::(rid) - .map_err(BroadcastChannelError::Resource)?; + let resource = state.borrow().resource_table.get::(rid)?; let bc = state.borrow().borrow::().clone(); bc.recv(&resource).await } diff --git a/ext/cache/Cargo.toml b/ext/cache/Cargo.toml index cb4eef87dfd3b8..b77630832f0fbb 100644 --- a/ext/cache/Cargo.toml +++ b/ext/cache/Cargo.toml @@ -16,6 +16,7 @@ path = "lib.rs" [dependencies] async-trait.workspace = true deno_core.workspace = true +deno_error.workspace = true rusqlite.workspace = true serde.workspace = true sha2.workspace = true diff --git a/ext/cache/lib.rs b/ext/cache/lib.rs index 6ee7380cf0d783..d3bfe23defc467 100644 --- a/ext/cache/lib.rs +++ b/ext/cache/lib.rs @@ -6,7 +6,6 @@ use std::rc::Rc; use std::sync::Arc; use async_trait::async_trait; -use deno_core::error::type_error; use deno_core::op2; use deno_core::serde::Deserialize; use deno_core::serde::Serialize; @@ -14,22 +13,38 @@ use deno_core::ByteString; use deno_core::OpState; use deno_core::Resource; use deno_core::ResourceId; +use deno_error::JsErrorBox; mod sqlite; pub use sqlite::SqliteBackedCache; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CacheError { + #[class(type)] + #[error("CacheStorage is not available in this context")] + ContextUnsupported, + #[class(generic)] #[error(transparent)] Sqlite(#[from] rusqlite::Error), + #[class(generic)] #[error(transparent)] JoinError(#[from] tokio::task::JoinError), + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Other(JsErrorBox), + #[class(inherit)] #[error("{0}")] Io(#[from] std::io::Error), + #[class(generic)] + #[error("Failed to create cache storage directory {}", .dir.display())] + CacheStorageDirectory { + dir: PathBuf, + 
#[source] + source: std::io::Error, + }, } #[derive(Clone)] @@ -237,9 +252,7 @@ where state.put(cache); Ok(state.borrow::().clone()) } else { - Err(CacheError::Other(type_error( - "CacheStorage is not available in this context", - ))) + Err(CacheError::ContextUnsupported) } } diff --git a/ext/cache/sqlite.rs b/ext/cache/sqlite.rs index 8bd73b4799ae04..6587a52bac6d09 100644 --- a/ext/cache/sqlite.rs +++ b/ext/cache/sqlite.rs @@ -8,8 +8,6 @@ use std::time::SystemTime; use std::time::UNIX_EPOCH; use async_trait::async_trait; -use deno_core::anyhow::Context; -use deno_core::error::AnyError; use deno_core::futures::future::poll_fn; use deno_core::parking_lot::Mutex; use deno_core::unsync::spawn_blocking; @@ -45,14 +43,12 @@ pub struct SqliteBackedCache { impl SqliteBackedCache { pub fn new(cache_storage_dir: PathBuf) -> Result { { - std::fs::create_dir_all(&cache_storage_dir) - .with_context(|| { - format!( - "Failed to create cache storage directory {}", - cache_storage_dir.display() - ) - }) - .map_err(CacheError::Other)?; + std::fs::create_dir_all(&cache_storage_dir).map_err(|source| { + CacheError::CacheStorageDirectory { + dir: cache_storage_dir.clone(), + source, + } + })?; let path = cache_storage_dir.join("cache_metadata.db"); let connection = rusqlite::Connection::open(&path).unwrap_or_else(|_| { panic!("failed to open cache db at {}", path.display()) @@ -385,7 +381,10 @@ impl CacheResponseResource { } } - async fn read(self: Rc, data: &mut [u8]) -> Result { + async fn read( + self: Rc, + data: &mut [u8], + ) -> Result { let resource = deno_core::RcRef::map(&self, |r| &r.file); let mut file = resource.borrow_mut().await; let nread = file.read(data).await?; diff --git a/ext/canvas/Cargo.toml b/ext/canvas/Cargo.toml index 0bc3dcb8fb6e58..11c6feabb8b585 100644 --- a/ext/canvas/Cargo.toml +++ b/ext/canvas/Cargo.toml @@ -15,6 +15,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true +deno_error.workspace = true deno_webgpu.workspace = true image = { version = "0.24.7", default-features = false, features = ["png"] } serde = { workspace = true, features = ["derive"] } diff --git a/ext/canvas/lib.rs b/ext/canvas/lib.rs index 533b8c3fb34d7b..91b4e44afe6c80 100644 --- a/ext/canvas/lib.rs +++ b/ext/canvas/lib.rs @@ -12,10 +12,12 @@ use image::RgbaImage; use serde::Deserialize; use serde::Serialize; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CanvasError { + #[class(type)] #[error("Color type '{0:?}' not supported")] UnsupportedColorType(ColorType), + #[class(generic)] #[error(transparent)] Image(#[from] image::ImageError), } diff --git a/ext/cron/Cargo.toml b/ext/cron/Cargo.toml index 821a2aa884f5b3..224508265a3250 100644 --- a/ext/cron/Cargo.toml +++ b/ext/cron/Cargo.toml @@ -18,6 +18,7 @@ anyhow.workspace = true async-trait.workspace = true chrono = { workspace = true, features = ["now"] } deno_core.workspace = true +deno_error.workspace = true saffron.workspace = true thiserror.workspace = true tokio.workspace = true diff --git a/ext/cron/lib.rs b/ext/cron/lib.rs index 2c8d534d54006c..b4f4938b5eb5d9 100644 --- a/ext/cron/lib.rs +++ b/ext/cron/lib.rs @@ -7,11 +7,12 @@ use std::borrow::Cow; use std::cell::RefCell; use std::rc::Rc; -use deno_core::error::get_custom_error_class; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; use deno_core::ResourceId; +use deno_error::JsErrorBox; +use deno_error::JsErrorClass; pub use crate::interface::*; @@ -47,26 +48,35 @@ impl Resource for CronResource { } } 
-#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CronError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(type)] #[error("Cron name cannot exceed 64 characters: current length {0}")] NameExceeded(usize), + #[class(type)] #[error("Invalid cron name: only alphanumeric characters, whitespace, hyphens, and underscores are allowed")] NameInvalid, + #[class(type)] #[error("Cron with this name already exists")] AlreadyExists, + #[class(type)] #[error("Too many crons")] TooManyCrons, + #[class(type)] #[error("Invalid cron schedule")] InvalidCron, + #[class(type)] #[error("Invalid backoff schedule")] InvalidBackoff, + #[class(generic)] #[error(transparent)] AcquireError(#[from] tokio::sync::AcquireError), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Other(JsErrorBox), } #[op2] @@ -119,7 +129,7 @@ where let resource = match state.resource_table.get::>(rid) { Ok(resource) => resource, Err(err) => { - if get_custom_error_class(&err) == Some("BadResource") { + if err.get_class() == "BadResource" { return Ok(false); } else { return Err(CronError::Resource(err)); diff --git a/ext/crypto/Cargo.toml b/ext/crypto/Cargo.toml index 8f7fea28d7ac12..96ddd1621f7736 100644 --- a/ext/crypto/Cargo.toml +++ b/ext/crypto/Cargo.toml @@ -23,6 +23,7 @@ const-oid = "0.9.0" ctr = "0.9.1" curve25519-dalek = "4.1.3" deno_core.workspace = true +deno_error.workspace = true deno_web.workspace = true ed448-goldilocks = { version = "0.8.3", features = ["zeroize"] } elliptic-curve = { version = "0.13.1", features = ["std", "pem"] } diff --git a/ext/crypto/decrypt.rs b/ext/crypto/decrypt.rs index 8a00ffd8cd912f..766f62d16ff234 100644 --- a/ext/crypto/decrypt.rs +++ b/ext/crypto/decrypt.rs @@ -70,26 +70,40 @@ pub enum DecryptAlgorithm { }, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum DecryptError { + #[class(inherit)] #[error(transparent)] - General(#[from] SharedError), + General( + #[from] + #[inherit] + SharedError, + ), + #[class(generic)] #[error(transparent)] Pkcs1(#[from] rsa::pkcs1::Error), + #[class("DOMExceptionOperationError")] #[error("Decryption failed")] Failed, + #[class(type)] #[error("invalid length")] InvalidLength, + #[class(type)] #[error("invalid counter length. 
Currently supported 32/64/128 bits")] InvalidCounterLength, + #[class(type)] #[error("tag length not equal to 128")] InvalidTagLength, + #[class("DOMExceptionOperationError")] #[error("invalid key or iv")] InvalidKeyOrIv, + #[class("DOMExceptionOperationError")] #[error("tried to decrypt too much data")] TooMuchData, + #[class(type)] #[error("iv length not equal to 12 or 16")] InvalidIvLength, + #[class("DOMExceptionOperationError")] #[error("{0}")] Rsa(rsa::Error), } diff --git a/ext/crypto/ed25519.rs b/ext/crypto/ed25519.rs index d64b6904dd6cf8..c56fdc7c6291ab 100644 --- a/ext/crypto/ed25519.rs +++ b/ext/crypto/ed25519.rs @@ -13,12 +13,15 @@ use spki::der::asn1::BitString; use spki::der::Decode; use spki::der::Encode; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum Ed25519Error { + #[class("DOMExceptionOperationError")] #[error("Failed to export key")] FailedExport, + #[class(generic)] #[error(transparent)] Der(#[from] rsa::pkcs1::der::Error), + #[class(generic)] #[error(transparent)] KeyRejected(#[from] ring::error::KeyRejected), } diff --git a/ext/crypto/encrypt.rs b/ext/crypto/encrypt.rs index f3464b503294fa..d94eb97cfda527 100644 --- a/ext/crypto/encrypt.rs +++ b/ext/crypto/encrypt.rs @@ -71,20 +71,31 @@ pub enum EncryptAlgorithm { }, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum EncryptError { + #[class(inherit)] #[error(transparent)] - General(#[from] SharedError), + General( + #[from] + #[inherit] + SharedError, + ), + #[class(type)] #[error("invalid length")] InvalidLength, + #[class("DOMExceptionOperationError")] #[error("invalid key or iv")] InvalidKeyOrIv, + #[class(type)] #[error("iv length not equal to 12 or 16")] InvalidIvLength, + #[class(type)] #[error("invalid counter length. 
Currently supported 32/64/128 bits")] InvalidCounterLength, + #[class("DOMExceptionOperationError")] #[error("tried to encrypt too much data")] TooMuchData, + #[class("DOMExceptionOperationError")] #[error("Encryption failed")] Failed, } diff --git a/ext/crypto/export_key.rs b/ext/crypto/export_key.rs index c4e41ef2da5265..c7d59e3cc544df 100644 --- a/ext/crypto/export_key.rs +++ b/ext/crypto/export_key.rs @@ -20,12 +20,19 @@ use spki::AlgorithmIdentifierOwned; use crate::shared::*; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum ExportKeyError { + #[class(inherit)] #[error(transparent)] - General(#[from] SharedError), + General( + #[from] + #[inherit] + SharedError, + ), + #[class(generic)] #[error(transparent)] Der(#[from] spki::der::Error), + #[class("DOMExceptionNotSupportedError")] #[error("Unsupported named curve")] UnsupportedNamedCurve, } diff --git a/ext/crypto/generate_key.rs b/ext/crypto/generate_key.rs index 953e2f1df11b7c..211084af176a74 100644 --- a/ext/crypto/generate_key.rs +++ b/ext/crypto/generate_key.rs @@ -15,10 +15,16 @@ use serde::Deserialize; use crate::shared::*; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class("DOMExceptionOperationError")] pub enum GenerateKeyError { + #[class(inherit)] #[error(transparent)] - General(#[from] SharedError), + General( + #[from] + #[inherit] + SharedError, + ), #[error("Bad public exponent")] BadPublicExponent, #[error("Invalid HMAC key length")] diff --git a/ext/crypto/import_key.rs b/ext/crypto/import_key.rs index 4011f2536ba023..e9059bbdc6d246 100644 --- a/ext/crypto/import_key.rs +++ b/ext/crypto/import_key.rs @@ -14,10 +14,16 @@ use spki::der::Decode; use crate::shared::*; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class("DOMExceptionDataError")] pub enum ImportKeyError { + #[class(inherit)] #[error(transparent)] - General(#[from] SharedError), + General( + #[from] + #[inherit] + SharedError, + ), #[error("invalid modulus")] InvalidModulus, #[error("invalid public exponent")] diff --git a/ext/crypto/lib.rs b/ext/crypto/lib.rs index f468af5b07dac8..0d6eecb911f368 100644 --- a/ext/crypto/lib.rs +++ b/ext/crypto/lib.rs @@ -8,12 +8,12 @@ use aes_kw::KekAes192; use aes_kw::KekAes256; use base64::prelude::BASE64_URL_SAFE_NO_PAD; use base64::Engine; -use deno_core::error::not_supported; use deno_core::op2; use deno_core::unsync::spawn_blocking; use deno_core::JsBuffer; use deno_core::OpState; use deno_core::ToJsBuffer; +use deno_error::JsErrorBox; use p256::elliptic_curve::sec1::FromEncodedPoint; use p256::pkcs8::DecodePrivateKey; pub use rand; @@ -129,63 +129,99 @@ deno_core::extension!(deno_crypto, }, ); -#[derive(Debug, thiserror::Error)] -pub enum Error { +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum CryptoError { + #[class(inherit)] #[error(transparent)] - General(#[from] SharedError), + General( + #[from] + #[inherit] + SharedError, + ), + #[class(inherit)] #[error(transparent)] - JoinError(#[from] tokio::task::JoinError), + JoinError( + #[from] + #[inherit] + tokio::task::JoinError, + ), + #[class(generic)] #[error(transparent)] Der(#[from] rsa::pkcs1::der::Error), + #[class(type)] #[error("Missing argument hash")] MissingArgumentHash, + #[class(type)] #[error("Missing argument saltLength")] MissingArgumentSaltLength, + #[class(type)] #[error("unsupported algorithm")] UnsupportedAlgorithm, + #[class(generic)] #[error(transparent)] KeyRejected(#[from] 
ring::error::KeyRejected), + #[class(generic)] #[error(transparent)] RSA(#[from] rsa::Error), + #[class(generic)] #[error(transparent)] Pkcs1(#[from] rsa::pkcs1::Error), + #[class(generic)] #[error(transparent)] Unspecified(#[from] ring::error::Unspecified), + #[class(type)] #[error("Invalid key format")] InvalidKeyFormat, + #[class(generic)] #[error(transparent)] P256Ecdsa(#[from] p256::ecdsa::Error), + #[class(type)] #[error("Unexpected error decoding private key")] DecodePrivateKey, + #[class(type)] #[error("Missing argument publicKey")] MissingArgumentPublicKey, + #[class(type)] #[error("Missing argument namedCurve")] MissingArgumentNamedCurve, + #[class(type)] #[error("Missing argument info")] MissingArgumentInfo, + #[class("DOMExceptionOperationError")] #[error("The length provided for HKDF is too large")] HKDFLengthTooLarge, + #[class(generic)] #[error(transparent)] Base64Decode(#[from] base64::DecodeError), + #[class(type)] #[error("Data must be multiple of 8 bytes")] DataInvalidSize, + #[class(type)] #[error("Invalid key length")] InvalidKeyLength, + #[class("DOMExceptionOperationError")] #[error("encryption error")] EncryptionError, + #[class("DOMExceptionOperationError")] #[error("decryption error - integrity check failed")] DecryptionError, + #[class("DOMExceptionQuotaExceededError")] #[error("The ArrayBufferView's byte length ({0}) exceeds the number of bytes of entropy available via this API (65536)")] ArrayBufferViewLengthExceeded(usize), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Other( + #[from] + #[inherit] + JsErrorBox, + ), } #[op2] #[serde] pub fn op_crypto_base64url_decode( #[string] data: String, -) -> Result { +) -> Result { let data: Vec = BASE64_URL_SAFE_NO_PAD.decode(data)?; Ok(data.into()) } @@ -201,9 +237,9 @@ pub fn op_crypto_base64url_encode(#[buffer] data: JsBuffer) -> String { pub fn op_crypto_get_random_values( state: &mut OpState, #[buffer] out: &mut [u8], -) -> Result<(), Error> { +) -> Result<(), CryptoError> { if out.len() > 65536 { - return Err(Error::ArrayBufferViewLengthExceeded(out.len())); + return Err(CryptoError::ArrayBufferViewLengthExceeded(out.len())); } let maybe_seeded_rng = state.try_borrow_mut::(); @@ -255,7 +291,7 @@ pub struct SignArg { pub async fn op_crypto_sign_key( #[serde] args: SignArg, #[buffer] zero_copy: JsBuffer, -) -> Result { +) -> Result { deno_core::unsync::spawn_blocking(move || { let data = &*zero_copy; let algorithm = args.algorithm; @@ -264,7 +300,7 @@ pub async fn op_crypto_sign_key( Algorithm::RsassaPkcs1v15 => { use rsa::pkcs1v15::SigningKey; let private_key = RsaPrivateKey::from_pkcs1_der(&args.key.data)?; - match args.hash.ok_or_else(|| Error::MissingArgumentHash)? { + match args.hash.ok_or_else(|| CryptoError::MissingArgumentHash)? { CryptoHash::Sha1 => { let signing_key = SigningKey::::new(private_key); signing_key.sign(data) @@ -289,11 +325,11 @@ pub async fn op_crypto_sign_key( let salt_len = args .salt_length - .ok_or_else(|| Error::MissingArgumentSaltLength)? + .ok_or_else(|| CryptoError::MissingArgumentSaltLength)? as usize; let mut rng = OsRng; - match args.hash.ok_or_else(|| Error::MissingArgumentHash)? { + match args.hash.ok_or_else(|| CryptoError::MissingArgumentHash)? { CryptoHash::Sha1 => { let signing_key = Pss::new_with_salt::(salt_len); let hashed = Sha1::digest(data); @@ -320,7 +356,7 @@ pub async fn op_crypto_sign_key( Algorithm::Ecdsa => { let curve: &EcdsaSigningAlgorithm = args .named_curve - .ok_or_else(|| Error::Other(not_supported()))? 
+ .ok_or_else(JsErrorBox::not_supported)? .into(); let rng = RingRand::SystemRandom::new(); @@ -330,7 +366,7 @@ pub async fn op_crypto_sign_key( if let Some(hash) = args.hash { match hash { CryptoHash::Sha256 | CryptoHash::Sha384 => (), - _ => return Err(Error::UnsupportedAlgorithm), + _ => return Err(CryptoError::UnsupportedAlgorithm), } }; @@ -340,17 +376,15 @@ pub async fn op_crypto_sign_key( signature.as_ref().to_vec() } Algorithm::Hmac => { - let hash: HmacAlgorithm = args - .hash - .ok_or_else(|| Error::Other(not_supported()))? - .into(); + let hash: HmacAlgorithm = + args.hash.ok_or_else(JsErrorBox::not_supported)?.into(); let key = HmacKey::new(hash, &args.key.data); let signature = ring::hmac::sign(&key, data); signature.as_ref().to_vec() } - _ => return Err(Error::UnsupportedAlgorithm), + _ => return Err(CryptoError::UnsupportedAlgorithm), }; Ok(signature.into()) @@ -373,7 +407,7 @@ pub struct VerifyArg { pub async fn op_crypto_verify_key( #[serde] args: VerifyArg, #[buffer] zero_copy: JsBuffer, -) -> Result { +) -> Result { deno_core::unsync::spawn_blocking(move || { let data = &*zero_copy; let algorithm = args.algorithm; @@ -384,7 +418,7 @@ pub async fn op_crypto_verify_key( use rsa::pkcs1v15::VerifyingKey; let public_key = read_rsa_public_key(args.key)?; let signature: Signature = args.signature.as_ref().try_into()?; - match args.hash.ok_or_else(|| Error::MissingArgumentHash)? { + match args.hash.ok_or_else(|| CryptoError::MissingArgumentHash)? { CryptoHash::Sha1 => { let verifying_key = VerifyingKey::::new(public_key); verifying_key.verify(data, &signature).is_ok() @@ -409,10 +443,10 @@ pub async fn op_crypto_verify_key( let salt_len = args .salt_length - .ok_or_else(|| Error::MissingArgumentSaltLength)? + .ok_or_else(|| CryptoError::MissingArgumentSaltLength)? as usize; - match args.hash.ok_or_else(|| Error::MissingArgumentHash)? { + match args.hash.ok_or_else(|| CryptoError::MissingArgumentHash)? { CryptoHash::Sha1 => { let pss = Pss::new_with_salt::(salt_len); let hashed = Sha1::digest(data); @@ -436,21 +470,19 @@ pub async fn op_crypto_verify_key( } } Algorithm::Hmac => { - let hash: HmacAlgorithm = args - .hash - .ok_or_else(|| Error::Other(not_supported()))? - .into(); + let hash: HmacAlgorithm = + args.hash.ok_or_else(JsErrorBox::not_supported)?.into(); let key = HmacKey::new(hash, &args.key.data); ring::hmac::verify(&key, data, &args.signature).is_ok() } Algorithm::Ecdsa => { let signing_alg: &EcdsaSigningAlgorithm = args .named_curve - .ok_or_else(|| Error::Other(not_supported()))? + .ok_or_else(JsErrorBox::not_supported)? .into(); let verify_alg: &EcdsaVerificationAlgorithm = args .named_curve - .ok_or_else(|| Error::Other(not_supported()))? + .ok_or_else(JsErrorBox::not_supported)? 
.into(); let private_key; @@ -464,7 +496,7 @@ pub async fn op_crypto_verify_key( private_key.public_key().as_ref() } KeyType::Public => &*args.key.data, - _ => return Err(Error::InvalidKeyFormat), + _ => return Err(CryptoError::InvalidKeyFormat), }; let public_key = @@ -472,7 +504,7 @@ pub async fn op_crypto_verify_key( public_key.verify(data, &args.signature).is_ok() } - _ => return Err(Error::UnsupportedAlgorithm), + _ => return Err(CryptoError::UnsupportedAlgorithm), }; Ok(verification) @@ -500,31 +532,27 @@ pub struct DeriveKeyArg { pub async fn op_crypto_derive_bits( #[serde] args: DeriveKeyArg, #[buffer] zero_copy: Option, -) -> Result { +) -> Result { deno_core::unsync::spawn_blocking(move || { let algorithm = args.algorithm; match algorithm { Algorithm::Pbkdf2 => { - let zero_copy = - zero_copy.ok_or_else(|| Error::Other(not_supported()))?; + let zero_copy = zero_copy.ok_or_else(JsErrorBox::not_supported)?; let salt = &*zero_copy; // The caller must validate these cases. assert!(args.length > 0); assert!(args.length % 8 == 0); - let algorithm = - match args.hash.ok_or_else(|| Error::Other(not_supported()))? { - CryptoHash::Sha1 => pbkdf2::PBKDF2_HMAC_SHA1, - CryptoHash::Sha256 => pbkdf2::PBKDF2_HMAC_SHA256, - CryptoHash::Sha384 => pbkdf2::PBKDF2_HMAC_SHA384, - CryptoHash::Sha512 => pbkdf2::PBKDF2_HMAC_SHA512, - }; + let algorithm = match args.hash.ok_or_else(JsErrorBox::not_supported)? { + CryptoHash::Sha1 => pbkdf2::PBKDF2_HMAC_SHA1, + CryptoHash::Sha256 => pbkdf2::PBKDF2_HMAC_SHA256, + CryptoHash::Sha384 => pbkdf2::PBKDF2_HMAC_SHA384, + CryptoHash::Sha512 => pbkdf2::PBKDF2_HMAC_SHA512, + }; // This will never panic. We have already checked length earlier. let iterations = NonZeroU32::new( - args - .iterations - .ok_or_else(|| Error::Other(not_supported()))?, + args.iterations.ok_or_else(JsErrorBox::not_supported)?, ) .unwrap(); let secret = args.key.data; @@ -535,33 +563,33 @@ pub async fn op_crypto_derive_bits( Algorithm::Ecdh => { let named_curve = args .named_curve - .ok_or_else(|| Error::MissingArgumentNamedCurve)?; + .ok_or_else(|| CryptoError::MissingArgumentNamedCurve)?; let public_key = args .public_key - .ok_or_else(|| Error::MissingArgumentPublicKey)?; + .ok_or_else(|| CryptoError::MissingArgumentPublicKey)?; match named_curve { CryptoNamedCurve::P256 => { let secret_key = p256::SecretKey::from_pkcs8_der(&args.key.data) - .map_err(|_| Error::DecodePrivateKey)?; + .map_err(|_| CryptoError::DecodePrivateKey)?; let public_key = match public_key.r#type { KeyType::Private => { p256::SecretKey::from_pkcs8_der(&public_key.data) - .map_err(|_| Error::DecodePrivateKey)? + .map_err(|_| CryptoError::DecodePrivateKey)? .public_key() } KeyType::Public => { let point = p256::EncodedPoint::from_bytes(public_key.data) - .map_err(|_| Error::DecodePrivateKey)?; + .map_err(|_| CryptoError::DecodePrivateKey)?; let pk = p256::PublicKey::from_encoded_point(&point); // pk is a constant time Option. if pk.is_some().into() { pk.unwrap() } else { - return Err(Error::DecodePrivateKey); + return Err(CryptoError::DecodePrivateKey); } } _ => unreachable!(), @@ -577,24 +605,24 @@ pub async fn op_crypto_derive_bits( } CryptoNamedCurve::P384 => { let secret_key = p384::SecretKey::from_pkcs8_der(&args.key.data) - .map_err(|_| Error::DecodePrivateKey)?; + .map_err(|_| CryptoError::DecodePrivateKey)?; let public_key = match public_key.r#type { KeyType::Private => { p384::SecretKey::from_pkcs8_der(&public_key.data) - .map_err(|_| Error::DecodePrivateKey)? + .map_err(|_| CryptoError::DecodePrivateKey)? 
.public_key() } KeyType::Public => { let point = p384::EncodedPoint::from_bytes(public_key.data) - .map_err(|_| Error::DecodePrivateKey)?; + .map_err(|_| CryptoError::DecodePrivateKey)?; let pk = p384::PublicKey::from_encoded_point(&point); // pk is a constant time Option. if pk.is_some().into() { pk.unwrap() } else { - return Err(Error::DecodePrivateKey); + return Err(CryptoError::DecodePrivateKey); } } _ => unreachable!(), @@ -611,18 +639,16 @@ pub async fn op_crypto_derive_bits( } } Algorithm::Hkdf => { - let zero_copy = - zero_copy.ok_or_else(|| Error::Other(not_supported()))?; + let zero_copy = zero_copy.ok_or_else(JsErrorBox::not_supported)?; let salt = &*zero_copy; - let algorithm = - match args.hash.ok_or_else(|| Error::Other(not_supported()))? { - CryptoHash::Sha1 => hkdf::HKDF_SHA1_FOR_LEGACY_USE_ONLY, - CryptoHash::Sha256 => hkdf::HKDF_SHA256, - CryptoHash::Sha384 => hkdf::HKDF_SHA384, - CryptoHash::Sha512 => hkdf::HKDF_SHA512, - }; - - let info = args.info.ok_or_else(|| Error::MissingArgumentInfo)?; + let algorithm = match args.hash.ok_or_else(JsErrorBox::not_supported)? { + CryptoHash::Sha1 => hkdf::HKDF_SHA1_FOR_LEGACY_USE_ONLY, + CryptoHash::Sha256 => hkdf::HKDF_SHA256, + CryptoHash::Sha384 => hkdf::HKDF_SHA384, + CryptoHash::Sha512 => hkdf::HKDF_SHA512, + }; + + let info = args.info.ok_or(CryptoError::MissingArgumentInfo)?; // IKM let secret = args.key.data; // L @@ -633,18 +659,18 @@ pub async fn op_crypto_derive_bits( let info = &[&*info]; let okm = prk .expand(info, HkdfOutput(length)) - .map_err(|_e| Error::HKDFLengthTooLarge)?; + .map_err(|_e| CryptoError::HKDFLengthTooLarge)?; let mut r = vec![0u8; length]; okm.fill(&mut r)?; Ok(r.into()) } - _ => Err(Error::UnsupportedAlgorithm), + _ => Err(CryptoError::UnsupportedAlgorithm), } }) .await? 
} -fn read_rsa_public_key(key_data: KeyData) -> Result { +fn read_rsa_public_key(key_data: KeyData) -> Result { let public_key = match key_data.r#type { KeyType::Private => { RsaPrivateKey::from_pkcs1_der(&key_data.data)?.to_public_key() @@ -657,7 +683,9 @@ fn read_rsa_public_key(key_data: KeyData) -> Result { #[op2] #[string] -pub fn op_crypto_random_uuid(state: &mut OpState) -> Result { +pub fn op_crypto_random_uuid( + state: &mut OpState, +) -> Result { let maybe_seeded_rng = state.try_borrow_mut::(); let uuid = if let Some(seeded_rng) = maybe_seeded_rng { let mut bytes = [0u8; 16]; @@ -678,7 +706,7 @@ pub fn op_crypto_random_uuid(state: &mut OpState) -> Result { pub async fn op_crypto_subtle_digest( #[serde] algorithm: CryptoHash, #[buffer] data: JsBuffer, -) -> Result { +) -> Result { let output = spawn_blocking(move || { digest::digest(algorithm.into(), &data) .as_ref() @@ -702,7 +730,7 @@ pub struct WrapUnwrapKeyArg { pub fn op_crypto_wrap_key( #[serde] args: WrapUnwrapKeyArg, #[buffer] data: JsBuffer, -) -> Result { +) -> Result { let algorithm = args.algorithm; match algorithm { @@ -710,20 +738,20 @@ pub fn op_crypto_wrap_key( let key = args.key.as_secret_key()?; if data.len() % 8 != 0 { - return Err(Error::DataInvalidSize); + return Err(CryptoError::DataInvalidSize); } let wrapped_key = match key.len() { 16 => KekAes128::new(key.into()).wrap_vec(&data), 24 => KekAes192::new(key.into()).wrap_vec(&data), 32 => KekAes256::new(key.into()).wrap_vec(&data), - _ => return Err(Error::InvalidKeyLength), + _ => return Err(CryptoError::InvalidKeyLength), } - .map_err(|_| Error::EncryptionError)?; + .map_err(|_| CryptoError::EncryptionError)?; Ok(wrapped_key.into()) } - _ => Err(Error::UnsupportedAlgorithm), + _ => Err(CryptoError::UnsupportedAlgorithm), } } @@ -732,27 +760,27 @@ pub fn op_crypto_wrap_key( pub fn op_crypto_unwrap_key( #[serde] args: WrapUnwrapKeyArg, #[buffer] data: JsBuffer, -) -> Result { +) -> Result { let algorithm = args.algorithm; match algorithm { Algorithm::AesKw => { let key = args.key.as_secret_key()?; if data.len() % 8 != 0 { - return Err(Error::DataInvalidSize); + return Err(CryptoError::DataInvalidSize); } let unwrapped_key = match key.len() { 16 => KekAes128::new(key.into()).unwrap_vec(&data), 24 => KekAes192::new(key.into()).unwrap_vec(&data), 32 => KekAes256::new(key.into()).unwrap_vec(&data), - _ => return Err(Error::InvalidKeyLength), + _ => return Err(CryptoError::InvalidKeyLength), } - .map_err(|_| Error::DecryptionError)?; + .map_err(|_| CryptoError::DecryptionError)?; Ok(unwrapped_key.into()) } - _ => Err(Error::UnsupportedAlgorithm), + _ => Err(CryptoError::UnsupportedAlgorithm), } } diff --git a/ext/crypto/shared.rs b/ext/crypto/shared.rs index 60ba560c98a408..1c28e0b87de426 100644 --- a/ext/crypto/shared.rs +++ b/ext/crypto/shared.rs @@ -60,26 +60,36 @@ pub enum RustRawKeyData { Public(ToJsBuffer), } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum SharedError { + #[class(type)] #[error("expected valid private key")] ExpectedValidPrivateKey, + #[class(type)] #[error("expected valid public key")] ExpectedValidPublicKey, + #[class(type)] #[error("expected valid private EC key")] ExpectedValidPrivateECKey, + #[class(type)] #[error("expected valid public EC key")] ExpectedValidPublicECKey, + #[class(type)] #[error("expected private key")] ExpectedPrivateKey, + #[class(type)] #[error("expected public key")] ExpectedPublicKey, + #[class(type)] #[error("expected secret key")] ExpectedSecretKey, + 
#[class("DOMExceptionOperationError")] #[error("failed to decode private key")] FailedDecodePrivateKey, + #[class("DOMExceptionOperationError")] #[error("failed to decode public key")] FailedDecodePublicKey, + #[class("DOMExceptionNotSupportedError")] #[error("unsupported format")] UnsupportedFormat, } diff --git a/ext/crypto/x25519.rs b/ext/crypto/x25519.rs index 80537d435f1794..226ed89e40ee74 100644 --- a/ext/crypto/x25519.rs +++ b/ext/crypto/x25519.rs @@ -11,10 +11,12 @@ use spki::der::asn1::BitString; use spki::der::Decode; use spki::der::Encode; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum X25519Error { + #[class("DOMExceptionOperationError")] #[error("Failed to export key")] FailedExport, + #[class(generic)] #[error(transparent)] Der(#[from] spki::der::Error), } diff --git a/ext/crypto/x448.rs b/ext/crypto/x448.rs index c582aa96613a93..2086a8f048ff6a 100644 --- a/ext/crypto/x448.rs +++ b/ext/crypto/x448.rs @@ -12,10 +12,12 @@ use spki::der::asn1::BitString; use spki::der::Decode; use spki::der::Encode; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum X448Error { + #[class("DOMExceptionOperationError")] #[error("Failed to export key")] FailedExport, + #[class(generic)] #[error(transparent)] Der(#[from] spki::der::Error), } diff --git a/ext/fetch/Cargo.toml b/ext/fetch/Cargo.toml index ec82281bf4b868..21f77153b95514 100644 --- a/ext/fetch/Cargo.toml +++ b/ext/fetch/Cargo.toml @@ -18,6 +18,7 @@ base64.workspace = true bytes.workspace = true data-url.workspace = true deno_core.workspace = true +deno_error.workspace = true deno_path_util.workspace = true deno_permissions.workspace = true deno_tls.workspace = true diff --git a/ext/fetch/fs_fetch_handler.rs b/ext/fetch/fs_fetch_handler.rs index 33293d0a27b9e0..8761eb2d658970 100644 --- a/ext/fetch/fs_fetch_handler.rs +++ b/ext/fetch/fs_fetch_handler.rs @@ -8,6 +8,7 @@ use deno_core::futures::TryStreamExt; use deno_core::url::Url; use deno_core::CancelFuture; use deno_core::OpState; +use deno_error::JsErrorBox; use http::StatusCode; use http_body_util::BodyExt; use tokio_util::io::ReaderStream; @@ -34,7 +35,7 @@ impl FetchHandler for FsFetchHandler { let file = tokio::fs::File::open(path).map_err(|_| ()).await?; let stream = ReaderStream::new(file) .map_ok(hyper::body::Frame::data) - .map_err(Into::into); + .map_err(JsErrorBox::from_err); let body = http_body_util::StreamBody::new(stream).boxed(); let response = http::Response::builder() .status(StatusCode::OK) diff --git a/ext/fetch/lib.rs b/ext/fetch/lib.rs index d1f3963743fe7d..5af68695efb0b6 100644 --- a/ext/fetch/lib.rs +++ b/ext/fetch/lib.rs @@ -46,6 +46,7 @@ use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; +use deno_error::JsErrorBox; use deno_path_util::url_from_file_path; use deno_path_util::PathToUrlError; use deno_permissions::PermissionCheckError; @@ -100,9 +101,8 @@ pub struct Options { /// For more info on what can be configured, see [`hyper_util::client::legacy::Builder`]. 
pub client_builder_hook: Option HyperClientBuilder>, #[allow(clippy::type_complexity)] - pub request_builder_hook: Option< - fn(&mut http::Request) -> Result<(), deno_core::error::AnyError>, - >, + pub request_builder_hook: + Option) -> Result<(), JsErrorBox>>, pub unsafely_ignore_certificate_errors: Option>, pub client_cert_chain_and_key: TlsKeys, pub file_fetch_handler: Rc, @@ -110,9 +110,7 @@ pub struct Options { } impl Options { - pub fn root_cert_store( - &self, - ) -> Result, deno_core::error::AnyError> { + pub fn root_cert_store(&self) -> Result, JsErrorBox> { Ok(match &self.root_cert_store_provider { Some(provider) => Some(provider.get_or_try_init()?.clone()), None => None, @@ -164,48 +162,71 @@ deno_core::extension!(deno_fetch, }, ); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum FetchError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(inherit)] #[error(transparent)] Permission(#[from] PermissionCheckError), + #[class(type)] #[error("NetworkError when attempting to fetch resource")] NetworkError, + #[class(type)] #[error("Fetching files only supports the GET method: received {0}")] FsNotGet(Method), + #[class(inherit)] #[error(transparent)] PathToUrl(#[from] PathToUrlError), + #[class(type)] #[error("Invalid URL {0}")] InvalidUrl(Url), + #[class(type)] #[error(transparent)] InvalidHeaderName(#[from] http::header::InvalidHeaderName), + #[class(type)] #[error(transparent)] InvalidHeaderValue(#[from] http::header::InvalidHeaderValue), + #[class(type)] #[error("{0:?}")] DataUrl(data_url::DataUrlError), + #[class(type)] #[error("{0:?}")] Base64(data_url::forgiving_base64::InvalidBase64), + #[class(type)] #[error("Blob for the given URL not found.")] BlobNotFound, + #[class(type)] #[error("Url scheme '{0}' not supported")] SchemeNotSupported(String), + #[class(type)] #[error("Request was cancelled")] RequestCanceled, + #[class(generic)] #[error(transparent)] Http(#[from] http::Error), + #[class(inherit)] #[error(transparent)] ClientCreate(#[from] HttpClientCreateError), + #[class(inherit)] #[error(transparent)] Url(#[from] url::ParseError), + #[class(type)] #[error(transparent)] Method(#[from] http::method::InvalidMethod), + #[class(inherit)] #[error(transparent)] ClientSend(#[from] ClientSendError), + #[class(inherit)] #[error(transparent)] - RequestBuilderHook(deno_core::error::AnyError), + RequestBuilderHook(JsErrorBox), + #[class(inherit)] #[error(transparent)] Io(#[from] std::io::Error), + #[class(generic)] + #[error(transparent)] + Dns(hickory_resolver::ResolveError), } pub type CancelableResponseFuture = @@ -294,9 +315,7 @@ pub fn create_client_from_options( #[allow(clippy::type_complexity)] pub struct ResourceToBodyAdapter( Rc, - Option< - Pin>>>, - >, + Option>>>>, ); impl ResourceToBodyAdapter { @@ -312,7 +331,7 @@ unsafe impl Send for ResourceToBodyAdapter {} unsafe impl Sync for ResourceToBodyAdapter {} impl Stream for ResourceToBodyAdapter { - type Item = Result; + type Item = Result; fn poll_next( self: Pin<&mut Self>, @@ -342,7 +361,7 @@ impl Stream for ResourceToBodyAdapter { impl hyper::body::Body for ResourceToBodyAdapter { type Data = Bytes; - type Error = deno_core::error::AnyError; + type Error = JsErrorBox; fn poll_frame( self: Pin<&mut Self>, @@ -417,10 +436,7 @@ where FP: FetchPermissions + 'static, { let (client, allow_host) = if let Some(rid) = client_rid { - let r = state - .resource_table - .get::(rid) - 
.map_err(FetchError::Resource)?; + let r = state.resource_table.get::(rid)?; (r.client.clone(), r.allow_host) } else { (get_or_create_client_from_state(state)?, false) @@ -479,10 +495,7 @@ where ReqBody::full(data.to_vec().into()) } (_, Some(resource)) => { - let resource = state - .resource_table - .take_any(resource) - .map_err(FetchError::Resource)?; + let resource = state.resource_table.take_any(resource)?; match resource.size_hint() { (body_size, Some(n)) if body_size == n && body_size > 0 => { con_len = Some(body_size); @@ -624,8 +637,7 @@ pub async fn op_fetch_send( let request = state .borrow_mut() .resource_table - .take::(rid) - .map_err(FetchError::Resource)?; + .take::(rid)?; let request = Rc::try_unwrap(request) .ok() @@ -804,9 +816,7 @@ impl Resource for FetchResponseResource { // safely call `await` on it without creating a race condition. Some(_) => match reader.as_mut().next().await.unwrap() { Ok(chunk) => assert!(chunk.is_empty()), - Err(err) => { - break Err(deno_core::error::type_error(err.to_string())) - } + Err(err) => break Err(JsErrorBox::type_error(err.to_string())), }, None => break Ok(BufView::empty()), } @@ -814,7 +824,10 @@ impl Resource for FetchResponseResource { }; let cancel_handle = RcRef::map(self, |r| &r.cancel); - fut.try_or_cancel(cancel_handle).await + fut + .try_or_cancel(cancel_handle) + .await + .map_err(JsErrorBox::from_err) }) } @@ -897,9 +910,7 @@ where ca_certs, proxy: args.proxy, dns_resolver: if args.use_hickory_resolver { - dns::Resolver::hickory() - .map_err(deno_core::error::AnyError::new) - .map_err(FetchError::Resource)? + dns::Resolver::hickory().map_err(FetchError::Dns)? } else { dns::Resolver::default() }, @@ -963,7 +974,8 @@ impl Default for CreateHttpClientOptions { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum HttpClientCreateError { #[error(transparent)] Tls(deno_tls::TlsError), @@ -973,8 +985,9 @@ pub enum HttpClientCreateError { InvalidProxyUrl, #[error("Cannot create Http Client: either `http1` or `http2` needs to be set to true")] HttpVersionSelectionInvalid, + #[class(inherit)] #[error(transparent)] - RootCertStore(deno_core::error::AnyError), + RootCertStore(JsErrorBox), } /// Create new instance of async Client. 
This client supports @@ -1097,7 +1110,8 @@ type Connector = proxy::ProxyConnector>; #[allow(clippy::declare_interior_mutable_const)] const STAR_STAR: HeaderValue = HeaderValue::from_static("*/*"); -#[derive(Debug)] +#[derive(Debug, deno_error::JsError)] +#[class(type)] pub struct ClientSendError { uri: Uri, pub source: hyper_util::client::legacy::Error, @@ -1172,7 +1186,7 @@ impl Client { .oneshot(req) .await .map_err(|e| ClientSendError { uri, source: e })?; - Ok(resp.map(|b| b.map_err(|e| deno_core::anyhow::anyhow!(e)).boxed())) + Ok(resp.map(|b| b.map_err(|e| JsErrorBox::generic(e.to_string())).boxed())) } } @@ -1180,10 +1194,10 @@ impl Client { pub enum ReqBody { Full(http_body_util::Full), Empty(http_body_util::Empty), - Streaming(BoxBody), + Streaming(BoxBody), } -pub type ResBody = BoxBody; +pub type ResBody = BoxBody; impl ReqBody { pub fn full(bytes: Bytes) -> Self { @@ -1196,7 +1210,7 @@ impl ReqBody { pub fn streaming(body: B) -> Self where - B: hyper::body::Body + B: hyper::body::Body + Send + Sync + 'static, @@ -1207,7 +1221,7 @@ impl ReqBody { impl hyper::body::Body for ReqBody { type Data = Bytes; - type Error = deno_core::error::AnyError; + type Error = JsErrorBox; fn poll_frame( mut self: Pin<&mut Self>, diff --git a/ext/ffi/Cargo.toml b/ext/ffi/Cargo.toml index 0af0f4a131859c..b78aa36d7cbddc 100644 --- a/ext/ffi/Cargo.toml +++ b/ext/ffi/Cargo.toml @@ -15,6 +15,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true +deno_error.workspace = true deno_permissions.workspace = true dlopen2.workspace = true dynasmrt = "1.2.3" diff --git a/ext/ffi/call.rs b/ext/ffi/call.rs index 001b925af520fc..4f9a057d01379a 100644 --- a/ext/ffi/call.rs +++ b/ext/ffi/call.rs @@ -25,18 +25,24 @@ use crate::symbol::Symbol; use crate::FfiPermissions; use crate::ForeignFunction; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CallError { + #[class(type)] #[error(transparent)] IR(#[from] IRError), + #[class(generic)] #[error("Nonblocking FFI call failed: {0}")] NonblockingCallFailure(#[source] tokio::task::JoinError), + #[class(type)] #[error("Invalid FFI symbol name: '{0}'")] InvalidSymbol(String), + #[class(inherit)] #[error(transparent)] Permission(#[from] deno_permissions::PermissionCheckError), + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(inherit)] #[error(transparent)] Callback(#[from] super::CallbackError), } @@ -346,10 +352,7 @@ pub fn op_ffi_call_nonblocking( ) -> Result>, CallError> { let symbol = { let state = state.borrow(); - let resource = state - .resource_table - .get::(rid) - .map_err(CallError::Resource)?; + let resource = state.resource_table.get::(rid)?; let symbols = &resource.symbols; *symbols .get(&symbol) diff --git a/ext/ffi/callback.rs b/ext/ffi/callback.rs index c4b5e14842fab4..a6c104ef42cc9c 100644 --- a/ext/ffi/callback.rs +++ b/ext/ffi/callback.rs @@ -35,14 +35,17 @@ thread_local! 
{ static LOCAL_THREAD_ID: RefCell = const { RefCell::new(0) }; } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CallbackError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(inherit)] #[error(transparent)] Permission(#[from] deno_permissions::PermissionCheckError), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Other(#[from] deno_error::JsErrorBox), } #[derive(Clone)] @@ -63,13 +66,8 @@ impl PtrSymbol { .clone() .into_iter() .map(libffi::middle::Type::try_from) - .collect::, _>>() - .map_err(CallbackError::Other)?, - def - .result - .clone() - .try_into() - .map_err(CallbackError::Other)?, + .collect::, _>>()?, + def.result.clone().try_into()?, ); Ok(Self { cif, ptr }) @@ -540,10 +538,8 @@ pub fn op_ffi_unsafe_callback_ref( #[smi] rid: ResourceId, ) -> Result, CallbackError> { let state = state.borrow(); - let callback_resource = state - .resource_table - .get::(rid) - .map_err(CallbackError::Resource)?; + let callback_resource = + state.resource_table.get::(rid)?; Ok(async move { let info: &mut CallbackInfo = @@ -610,10 +606,8 @@ where .parameters .into_iter() .map(libffi::middle::Type::try_from) - .collect::, _>>() - .map_err(CallbackError::Other)?, - libffi::middle::Type::try_from(args.result) - .map_err(CallbackError::Other)?, + .collect::, _>>()?, + libffi::middle::Type::try_from(args.result)?, ); // SAFETY: CallbackInfo is leaked, is not null and stays valid as long as the callback exists. @@ -649,10 +643,8 @@ pub fn op_ffi_unsafe_callback_close( // It is up to the user to know that it is safe to call the `close()` on the // UnsafeCallback instance. unsafe { - let callback_resource = state - .resource_table - .take::(rid) - .map_err(CallbackError::Resource)?; + let callback_resource = + state.resource_table.take::(rid)?; let info = Box::from_raw(callback_resource.info); let _ = v8::Global::from_raw(scope, info.callback); let _ = v8::Global::from_raw(scope, info.context); diff --git a/ext/ffi/dlfcn.rs b/ext/ffi/dlfcn.rs index 4eea2402a04707..da5a85e7e3025e 100644 --- a/ext/ffi/dlfcn.rs +++ b/ext/ffi/dlfcn.rs @@ -11,6 +11,8 @@ use deno_core::v8; use deno_core::GarbageCollected; use deno_core::OpState; use deno_core::Resource; +use deno_error::JsErrorBox; +use deno_error::JsErrorClass; use dlopen2::raw::Library; use serde::Deserialize; use serde_value::ValueDeserializer; @@ -22,20 +24,34 @@ use crate::turbocall; use crate::turbocall::Turbocall; use crate::FfiPermissions; -#[derive(Debug, thiserror::Error)] +deno_error::js_error_wrapper!(dlopen2::Error, JsDlopen2Error, |err| { + match err { + dlopen2::Error::NullCharacter(_) => "InvalidData".into(), + dlopen2::Error::OpeningLibraryError(e) => e.get_class(), + dlopen2::Error::SymbolGettingError(e) => e.get_class(), + dlopen2::Error::AddrNotMatchingDll(e) => e.get_class(), + dlopen2::Error::NullSymbol => "NotFound".into(), + } +}); + +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum DlfcnError { + #[class(generic)] #[error("Failed to register symbol {symbol}: {error}")] RegisterSymbol { symbol: String, #[source] error: dlopen2::Error, }, + #[class(generic)] #[error(transparent)] Dlopen(#[from] dlopen2::Error), + #[class(inherit)] #[error(transparent)] Permission(#[from] deno_permissions::PermissionCheckError), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Other(#[from] JsErrorBox), } pub struct 
DynamicLibraryResource { @@ -190,13 +206,8 @@ where .clone() .into_iter() .map(libffi::middle::Type::try_from) - .collect::, _>>() - .map_err(DlfcnError::Other)?, - foreign_fn - .result - .clone() - .try_into() - .map_err(DlfcnError::Other)?, + .collect::, _>>()?, + foreign_fn.result.clone().try_into()?, ); let func_key = v8::String::new(scope, &symbol_key).unwrap(); @@ -304,9 +315,7 @@ fn sync_fn_impl<'s>( unsafe { result.to_v8(scope, data.symbol.result_type.clone()) }; rv.set(result); } - Err(err) => { - deno_core::_ops::throw_type_error(scope, err.to_string()); - } + Err(err) => deno_core::error::throw_js_error_class(scope, &err), }; } diff --git a/ext/ffi/ir.rs b/ext/ffi/ir.rs index 7b2f167ce75d91..a1877b4a2bb9b2 100644 --- a/ext/ffi/ir.rs +++ b/ext/ffi/ir.rs @@ -8,7 +8,8 @@ use libffi::middle::Arg; use crate::symbol::NativeType; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum IRError { #[error("Invalid FFI u8 type, expected boolean")] InvalidU8ExpectedBoolean, diff --git a/ext/ffi/repr.rs b/ext/ffi/repr.rs index 05bef40a8969b2..bcd80cbf03e7af 100644 --- a/ext/ffi/repr.rs +++ b/ext/ffi/repr.rs @@ -11,7 +11,8 @@ use deno_core::OpState; use crate::FfiPermissions; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum ReprError { #[error("Invalid pointer to offset, pointer is null")] InvalidOffset, @@ -47,6 +48,7 @@ pub enum ReprError { InvalidF64, #[error("Invalid pointer pointer, pointer is null")] InvalidPointer, + #[class(inherit)] #[error(transparent)] Permission(#[from] deno_permissions::PermissionCheckError), } diff --git a/ext/ffi/static.rs b/ext/ffi/static.rs index 6ad7fe6d37e020..6d999430e383f9 100644 --- a/ext/ffi/static.rs +++ b/ext/ffi/static.rs @@ -10,16 +10,20 @@ use deno_core::ResourceId; use crate::dlfcn::DynamicLibraryResource; use crate::symbol::NativeType; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum StaticError { + #[class(inherit)] #[error(transparent)] Dlfcn(super::DlfcnError), + #[class(type)] #[error("Invalid FFI static type 'void'")] InvalidTypeVoid, + #[class(type)] #[error("Invalid FFI static type 'struct'")] InvalidTypeStruct, + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), } #[op2] @@ -31,10 +35,7 @@ pub fn op_ffi_get_static<'scope>( #[serde] static_type: NativeType, optional: bool, ) -> Result, StaticError> { - let resource = state - .resource_table - .get::(rid) - .map_err(StaticError::Resource)?; + let resource = state.resource_table.get::(rid)?; let data_ptr = match resource.get_static(name) { Ok(data_ptr) => data_ptr, diff --git a/ext/ffi/symbol.rs b/ext/ffi/symbol.rs index c4a68cf753c688..5bca5be6d2adae 100644 --- a/ext/ffi/symbol.rs +++ b/ext/ffi/symbol.rs @@ -1,7 +1,6 @@ // Copyright 2018-2025 the Deno authors. MIT license. -use deno_core::error::type_error; -use deno_core::error::AnyError; +use deno_error::JsErrorBox; /// Defines the accepted types that can be used as /// parameters and return values in FFI. 
@@ -29,7 +28,7 @@ pub enum NativeType { } impl TryFrom for libffi::middle::Type { - type Error = AnyError; + type Error = JsErrorBox; fn try_from(native_type: NativeType) -> Result { Ok(match native_type { @@ -56,7 +55,9 @@ impl TryFrom for libffi::middle::Type { .map(|field| field.clone().try_into()) .collect::, _>>()?, false => { - return Err(type_error("Struct must have at least one field")) + return Err(JsErrorBox::type_error( + "Struct must have at least one field", + )) } }) } diff --git a/ext/fs/Cargo.toml b/ext/fs/Cargo.toml index b7d22b7f938268..05141e46c96ef6 100644 --- a/ext/fs/Cargo.toml +++ b/ext/fs/Cargo.toml @@ -21,6 +21,7 @@ async-trait.workspace = true base32.workspace = true boxed_error.workspace = true deno_core.workspace = true +deno_error.workspace = true deno_io.workspace = true deno_path_util.workspace = true deno_permissions.workspace = true diff --git a/ext/fs/ops.rs b/ext/fs/ops.rs index 64be03ea61100c..9f5f3c6e900df0 100644 --- a/ext/fs/ops.rs +++ b/ext/fs/ops.rs @@ -12,6 +12,7 @@ use std::path::StripPrefixError; use std::rc::Rc; use boxed_error::Boxed; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::CancelFuture; use deno_core::CancelHandle; @@ -20,6 +21,7 @@ use deno_core::JsBuffer; use deno_core::OpState; use deno_core::ResourceId; use deno_core::ToJsBuffer; +use deno_error::JsErrorBox; use deno_io::fs::FileResource; use deno_io::fs::FsError; use deno_io::fs::FsStat; @@ -36,34 +38,46 @@ use crate::interface::FsFileType; use crate::FsPermissions; use crate::OpenOptions; -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, deno_error::JsError)] pub struct FsOpsError(pub Box); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum FsOpsErrorKind { + #[class(inherit)] #[error("{0}")] Io(#[source] std::io::Error), + #[class(inherit)] #[error("{0}")] OperationError(#[source] OperationError), + #[class(inherit)] #[error(transparent)] Permission(#[from] PermissionCheckError), + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] ResourceError), + #[class("InvalidData")] #[error("File name or path {0:?} is not valid UTF-8")] InvalidUtf8(std::ffi::OsString), + #[class(generic)] #[error("{0}")] StripPrefix(#[from] StripPrefixError), + #[class(inherit)] #[error("{0}")] Canceled(#[from] deno_core::Canceled), + #[class(type)] #[error("Invalid seek mode: {0}")] InvalidSeekMode(i32), + #[class(generic)] #[error("Invalid control character in prefix or suffix: {0:?}")] InvalidControlCharacter(String), + #[class(generic)] #[error("Invalid character in prefix or suffix: {0:?}")] InvalidCharacter(String), #[cfg(windows)] + #[class(generic)] #[error("Invalid trailing character in suffix")] InvalidTrailingCharacter, + #[class("NotCapable")] #[error("Requires {err} access to {path}, {}", print_not_capable_info(*.standalone, .err))] NotCapableAccess { // NotCapable @@ -71,21 +85,21 @@ pub enum FsOpsErrorKind { err: &'static str, path: String, }, + #[class("NotCapable")] #[error("permission denied: {0}")] - NotCapable(&'static str), // NotCapable + NotCapable(&'static str), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Other(JsErrorBox), } impl From for FsOpsError { fn from(err: FsError) -> Self { match err { FsError::Io(err) => FsOpsErrorKind::Io(err), - FsError::FileBusy => { - FsOpsErrorKind::Other(deno_core::error::resource_unavailable()) - } + FsError::FileBusy => FsOpsErrorKind::Resource(ResourceError::Unavailable), 
FsError::NotSupported => { - FsOpsErrorKind::Other(deno_core::error::not_supported()) + FsOpsErrorKind::Other(JsErrorBox::not_supported()) } FsError::NotCapable(err) => FsOpsErrorKind::NotCapable(err), } @@ -1666,10 +1680,12 @@ pub async fn op_fs_futime_async( Ok(()) } -#[derive(Debug)] +#[derive(Debug, deno_error::JsError)] +#[class(inherit)] pub struct OperationError { operation: &'static str, kind: OperationErrorKind, + #[inherit] pub err: FsError, } diff --git a/ext/http/Cargo.toml b/ext/http/Cargo.toml index 8f131b84e60e08..1ecb6f66c85915 100644 --- a/ext/http/Cargo.toml +++ b/ext/http/Cargo.toml @@ -28,6 +28,7 @@ brotli.workspace = true bytes.workspace = true cache_control.workspace = true deno_core.workspace = true +deno_error.workspace = true deno_net.workspace = true deno_websocket.workspace = true flate2.workspace = true diff --git a/ext/http/http_next.rs b/ext/http/http_next.rs index d9861a0a6b69b1..82edf817bfe5fa 100644 --- a/ext/http/http_next.rs +++ b/ext/http/http_next.rs @@ -146,24 +146,44 @@ macro_rules! clone_external { }}; } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum HttpNextError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(inherit)] #[error("{0}")] Io(#[from] io::Error), + #[class(inherit)] #[error(transparent)] WebSocketUpgrade(crate::websocket_upgrade::WebSocketUpgradeError), + #[class("Http")] #[error("{0}")] Hyper(#[from] hyper::Error), + #[class(inherit)] #[error(transparent)] - JoinError(#[from] tokio::task::JoinError), + JoinError( + #[from] + #[inherit] + tokio::task::JoinError, + ), + #[class(inherit)] #[error(transparent)] - Canceled(#[from] deno_core::Canceled), - #[error(transparent)] - HttpPropertyExtractor(deno_core::error::AnyError), + Canceled( + #[from] + #[inherit] + deno_core::Canceled, + ), + #[class(generic)] #[error(transparent)] UpgradeUnavailable(#[from] crate::service::UpgradeUnavailableError), + #[class(inherit)] + #[error("{0}")] + Other( + #[from] + #[inherit] + deno_error::JsErrorBox, + ), } #[op2(fast)] @@ -747,15 +767,9 @@ pub async fn op_http_set_response_body_resource( let resource = { let mut state = state.borrow_mut(); if auto_close { - state - .resource_table - .take_any(stream_rid) - .map_err(HttpNextError::Resource)? + state.resource_table.take_any(stream_rid)? } else { - state - .resource_table - .get_any(stream_rid) - .map_err(HttpNextError::Resource)? + state.resource_table.get_any(stream_rid)? 
} }; @@ -1063,8 +1077,7 @@ where HTTP: HttpPropertyExtractor, { let listener = - HTTP::get_listener_for_rid(&mut state.borrow_mut(), listener_rid) - .map_err(HttpNextError::Resource)?; + HTTP::get_listener_for_rid(&mut state.borrow_mut(), listener_rid)?; let listen_properties = HTTP::listen_properties_from_listener(&listener)?; @@ -1084,8 +1097,7 @@ where loop { let conn = HTTP::accept_connection_from_listener(&listener) .try_or_cancel(listen_cancel_clone.clone()) - .await - .map_err(HttpNextError::HttpPropertyExtractor)?; + .await?; serve_http_on::( conn, &listen_properties_clone, @@ -1120,8 +1132,7 @@ where HTTP: HttpPropertyExtractor, { let connection = - HTTP::get_connection_for_rid(&mut state.borrow_mut(), connection_rid) - .map_err(HttpNextError::Resource)?; + HTTP::get_connection_for_rid(&mut state.borrow_mut(), connection_rid)?; let listen_properties = HTTP::listen_properties_from_connection(&connection)?; @@ -1190,8 +1201,7 @@ pub async fn op_http_wait( let join_handle = state .borrow_mut() .resource_table - .get::(rid) - .map_err(HttpNextError::Resource)?; + .get::(rid)?; let cancel = join_handle.listen_cancel_handle(); let next = async { @@ -1236,7 +1246,7 @@ pub fn op_http_cancel( state: &mut OpState, #[smi] rid: ResourceId, graceful: bool, -) -> Result<(), deno_core::error::AnyError> { +) -> Result<(), deno_core::error::ResourceError> { let join_handle = state.resource_table.get::(rid)?; if graceful { @@ -1260,8 +1270,7 @@ pub async fn op_http_close( let join_handle = state .borrow_mut() .resource_table - .take::(rid) - .map_err(HttpNextError::Resource)?; + .take::(rid)?; if graceful { http_general_trace!("graceful shutdown"); @@ -1390,11 +1399,8 @@ pub async fn op_raw_write_vectored( #[buffer] buf1: JsBuffer, #[buffer] buf2: JsBuffer, ) -> Result { - let resource: Rc = state - .borrow() - .resource_table - .get::(rid) - .map_err(HttpNextError::Resource)?; + let resource: Rc = + state.borrow().resource_table.get::(rid)?; let nwritten = resource.write_vectored(&buf1, &buf2).await?; Ok(nwritten) } diff --git a/ext/http/lib.rs b/ext/http/lib.rs index e68bf3787dcc0c..981ca9f0c06086 100644 --- a/ext/http/lib.rs +++ b/ext/http/lib.rs @@ -51,6 +51,7 @@ use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; use deno_core::StringOrBuffer; +use deno_error::JsErrorBox; use deno_net::raw::NetworkStream; use deno_websocket::ws_create_server_stream; use flate2::write::GzEncoder; @@ -165,36 +166,50 @@ deno_core::extension!( } ); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum HttpError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(inherit)] #[error(transparent)] Canceled(#[from] deno_core::Canceled), + #[class("Http")] #[error("{0}")] HyperV014(#[source] Arc), + #[class(generic)] #[error("{0}")] InvalidHeaderName(#[from] hyper_v014::header::InvalidHeaderName), + #[class(generic)] #[error("{0}")] InvalidHeaderValue(#[from] hyper_v014::header::InvalidHeaderValue), + #[class(generic)] #[error("{0}")] Http(#[from] hyper_v014::http::Error), + #[class("Http")] #[error("response headers already sent")] ResponseHeadersAlreadySent, + #[class("Http")] #[error("connection closed while sending response")] ConnectionClosedWhileSendingResponse, + #[class("Http")] #[error("already in use")] AlreadyInUse, + #[class(inherit)] #[error("{0}")] Io(#[from] std::io::Error), + #[class("Http")] #[error("no response headers")] NoResponseHeaders, 
+ #[class("Http")] #[error("response already completed")] ResponseAlreadyCompleted, + #[class("Http")] #[error("cannot upgrade because request body was used")] UpgradeBodyUsed, + #[class("Http")] #[error(transparent)] - Other(deno_core::error::AnyError), + Other(#[from] JsErrorBox), } pub enum HttpSocketAddr { @@ -486,7 +501,9 @@ impl Resource for HttpStreamReadResource { Some(_) => match body.as_mut().next().await.unwrap() { Ok(chunk) => assert!(chunk.is_empty()), Err(err) => { - break Err(HttpError::HyperV014(Arc::new(err)).into()) + break Err(JsErrorBox::from_err(HttpError::HyperV014( + Arc::new(err), + ))) } }, None => break Ok(BufView::empty()), @@ -610,11 +627,7 @@ async fn op_http_accept( state: Rc>, #[smi] rid: ResourceId, ) -> Result, HttpError> { - let conn = state - .borrow() - .resource_table - .get::(rid) - .map_err(HttpError::Resource)?; + let conn = state.borrow().resource_table.get::(rid)?; match conn.accept().await { Ok(Some((read_stream, write_stream, method, url))) => { @@ -729,8 +742,7 @@ async fn op_http_write_headers( let stream = state .borrow_mut() .resource_table - .get::(rid) - .map_err(HttpError::Resource)?; + .get::(rid)?; // Track supported encoding let encoding = stream.accept_encoding; @@ -795,10 +807,7 @@ fn op_http_headers( state: &mut OpState, #[smi] rid: u32, ) -> Result, HttpError> { - let stream = state - .resource_table - .get::(rid) - .map_err(HttpError::Resource)?; + let stream = state.resource_table.get::(rid)?; let rd = RcRef::map(&stream, |r| &r.rd) .try_borrow() .ok_or(HttpError::AlreadyInUse)?; @@ -954,14 +963,9 @@ async fn op_http_write_resource( let http_stream = state .borrow() .resource_table - .get::(rid) - .map_err(HttpError::Resource)?; + .get::(rid)?; let mut wr = RcRef::map(&http_stream, |r| &r.wr).borrow_mut().await; - let resource = state - .borrow() - .resource_table - .get_any(stream) - .map_err(HttpError::Resource)?; + let resource = state.borrow().resource_table.get_any(stream)?; loop { match *wr { HttpResponseWriter::Headers(_) => { @@ -973,11 +977,7 @@ async fn op_http_write_resource( _ => {} }; - let view = resource - .clone() - .read(64 * 1024) - .await - .map_err(HttpError::Other)?; // 64KB + let view = resource.clone().read(64 * 1024).await?; // 64KB if view.is_empty() { break; } @@ -1022,8 +1022,7 @@ async fn op_http_write( let stream = state .borrow() .resource_table - .get::(rid) - .map_err(HttpError::Resource)?; + .get::(rid)?; let mut wr = RcRef::map(&stream, |r| &r.wr).borrow_mut().await; match &mut *wr { @@ -1075,8 +1074,7 @@ async fn op_http_shutdown( let stream = state .borrow() .resource_table - .get::(rid) - .map_err(HttpError::Resource)?; + .get::(rid)?; let mut wr = RcRef::map(&stream, |r| &r.wr).borrow_mut().await; let wr = take(&mut *wr); match wr { @@ -1122,8 +1120,7 @@ async fn op_http_upgrade_websocket( let stream = state .borrow_mut() .resource_table - .get::(rid) - .map_err(HttpError::Resource)?; + .get::(rid)?; let mut rd = RcRef::map(&stream, |r| &r.rd).borrow_mut().await; let request = match &mut *rd { diff --git a/ext/http/request_body.rs b/ext/http/request_body.rs index e7c9a06e2c4323..50ca1635c3a8de 100644 --- a/ext/http/request_body.rs +++ b/ext/http/request_body.rs @@ -15,6 +15,7 @@ use deno_core::AsyncResult; use deno_core::BufView; use deno_core::RcRef; use deno_core::Resource; +use deno_error::JsErrorBox; use hyper::body::Body; use hyper::body::Incoming; use hyper::body::SizeHint; @@ -83,7 +84,10 @@ impl Resource for HttpRequestBody { } fn read(self: Rc, limit: usize) -> AsyncResult { - 
Box::pin(HttpRequestBody::read(self, limit).map_err(Into::into)) + Box::pin( + HttpRequestBody::read(self, limit) + .map_err(|e| JsErrorBox::new("Http", e.to_string())), + ) } fn size_hint(&self) -> (u64, Option) { diff --git a/ext/http/request_properties.rs b/ext/http/request_properties.rs index 9e60a22baf278f..32ae33a34a47a0 100644 --- a/ext/http/request_properties.rs +++ b/ext/http/request_properties.rs @@ -5,9 +5,9 @@ use std::net::SocketAddr; use std::net::SocketAddrV4; use std::rc::Rc; -use deno_core::error::AnyError; use deno_core::OpState; use deno_core::ResourceId; +use deno_error::JsErrorBox; use deno_net::raw::take_network_stream_listener_resource; use deno_net::raw::take_network_stream_resource; use deno_net::raw::NetworkStream; @@ -50,13 +50,13 @@ pub trait HttpPropertyExtractor { fn get_listener_for_rid( state: &mut OpState, listener_rid: ResourceId, - ) -> Result; + ) -> Result; /// Given a connection [`ResourceId`], returns the [`HttpPropertyExtractor::Connection`]. fn get_connection_for_rid( state: &mut OpState, connection_rid: ResourceId, - ) -> Result; + ) -> Result; /// Determines the listener properties. fn listen_properties_from_listener( @@ -71,7 +71,7 @@ pub trait HttpPropertyExtractor { /// Accept a new [`HttpPropertyExtractor::Connection`] from the given listener [`HttpPropertyExtractor::Listener`]. async fn accept_connection_from_listener( listener: &Self::Listener, - ) -> Result; + ) -> Result; /// Determines the connection properties. fn connection_properties( @@ -103,7 +103,7 @@ impl HttpPropertyExtractor for DefaultHttpPropertyExtractor { fn get_listener_for_rid( state: &mut OpState, listener_rid: ResourceId, - ) -> Result { + ) -> Result { take_network_stream_listener_resource( &mut state.resource_table, listener_rid, @@ -113,17 +113,18 @@ impl HttpPropertyExtractor for DefaultHttpPropertyExtractor { fn get_connection_for_rid( state: &mut OpState, stream_rid: ResourceId, - ) -> Result { + ) -> Result { take_network_stream_resource(&mut state.resource_table, stream_rid) + .map_err(JsErrorBox::from_err) } async fn accept_connection_from_listener( listener: &NetworkStreamListener, - ) -> Result { + ) -> Result { listener .accept() .await - .map_err(Into::into) + .map_err(JsErrorBox::from_err) .map(|(stm, _)| stm) } diff --git a/ext/http/response_body.rs b/ext/http/response_body.rs index ff264a83055d8f..6960e7c0fb3dde 100644 --- a/ext/http/response_body.rs +++ b/ext/http/response_body.rs @@ -9,12 +9,12 @@ use brotli::enc::encode::BrotliEncoderStateStruct; use brotli::writer::StandardAlloc; use bytes::Bytes; use bytes::BytesMut; -use deno_core::error::AnyError; use deno_core::futures::ready; use deno_core::futures::FutureExt; use deno_core::AsyncResult; use deno_core::BufView; use deno_core::Resource; +use deno_error::JsErrorBox; use flate2::write::GzEncoder; use hyper::body::Frame; use hyper::body::SizeHint; @@ -32,10 +32,10 @@ pub enum ResponseStreamResult { /// will only be returned from compression streams that require additional buffering. NoData, /// Stream failed. 
- Error(AnyError), + Error(JsErrorBox), } -impl From for Option, AnyError>> { +impl From for Option, JsErrorBox>> { fn from(value: ResponseStreamResult) -> Self { match value { ResponseStreamResult::EndOfStream => None, @@ -411,7 +411,9 @@ impl PollFrame for GZipResponseStream { }; let len = stm.total_out() - start_out; let res = match res { - Err(err) => ResponseStreamResult::Error(err.into()), + Err(err) => { + ResponseStreamResult::Error(JsErrorBox::generic(err.to_string())) + } Ok(flate2::Status::BufError) => { // This should not happen unreachable!("old={orig_state:?} new={state:?} buf_len={}", buf.len()); diff --git a/ext/http/service.rs b/ext/http/service.rs index 3b7db49fc4ab5c..f220f8d8a7813b 100644 --- a/ext/http/service.rs +++ b/ext/http/service.rs @@ -15,6 +15,7 @@ use deno_core::futures::ready; use deno_core::BufView; use deno_core::OpState; use deno_core::ResourceId; +use deno_error::JsErrorBox; use http::request::Parts; use hyper::body::Body; use hyper::body::Frame; @@ -529,7 +530,7 @@ pub struct HttpRecordResponse(ManuallyDrop>); impl Body for HttpRecordResponse { type Data = BufView; - type Error = deno_core::error::AnyError; + type Error = JsErrorBox; fn poll_frame( self: Pin<&mut Self>, diff --git a/ext/http/websocket_upgrade.rs b/ext/http/websocket_upgrade.rs index aae4a13883ee93..e030f1c7ae55f5 100644 --- a/ext/http/websocket_upgrade.rs +++ b/ext/http/websocket_upgrade.rs @@ -12,22 +12,30 @@ use memmem::Searcher; use memmem::TwoWaySearcher; use once_cell::sync::OnceCell; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum WebSocketUpgradeError { + #[class("Http")] #[error("invalid headers")] InvalidHeaders, + #[class(generic)] #[error("{0}")] HttpParse(#[from] httparse::Error), + #[class(generic)] #[error("{0}")] Http(#[from] http::Error), + #[class(generic)] #[error("{0}")] Utf8(#[from] std::str::Utf8Error), + #[class(generic)] #[error("{0}")] InvalidHeaderName(#[from] http::header::InvalidHeaderName), + #[class(generic)] #[error("{0}")] InvalidHeaderValue(#[from] http::header::InvalidHeaderValue), + #[class("Http")] #[error("invalid HTTP status line")] InvalidHttpStatusLine, + #[class("Http")] #[error("attempted to write to completed upgrade buffer")] UpgradeBufferAlreadyCompleted, } diff --git a/ext/io/Cargo.toml b/ext/io/Cargo.toml index 782bd64444e51c..9d11e1b0f6856c 100644 --- a/ext/io/Cargo.toml +++ b/ext/io/Cargo.toml @@ -16,6 +16,7 @@ path = "lib.rs" [dependencies] async-trait.workspace = true deno_core.workspace = true +deno_error.workspace = true filetime.workspace = true fs3.workspace = true log.workspace = true diff --git a/ext/io/fs.rs b/ext/io/fs.rs index ee0c7da5daf04a..d8767aa116a1e7 100644 --- a/ext/io/fs.rs +++ b/ext/io/fs.rs @@ -7,18 +7,24 @@ use std::rc::Rc; use std::time::SystemTime; use std::time::UNIX_EPOCH; +use deno_core::error::ResourceError; use deno_core::BufMutView; use deno_core::BufView; use deno_core::OpState; use deno_core::ResourceHandleFd; use deno_core::ResourceId; +use deno_error::JsErrorBox; use tokio::task::JoinError; -#[derive(Debug)] +#[derive(Debug, deno_error::JsError)] pub enum FsError { + #[class(inherit)] Io(io::Error), + #[class("Busy")] FileBusy, + #[class(not_supported)] NotSupported, + #[class("NotCapable")] NotCapable(&'static str), } @@ -277,18 +283,21 @@ impl FileResource { state: &OpState, rid: ResourceId, f: F, - ) -> Result + ) -> Result where - F: FnOnce(Rc) -> Result, + F: FnOnce(Rc) -> Result, { - let resource = state.resource_table.get::(rid)?; + let resource 
= state + .resource_table + .get::(rid) + .map_err(JsErrorBox::from_err)?; f(resource) } pub fn get_file( state: &OpState, rid: ResourceId, - ) -> Result, deno_core::error::AnyError> { + ) -> Result, ResourceError> { let resource = state.resource_table.get::(rid)?; Ok(resource.file()) } @@ -297,9 +306,9 @@ impl FileResource { state: &OpState, rid: ResourceId, f: F, - ) -> Result + ) -> Result where - F: FnOnce(Rc) -> Result, + F: FnOnce(Rc) -> Result, { Self::with_resource(state, rid, |r| f(r.file.clone())) } @@ -321,7 +330,7 @@ impl deno_core::Resource for FileResource { .clone() .read(limit) .await - .map_err(|err| err.into()) + .map_err(JsErrorBox::from_err) }) } @@ -335,7 +344,7 @@ impl deno_core::Resource for FileResource { .clone() .read_byob(buf) .await - .map_err(|err| err.into()) + .map_err(JsErrorBox::from_err) }) } @@ -344,7 +353,12 @@ impl deno_core::Resource for FileResource { buf: BufView, ) -> deno_core::AsyncResult { Box::pin(async move { - self.file.clone().write(buf).await.map_err(|err| err.into()) + self + .file + .clone() + .write(buf) + .await + .map_err(JsErrorBox::from_err) }) } @@ -355,22 +369,27 @@ impl deno_core::Resource for FileResource { .clone() .write_all(buf) .await - .map_err(|err| err.into()) + .map_err(JsErrorBox::from_err) }) } fn read_byob_sync( self: Rc, data: &mut [u8], - ) -> Result { - self.file.clone().read_sync(data).map_err(|err| err.into()) + ) -> Result { + self + .file + .clone() + .read_sync(data) + .map_err(JsErrorBox::from_err) } - fn write_sync( - self: Rc, - data: &[u8], - ) -> Result { - self.file.clone().write_sync(data).map_err(|err| err.into()) + fn write_sync(self: Rc, data: &[u8]) -> Result { + self + .file + .clone() + .write_sync(data) + .map_err(JsErrorBox::from_err) } fn backing_fd(self: Rc) -> Option { diff --git a/ext/io/lib.rs b/ext/io/lib.rs index f78e5a58fcb665..1f92ae5c8b3097 100644 --- a/ext/io/lib.rs +++ b/ext/io/lib.rs @@ -33,6 +33,7 @@ use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceHandle; use deno_core::ResourceHandleFd; +use deno_error::JsErrorBox; use fs::FileResource; use fs::FsError; use fs::FsResult; @@ -414,7 +415,7 @@ impl Resource for ChildStdinResource { deno_core::impl_writable!(); fn shutdown(self: Rc) -> AsyncResult<()> { - Box::pin(self.shutdown().map_err(|e| e.into())) + Box::pin(self.shutdown().map_err(JsErrorBox::from_err)) } } @@ -1007,9 +1008,11 @@ pub fn op_print( state: &mut OpState, #[string] msg: &str, is_err: bool, -) -> Result<(), deno_core::error::AnyError> { +) -> Result<(), JsErrorBox> { let rid = if is_err { 2 } else { 1 }; FileResource::with_file(state, rid, move |file| { - Ok(file.write_all_sync(msg.as_bytes())?) 
+ file + .write_all_sync(msg.as_bytes()) + .map_err(JsErrorBox::from_err) }) } diff --git a/ext/kv/Cargo.toml b/ext/kv/Cargo.toml index 451fa50cf99392..1a1cd346fbd2dc 100644 --- a/ext/kv/Cargo.toml +++ b/ext/kv/Cargo.toml @@ -21,6 +21,7 @@ boxed_error.workspace = true bytes.workspace = true chrono = { workspace = true, features = ["now"] } deno_core.workspace = true +deno_error.workspace = true deno_fetch.workspace = true deno_path_util.workspace = true deno_permissions.workspace = true diff --git a/ext/kv/dynamic.rs b/ext/kv/dynamic.rs index 33e3d4842f548f..923e3cd4d82d80 100644 --- a/ext/kv/dynamic.rs +++ b/ext/kv/dynamic.rs @@ -4,9 +4,8 @@ use std::cell::RefCell; use std::rc::Rc; use async_trait::async_trait; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::OpState; +use deno_error::JsErrorBox; use denokv_proto::CommitResult; use denokv_proto::ReadRangeOutput; use denokv_proto::WatchStream; @@ -63,7 +62,7 @@ impl DatabaseHandler for MultiBackendDbHandler { &self, state: Rc>, path: Option, - ) -> Result { + ) -> Result { for (prefixes, handler) in &self.backends { for &prefix in *prefixes { if prefix.is_empty() { @@ -77,7 +76,7 @@ impl DatabaseHandler for MultiBackendDbHandler { } } } - Err(type_error(format!( + Err(JsErrorBox::type_error(format!( "No backend supports the given path: {:?}", path ))) @@ -90,7 +89,7 @@ pub trait DynamicDbHandler { &self, state: Rc>, path: Option, - ) -> Result; + ) -> Result; } #[async_trait(?Send)] @@ -101,7 +100,7 @@ impl DatabaseHandler for Box { &self, state: Rc>, path: Option, - ) -> Result { + ) -> Result { (**self).dyn_open(state, path).await } } @@ -116,7 +115,7 @@ where &self, state: Rc>, path: Option, - ) -> Result { + ) -> Result { Ok(RcDynamicDb(Rc::new(self.open(state, path).await?))) } } @@ -127,16 +126,16 @@ pub trait DynamicDb { &self, requests: Vec, options: SnapshotReadOptions, - ) -> Result, AnyError>; + ) -> Result, JsErrorBox>; async fn dyn_atomic_write( &self, write: AtomicWrite, - ) -> Result, AnyError>; + ) -> Result, JsErrorBox>; async fn dyn_dequeue_next_message( &self, - ) -> Result>, AnyError>; + ) -> Result>, JsErrorBox>; fn dyn_watch(&self, keys: Vec>) -> WatchStream; @@ -154,20 +153,20 @@ impl Database for RcDynamicDb { &self, requests: Vec, options: SnapshotReadOptions, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { (*self.0).dyn_snapshot_read(requests, options).await } async fn atomic_write( &self, write: AtomicWrite, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { (*self.0).dyn_atomic_write(write).await } async fn dequeue_next_message( &self, - ) -> Result>, AnyError> { + ) -> Result>, JsErrorBox> { (*self.0).dyn_dequeue_next_message().await } @@ -190,20 +189,20 @@ where &self, requests: Vec, options: SnapshotReadOptions, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { Ok(self.snapshot_read(requests, options).await?) } async fn dyn_atomic_write( &self, write: AtomicWrite, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { Ok(self.atomic_write(write).await?) 
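// Aside: the JsErrorBox constructors this patch substitutes for the old
// AnyError helpers (type_error, custom_error, anyhow!), as used in the io/kv
// hunks above; read_config_len and its rules are made up for illustration.
use deno_error::JsErrorBox;

fn read_config_len(path: &str) -> Result<usize, JsErrorBox> {
  if path.is_empty() {
    // Replaces deno_core::error::type_error(...): surfaces as a TypeError.
    return Err(JsErrorBox::type_error("path cannot be empty"));
  }
  let bytes = std::fs::read(path)
    // Replaces `.map_err(Into::into)` into AnyError: the io::Error keeps its
    // own JS class (e.g. "NotFound") when boxed with from_err.
    .map_err(JsErrorBox::from_err)?;
  if bytes.is_empty() {
    // Replaces custom_error("SomeClass", ...): an explicit class name.
    return Err(JsErrorBox::new("InvalidData", "config file is empty"));
  }
  if bytes.len() > 1024 * 1024 {
    // Replaces anyhow!(...)-style ad-hoc messages: a plain generic Error.
    return Err(JsErrorBox::generic(format!(
      "config file too large: {} bytes",
      bytes.len()
    )));
  }
  Ok(bytes.len())
}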
} async fn dyn_dequeue_next_message( &self, - ) -> Result>, AnyError> { + ) -> Result>, JsErrorBox> { Ok( self .dequeue_next_message() diff --git a/ext/kv/interface.rs b/ext/kv/interface.rs index fec0ef1afdd070..df106fde7863ae 100644 --- a/ext/kv/interface.rs +++ b/ext/kv/interface.rs @@ -4,8 +4,8 @@ use std::cell::RefCell; use std::rc::Rc; use async_trait::async_trait; -use deno_core::error::AnyError; use deno_core::OpState; +use deno_error::JsErrorBox; use denokv_proto::Database; #[async_trait(?Send)] @@ -16,5 +16,5 @@ pub trait DatabaseHandler { &self, state: Rc>, path: Option, - ) -> Result; + ) -> Result; } diff --git a/ext/kv/lib.rs b/ext/kv/lib.rs index 82dda6d7595da3..61458888a90c72 100644 --- a/ext/kv/lib.rs +++ b/ext/kv/lib.rs @@ -17,7 +17,6 @@ use base64::Engine; use boxed_error::Boxed; use chrono::DateTime; use chrono::Utc; -use deno_core::error::get_custom_error_class; use deno_core::futures::StreamExt; use deno_core::op2; use deno_core::serde_v8::AnyValue; @@ -32,6 +31,8 @@ use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; use deno_core::ToJsBuffer; +use deno_error::JsErrorBox; +use deno_error::JsErrorClass; use denokv_proto::decode_key; use denokv_proto::encode_key; use denokv_proto::AtomicWrite; @@ -115,65 +116,93 @@ impl Resource for DatabaseWatcherResource { } } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, deno_error::JsError)] pub struct KvError(pub Box); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum KvErrorKind { + #[class(inherit)] #[error(transparent)] - DatabaseHandler(deno_core::error::AnyError), + DatabaseHandler(JsErrorBox), + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(type)] #[error("Too many ranges (max {0})")] TooManyRanges(usize), + #[class(type)] #[error("Too many entries (max {0})")] TooManyEntries(usize), + #[class(type)] #[error("Too many checks (max {0})")] TooManyChecks(usize), + #[class(type)] #[error("Too many mutations (max {0})")] TooManyMutations(usize), + #[class(type)] #[error("Too many keys (max {0})")] TooManyKeys(usize), + #[class(type)] #[error("limit must be greater than 0")] InvalidLimit, + #[class(type)] #[error("Invalid boundary key")] InvalidBoundaryKey, + #[class(type)] #[error("Key too large for read (max {0} bytes)")] KeyTooLargeToRead(usize), + #[class(type)] #[error("Key too large for write (max {0} bytes)")] KeyTooLargeToWrite(usize), + #[class(type)] #[error("Total mutation size too large (max {0} bytes)")] TotalMutationTooLarge(usize), + #[class(type)] #[error("Total key size too large (max {0} bytes)")] TotalKeyTooLarge(usize), + #[class(inherit)] #[error(transparent)] - Kv(deno_core::error::AnyError), + Kv(JsErrorBox), + #[class(inherit)] #[error(transparent)] Io(#[from] std::io::Error), + #[class(type)] #[error("Queue message not found")] QueueMessageNotFound, + #[class(type)] #[error("Start key is not in the keyspace defined by prefix")] StartKeyNotInKeyspace, + #[class(type)] #[error("End key is not in the keyspace defined by prefix")] EndKeyNotInKeyspace, + #[class(type)] #[error("Start key is greater than end key")] StartKeyGreaterThanEndKey, + #[class(inherit)] #[error("Invalid check")] InvalidCheck(#[source] KvCheckError), + #[class(inherit)] #[error("Invalid mutation")] InvalidMutation(#[source] KvMutationError), + #[class(inherit)] #[error("Invalid enqueue")] InvalidEnqueue(#[source] std::io::Error), + #[class(type)] #[error("key cannot be 
empty")] - EmptyKey, // TypeError + EmptyKey, + #[class(type)] #[error("Value too large (max {0} bytes)")] - ValueTooLarge(usize), // TypeError + ValueTooLarge(usize), + #[class(type)] #[error("enqueue payload too large (max {0} bytes)")] - EnqueuePayloadTooLarge(usize), // TypeError + EnqueuePayloadTooLarge(usize), + #[class(type)] #[error("invalid cursor")] InvalidCursor, + #[class(type)] #[error("cursor out of bounds")] CursorOutOfBounds, + #[class(type)] #[error("Invalid range")] InvalidRange, } @@ -418,7 +447,7 @@ where match state.resource_table.get::>(rid) { Ok(resource) => resource, Err(err) => { - if get_custom_error_class(&err) == Some("BadResource") { + if err.get_class() == "BadResource" { return Ok(None); } else { return Err(KvErrorKind::Resource(err).into_box()); @@ -568,10 +597,12 @@ where Ok(()) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum KvCheckError { + #[class(type)] #[error("invalid versionstamp")] InvalidVersionstamp, + #[class(inherit)] #[error(transparent)] Io(std::io::Error), } @@ -597,14 +628,22 @@ fn check_from_v8(value: V8KvCheck) -> Result { }) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum KvMutationError { + #[class(generic)] #[error(transparent)] BigInt(#[from] num_bigint::TryFromBigIntError), + #[class(inherit)] #[error(transparent)] - Io(#[from] std::io::Error), + Io( + #[from] + #[inherit] + std::io::Error, + ), + #[class(type)] #[error("Invalid mutation '{0}' with value")] InvalidMutationWithValue(String), + #[class(type)] #[error("Invalid mutation '{0}' without value")] InvalidMutationWithoutValue(String), } diff --git a/ext/kv/remote.rs b/ext/kv/remote.rs index cb408ef644d541..e5a07ad96c3dc5 100644 --- a/ext/kv/remote.rs +++ b/ext/kv/remote.rs @@ -8,10 +8,9 @@ use std::sync::Arc; use anyhow::Context; use async_trait::async_trait; use bytes::Bytes; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::futures::Stream; use deno_core::OpState; +use deno_error::JsErrorBox; use deno_fetch::create_http_client; use deno_fetch::CreateHttpClientOptions; use deno_permissions::PermissionCheckError; @@ -38,7 +37,7 @@ pub struct HttpOptions { } impl HttpOptions { - pub fn root_cert_store(&self) -> Result, AnyError> { + pub fn root_cert_store(&self) -> Result, JsErrorBox> { Ok(match &self.root_cert_store_provider { Some(provider) => Some(provider.get_or_try_init()?.clone()), None => None, @@ -102,12 +101,12 @@ impl Clone for PermissionChecker
{ impl denokv_remote::RemotePermissions for PermissionChecker
{ - fn check_net_url(&self, url: &Url) -> Result<(), anyhow::Error> { + fn check_net_url(&self, url: &Url) -> Result<(), JsErrorBox> { let mut state = self.state.borrow_mut(); let permissions = state.borrow_mut::
(); permissions .check_net_url(url, "Deno.openKv") - .map_err(Into::into) + .map_err(JsErrorBox::from_err) } } @@ -122,31 +121,43 @@ impl RemoteTransport for FetchClient { url: Url, headers: http::HeaderMap, body: Bytes, - ) -> Result<(Url, http::StatusCode, Self::Response), anyhow::Error> { + ) -> Result<(Url, http::StatusCode, Self::Response), JsErrorBox> { let body = deno_fetch::ReqBody::full(body); let mut req = http::Request::new(body); *req.method_mut() = http::Method::POST; - *req.uri_mut() = url.as_str().parse()?; + *req.uri_mut() = + url.as_str().parse().map_err(|e: http::uri::InvalidUri| { + JsErrorBox::type_error(e.to_string()) + })?; *req.headers_mut() = headers; - let res = self.0.clone().send(req).await?; + let res = self + .0 + .clone() + .send(req) + .await + .map_err(JsErrorBox::from_err)?; let status = res.status(); Ok((url, status, FetchResponse(res))) } } impl RemoteResponse for FetchResponse { - async fn bytes(self) -> Result { + async fn bytes(self) -> Result { Ok(self.0.collect().await?.to_bytes()) } fn stream( self, - ) -> impl Stream> + Send + Sync { + ) -> impl Stream> + Send + Sync { self.0.into_body().into_data_stream() } - async fn text(self) -> Result { + async fn text(self) -> Result { let bytes = self.bytes().await?; - Ok(std::str::from_utf8(&bytes)?.into()) + Ok( + std::str::from_utf8(&bytes) + .map_err(JsErrorBox::from_err)? + .into(), + ) } } @@ -160,29 +171,36 @@ impl DatabaseHandler &self, state: Rc>, path: Option, - ) -> Result { + ) -> Result { const ENV_VAR_NAME: &str = "DENO_KV_ACCESS_TOKEN"; let Some(url) = path else { - return Err(type_error("Missing database url")); + return Err(JsErrorBox::type_error("Missing database url")); }; let Ok(parsed_url) = Url::parse(&url) else { - return Err(type_error(format!("Invalid database url: {}", url))); + return Err(JsErrorBox::type_error(format!( + "Invalid database url: {}", + url + ))); }; { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::
(); - permissions.check_env(ENV_VAR_NAME)?; - permissions.check_net_url(&parsed_url, "Deno.openKv")?; + permissions + .check_env(ENV_VAR_NAME) + .map_err(JsErrorBox::from_err)?; + permissions + .check_net_url(&parsed_url, "Deno.openKv") + .map_err(JsErrorBox::from_err)?; } let access_token = std::env::var(ENV_VAR_NAME) .map_err(anyhow::Error::from) .with_context(|| { "Missing DENO_KV_ACCESS_TOKEN environment variable. Please set it to your access token from https://dash.deno.com/account." - })?; + }).map_err(|e| JsErrorBox::generic(e.to_string()))?; let metadata_endpoint = MetadataEndpoint { url: parsed_url.clone(), @@ -211,7 +229,8 @@ impl DatabaseHandler http2: true, client_builder_hook: None, }, - )?; + ) + .map_err(JsErrorBox::from_err)?; let fetch_client = FetchClient(client); let permissions = PermissionChecker { diff --git a/ext/kv/sqlite.rs b/ext/kv/sqlite.rs index a88c61d7237a89..8be042eef0ea8e 100644 --- a/ext/kv/sqlite.rs +++ b/ext/kv/sqlite.rs @@ -14,10 +14,9 @@ use std::sync::Mutex; use std::sync::OnceLock; use async_trait::async_trait; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::unsync::spawn_blocking; use deno_core::OpState; +use deno_error::JsErrorBox; use deno_path_util::normalize_path; use deno_permissions::PermissionCheckError; pub use denokv_sqlite::SqliteBackendError; @@ -85,6 +84,12 @@ impl SqliteDbHandler
{ } } +deno_error::js_error_wrapper!( + SqliteBackendError, + JsSqliteBackendError, + "TypeError" +); + #[async_trait(?Send)] impl DatabaseHandler for SqliteDbHandler
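// Aside: js_error_wrapper! (invoked just above for SqliteBackendError) is for
// error types from foreign crates that cannot implement JsErrorClass
// themselves: it generates a newtype wrapper pinned to one JS class. A sketch
// with an illustrative foreign error type, following the shape of the
// invocation above; the generated wrapper's exact surface is deno_error's.
#[derive(Debug, thiserror::Error)]
#[error("backend failure: {0}")]
pub struct ForeignBackendError(pub String);

deno_error::js_error_wrapper!(
  ForeignBackendError,
  JsForeignBackendError,
  "TypeError"
);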
{ type DB = denokv_sqlite::Sqlite; @@ -93,12 +98,12 @@ impl DatabaseHandler for SqliteDbHandler
{ &self, state: Rc>, path: Option, - ) -> Result { + ) -> Result { #[must_use = "the resolved return value to mitigate time-of-check to time-of-use issues"] fn validate_path( state: &RefCell, path: Option, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { let Some(path) = path else { return Ok(None); }; @@ -106,18 +111,22 @@ impl DatabaseHandler for SqliteDbHandler
{ return Ok(Some(path)); } if path.is_empty() { - return Err(type_error("Filename cannot be empty")); + return Err(JsErrorBox::type_error("Filename cannot be empty")); } if path.starts_with(':') { - return Err(type_error( + return Err(JsErrorBox::type_error( "Filename cannot start with ':' unless prefixed with './'", )); } { let mut state = state.borrow_mut(); let permissions = state.borrow_mut::
(); - let path = permissions.check_read(&path, "Deno.openKv")?; - let path = permissions.check_write(&path, "Deno.openKv")?; + let path = permissions + .check_read(&path, "Deno.openKv") + .map_err(JsErrorBox::from_err)?; + let path = permissions + .check_write(&path, "Deno.openKv") + .map_err(JsErrorBox::from_err)?; Ok(Some(path.to_string_lossy().to_string())) } } @@ -138,7 +147,7 @@ impl DatabaseHandler for SqliteDbHandler
{ let flags = OpenFlags::default().difference(OpenFlags::SQLITE_OPEN_URI); let resolved_path = canonicalize_path(&PathBuf::from(path)) - .map_err(anyhow::Error::from)?; + .map_err(JsErrorBox::from_err)?; let path = path.to_string(); ( Arc::new(move || { @@ -148,7 +157,7 @@ impl DatabaseHandler for SqliteDbHandler
{ ) } (None, Some(path)) => { - std::fs::create_dir_all(path).map_err(anyhow::Error::from)?; + std::fs::create_dir_all(path).map_err(JsErrorBox::from_err)?; let path = path.join("kv.sqlite3"); let path2 = path.clone(); ( @@ -162,7 +171,8 @@ impl DatabaseHandler for SqliteDbHandler
{ }) }) .await - .unwrap()?; + .unwrap() + .map_err(JsErrorBox::from_err)?; let notifier = if let Some(notifier_key) = notifier_key { SQLITE_NOTIFIERS_MAP @@ -185,8 +195,11 @@ impl DatabaseHandler for SqliteDbHandler
{ denokv_sqlite::Sqlite::new( move || { - let conn = conn_gen()?; - conn.pragma_update(None, "journal_mode", "wal")?; + let conn = + conn_gen().map_err(|e| JsErrorBox::generic(e.to_string()))?; + conn + .pragma_update(None, "journal_mode", "wal") + .map_err(|e| JsErrorBox::generic(e.to_string()))?; Ok(( conn, match versionstamp_rng_seed { @@ -198,11 +211,12 @@ impl DatabaseHandler for SqliteDbHandler
{ notifier, config, ) + .map_err(|e| JsErrorBox::generic(e.to_string())) } } /// Same as Path::canonicalize, but also handles non-existing paths. -fn canonicalize_path(path: &Path) -> Result { +fn canonicalize_path(path: &Path) -> Result { let path = normalize_path(path); let mut path = path; let mut names_stack = Vec::new(); @@ -225,7 +239,7 @@ fn canonicalize_path(path: &Path) -> Result { path.clone_from(¤t_dir); } } - Err(err) => return Err(err.into()), + Err(err) => return Err(err), } } } diff --git a/ext/napi/Cargo.toml b/ext/napi/Cargo.toml index a17fd9e0f65cd1..ac62cc5dc8d9db 100644 --- a/ext/napi/Cargo.toml +++ b/ext/napi/Cargo.toml @@ -15,6 +15,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true +deno_error.workspace = true deno_permissions.workspace = true libc.workspace = true libloading = { version = "0.7" } diff --git a/ext/napi/lib.rs b/ext/napi/lib.rs index 3f495e05e6e5b3..1db20ef647d43b 100644 --- a/ext/napi/lib.rs +++ b/ext/napi/lib.rs @@ -24,47 +24,49 @@ pub mod uv; use core::ptr::NonNull; use std::cell::RefCell; use std::collections::HashMap; +pub use std::ffi::CStr; +pub use std::os::raw::c_char; +pub use std::os::raw::c_void; use std::path::PathBuf; +pub use std::ptr; use std::rc::Rc; use std::thread_local; use deno_core::op2; use deno_core::parking_lot::RwLock; use deno_core::url::Url; +// Expose common stuff for ease of use. +// `use deno_napi::*` +pub use deno_core::v8; use deno_core::ExternalOpsTracker; use deno_core::OpState; use deno_core::V8CrossThreadTaskSpawner; +use deno_permissions::PermissionCheckError; +#[cfg(unix)] +use libloading::os::unix::*; +#[cfg(windows)] +use libloading::os::windows::*; +pub use value::napi_value; -#[derive(Debug, thiserror::Error)] +pub mod function; +mod value; + +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum NApiError { + #[class(type)] #[error("Invalid path")] InvalidPath, + #[class(type)] #[error(transparent)] LibLoading(#[from] libloading::Error), + #[class(type)] #[error("Unable to find register Node-API module at {}", .0.display())] ModuleNotFound(PathBuf), + #[class(inherit)] #[error(transparent)] Permission(#[from] PermissionCheckError), } -pub use std::ffi::CStr; -pub use std::os::raw::c_char; -pub use std::os::raw::c_void; -pub use std::ptr; - -// Expose common stuff for ease of use. 
-// `use deno_napi::*` -pub use deno_core::v8; -use deno_permissions::PermissionCheckError; -#[cfg(unix)] -use libloading::os::unix::*; -#[cfg(windows)] -use libloading::os::windows::*; -pub use value::napi_value; - -pub mod function; -mod value; - pub type napi_status = i32; pub type napi_env = *mut c_void; pub type napi_callback_info = *mut c_void; diff --git a/ext/net/Cargo.toml b/ext/net/Cargo.toml index a834d8c8f36036..ad20badb10c14c 100644 --- a/ext/net/Cargo.toml +++ b/ext/net/Cargo.toml @@ -15,6 +15,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true +deno_error.workspace = true deno_permissions.workspace = true deno_tls.workspace = true hickory-proto = "0.25.0-alpha.4" diff --git a/ext/net/io.rs b/ext/net/io.rs index 3f12b92a9a7903..71ae577cd0c875 100644 --- a/ext/net/io.rs +++ b/ext/net/io.rs @@ -11,6 +11,7 @@ use deno_core::CancelHandle; use deno_core::CancelTryFuture; use deno_core::RcRef; use deno_core::Resource; +use deno_error::JsErrorBox; use socket2::SockRef; use tokio::io::AsyncRead; use tokio::io::AsyncReadExt; @@ -90,10 +91,12 @@ where } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum MapError { + #[class(inherit)] #[error("{0}")] Io(std::io::Error), + #[class(generic)] #[error("Unable to get resources")] NoResources, } @@ -110,7 +113,7 @@ impl Resource for TcpStreamResource { } fn shutdown(self: Rc) -> AsyncResult<()> { - Box::pin(self.shutdown().map_err(Into::into)) + Box::pin(self.shutdown().map_err(JsErrorBox::from_err)) } fn close(self: Rc) { @@ -162,9 +165,7 @@ impl UnixStreamResource { unreachable!() } #[allow(clippy::unused_async)] - pub async fn shutdown( - self: Rc, - ) -> Result<(), deno_core::error::AnyError> { + pub async fn shutdown(self: Rc) -> Result<(), JsErrorBox> { unreachable!() } pub fn cancel_read_ops(&self) { @@ -181,7 +182,7 @@ impl Resource for UnixStreamResource { } fn shutdown(self: Rc) -> AsyncResult<()> { - Box::pin(self.shutdown().map_err(Into::into)) + Box::pin(self.shutdown().map_err(JsErrorBox::from_err)) } fn close(self: Rc) { diff --git a/ext/net/lib.rs b/ext/net/lib.rs index 726c51b7494bf2..b21da19f30613d 100644 --- a/ext/net/lib.rs +++ b/ext/net/lib.rs @@ -15,7 +15,6 @@ use std::path::Path; use std::path::PathBuf; use std::sync::Arc; -use deno_core::error::AnyError; use deno_core::OpState; use deno_permissions::PermissionCheckError; use deno_tls::rustls::RootCertStore; @@ -107,7 +106,9 @@ pub struct DefaultTlsOptions { } impl DefaultTlsOptions { - pub fn root_cert_store(&self) -> Result, AnyError> { + pub fn root_cert_store( + &self, + ) -> Result, deno_error::JsErrorBox> { Ok(match &self.root_cert_store_provider { Some(provider) => Some(provider.get_or_try_init()?.clone()), None => None, diff --git a/ext/net/ops.rs b/ext/net/ops.rs index 1fb0f39280382b..768dd33135e311 100644 --- a/ext/net/ops.rs +++ b/ext/net/ops.rs @@ -67,60 +67,87 @@ impl From for IpAddr { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum NetError { + #[class("BadResource")] #[error("Listener has been closed")] ListenerClosed, + #[class("Busy")] #[error("Listener already in use")] ListenerBusy, + #[class("BadResource")] #[error("Socket has been closed")] SocketClosed, + #[class("NotConnected")] #[error("Socket has been closed")] SocketClosedNotConnected, + #[class("Busy")] #[error("Socket already in use")] SocketBusy, + #[class(inherit)] #[error("{0}")] Io(#[from] std::io::Error), + #[class("Busy")] #[error("Another accept task is ongoing")] 
AcceptTaskOngoing, + #[class(inherit)] #[error(transparent)] Permission(#[from] deno_permissions::PermissionCheckError), + #[class(inherit)] #[error("{0}")] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(generic)] #[error("No resolved address found")] NoResolvedAddress, + #[class(generic)] #[error("{0}")] AddrParse(#[from] std::net::AddrParseError), + #[class(inherit)] #[error("{0}")] Map(crate::io::MapError), + #[class(inherit)] #[error("{0}")] Canceled(#[from] deno_core::Canceled), + #[class("NotFound")] #[error("{0}")] DnsNotFound(ResolveError), + #[class("NotConnected")] #[error("{0}")] DnsNotConnected(ResolveError), + #[class("TimedOut")] #[error("{0}")] DnsTimedOut(ResolveError), + #[class(generic)] #[error("{0}")] Dns(#[from] ResolveError), + #[class("NotSupported")] #[error("Provided record type is not supported")] UnsupportedRecordType, + #[class("InvalidData")] #[error("File name or path {0:?} is not valid UTF-8")] InvalidUtf8(std::ffi::OsString), + #[class(generic)] #[error("unexpected key type")] UnexpectedKeyType, + #[class(type)] #[error("Invalid hostname: '{0}'")] - InvalidHostname(String), // TypeError + InvalidHostname(String), + #[class("Busy")] #[error("TCP stream is currently in use")] TcpStreamBusy, + #[class(generic)] #[error("{0}")] Rustls(#[from] deno_tls::rustls::Error), + #[class(inherit)] #[error("{0}")] Tls(#[from] deno_tls::TlsError), + #[class("InvalidData")] #[error("Error creating TLS certificate: Deno.listenTls requires a key")] - ListenTlsRequiresKey, // InvalidData + ListenTlsRequiresKey, + #[class(inherit)] #[error("{0}")] - RootCertStore(deno_core::anyhow::Error), + RootCertStore(deno_error::JsErrorBox), + #[class(generic)] #[error("{0}")] Reunite(tokio::net::tcp::ReuniteError), } @@ -713,10 +740,8 @@ pub fn op_set_nodelay_inner( rid: ResourceId, nodelay: bool, ) -> Result<(), NetError> { - let resource: Rc = state - .resource_table - .get::(rid) - .map_err(NetError::Resource)?; + let resource: Rc = + state.resource_table.get::(rid)?; resource.set_nodelay(nodelay).map_err(NetError::Map) } @@ -735,10 +760,8 @@ pub fn op_set_keepalive_inner( rid: ResourceId, keepalive: bool, ) -> Result<(), NetError> { - let resource: Rc = state - .resource_table - .get::(rid) - .map_err(NetError::Resource)?; + let resource: Rc = + state.resource_table.get::(rid)?; resource.set_keepalive(keepalive).map_err(NetError::Map) } diff --git a/ext/net/ops_tls.rs b/ext/net/ops_tls.rs index 5b8cd47751b77c..12c65801366f09 100644 --- a/ext/net/ops_tls.rs +++ b/ext/net/ops_tls.rs @@ -23,6 +23,7 @@ use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; +use deno_error::JsErrorBox; use deno_tls::create_client_config; use deno_tls::load_certs; use deno_tls::load_private_keys; @@ -163,7 +164,7 @@ impl Resource for TlsStreamResource { } fn shutdown(self: Rc) -> AsyncResult<()> { - Box::pin(self.shutdown().map_err(Into::into)) + Box::pin(self.shutdown().map_err(JsErrorBox::from_err)) } fn close(self: Rc) { diff --git a/ext/net/quic.rs b/ext/net/quic.rs index 8b06d46d1996c9..e0757454957c13 100644 --- a/ext/net/quic.rs +++ b/ext/net/quic.rs @@ -17,6 +17,7 @@ use std::task::Context; use std::task::Poll; use std::time::Duration; +use deno_core::error::ResourceError; use deno_core::futures::task::noop_waker_ref; use deno_core::op2; use deno_core::AsyncRefCell; @@ -30,6 +31,8 @@ use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; use deno_core::WriteOutcome; +use 
deno_error::JsError; +use deno_error::JsErrorBox; use deno_permissions::PermissionCheckError; use deno_tls::create_client_config; use deno_tls::SocketUse; @@ -49,40 +52,59 @@ use crate::DefaultTlsOptions; use crate::NetPermissions; use crate::UnsafelyIgnoreCertificateErrors; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, JsError)] pub enum QuicError { + #[class(generic)] #[error("Endpoint created by 'connectQuic' cannot be used for listening")] CannotListen, + #[class(type)] #[error("key and cert are required")] MissingTlsKey, + #[class(type)] #[error("Duration is invalid")] InvalidDuration, + #[class(generic)] #[error("Unable to resolve hostname")] UnableToResolve, + #[class(inherit)] #[error("{0}")] StdIo(#[from] std::io::Error), + #[class(inherit)] #[error("{0}")] PermissionCheck(#[from] PermissionCheckError), + #[class(range)] #[error("{0}")] VarIntBoundsExceeded(#[from] quinn::VarIntBoundsExceeded), + #[class(generic)] #[error("{0}")] Rustls(#[from] quinn::rustls::Error), + #[class(inherit)] #[error("{0}")] Tls(#[from] TlsError), + #[class(generic)] #[error("{0}")] ConnectionError(#[from] quinn::ConnectionError), + #[class(generic)] #[error("{0}")] ConnectError(#[from] quinn::ConnectError), + #[class(generic)] #[error("{0}")] SendDatagramError(#[from] quinn::SendDatagramError), + #[class("BadResource")] #[error("{0}")] ClosedStream(#[from] quinn::ClosedStream), + #[class("BadResource")] #[error("Invalid {0} resource")] BadResource(&'static str), + #[class(range)] #[error("Connection has reached the maximum number of concurrent outgoing {0} streams")] MaxStreams(&'static str), + #[class(generic)] #[error("{0}")] Core(#[from] deno_core::error::AnyError), + #[class(inherit)] + #[error(transparent)] + Other(#[from] JsErrorBox), } #[derive(Serialize, Deserialize)] @@ -658,8 +680,13 @@ impl Resource for SendStreamResource { Box::pin(async move { let mut stream = RcRef::map(self.clone(), |r| &r.stream).borrow_mut().await; - stream.set_priority(self.priority.load(Ordering::Relaxed))?; - let nwritten = stream.write(&view).await?; + stream + .set_priority(self.priority.load(Ordering::Relaxed)) + .map_err(|e| JsErrorBox::from_err(std::io::Error::from(e)))?; + let nwritten = stream + .write(&view) + .await + .map_err(|e| JsErrorBox::from_err(std::io::Error::from(e)))?; Ok(WriteOutcome::Partial { nwritten, view }) }) } @@ -690,7 +717,11 @@ impl Resource for RecvStreamResource { Box::pin(async move { let mut r = RcRef::map(self, |r| &r.stream).borrow_mut().await; let mut data = vec![0; limit]; - let nread = r.read(&mut data).await?.unwrap_or(0); + let nread = r + .read(&mut data) + .await + .map_err(|e| JsErrorBox::from_err(std::io::Error::from(e)))? + .unwrap_or(0); data.truncate(nread); Ok(BufView::from(data)) }) @@ -702,7 +733,11 @@ impl Resource for RecvStreamResource { ) -> AsyncResult<(usize, BufMutView)> { Box::pin(async move { let mut r = RcRef::map(self, |r| &r.stream).borrow_mut().await; - let nread = r.read(&mut buf).await?.unwrap_or(0); + let nread = r + .read(&mut buf) + .await + .map_err(|e| JsErrorBox::from_err(std::io::Error::from(e)))? 
+ .unwrap_or(0); Ok((nread, buf)) }) } @@ -710,7 +745,8 @@ impl Resource for RecvStreamResource { fn shutdown(self: Rc) -> AsyncResult<()> { Box::pin(async move { let mut r = RcRef::map(self, |r| &r.stream).borrow_mut().await; - r.stop(quinn::VarInt::from(0u32))?; + r.stop(quinn::VarInt::from(0u32)) + .map_err(|e| JsErrorBox::from_err(std::io::Error::from(e)))?; Ok(()) }) } @@ -835,15 +871,15 @@ pub(crate) async fn op_quic_connection_read_datagram( #[op2(fast)] pub(crate) fn op_quic_connection_get_max_datagram_size( #[cppgc] connection: &ConnectionResource, -) -> Result { - Ok(connection.0.max_datagram_size().unwrap_or(0) as _) +) -> u32 { + connection.0.max_datagram_size().unwrap_or(0) as _ } #[op2(fast)] pub(crate) fn op_quic_send_stream_get_priority( state: Rc>, #[smi] rid: ResourceId, -) -> Result { +) -> Result { let resource = state .borrow() .resource_table @@ -856,7 +892,7 @@ pub(crate) fn op_quic_send_stream_set_priority( state: Rc>, #[smi] rid: ResourceId, priority: i32, -) -> Result<(), QuicError> { +) -> Result<(), ResourceError> { let resource = state .borrow() .resource_table @@ -870,7 +906,7 @@ pub(crate) fn op_quic_send_stream_set_priority( pub(crate) fn op_quic_send_stream_get_id( state: Rc>, #[smi] rid: ResourceId, -) -> Result { +) -> Result { let resource = state .borrow() .resource_table @@ -884,7 +920,7 @@ pub(crate) fn op_quic_send_stream_get_id( pub(crate) fn op_quic_recv_stream_get_id( state: Rc>, #[smi] rid: ResourceId, -) -> Result { +) -> Result { let resource = state .borrow() .resource_table diff --git a/ext/net/raw.rs b/ext/net/raw.rs index 0312d661a57485..fc380635f6fa53 100644 --- a/ext/net/raw.rs +++ b/ext/net/raw.rs @@ -1,15 +1,15 @@ // Copyright 2018-2025 the Deno authors. MIT license. + use std::borrow::Cow; use std::rc::Rc; -use deno_core::error::bad_resource_id; -use deno_core::error::custom_error; -use deno_core::error::AnyError; +use deno_core::error::ResourceError; use deno_core::AsyncRefCell; use deno_core::CancelHandle; use deno_core::Resource; use deno_core::ResourceId; use deno_core::ResourceTable; +use deno_error::JsErrorBox; use crate::io::TcpStreamResource; use crate::ops_tls::TlsStreamResource; @@ -69,10 +69,10 @@ impl NetworkListenerResource { fn take( resource_table: &mut ResourceTable, listener_rid: ResourceId, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { if let Ok(resource_rc) = resource_table.take::(listener_rid) { let resource = Rc::try_unwrap(resource_rc) - .map_err(|_| custom_error("Busy", "Listener is currently in use"))?; + .map_err(|_| JsErrorBox::new("Busy", "Listener is currently in use"))?; return Ok(Some(resource.listener.into_inner().into())); } Ok(None) @@ -243,13 +243,13 @@ macro_rules! network_stream { /// Return a `NetworkStreamListener` if a resource exists for this `ResourceId` and it is currently /// not locked. - pub fn take_resource(resource_table: &mut ResourceTable, listener_rid: ResourceId) -> Result { + pub fn take_resource(resource_table: &mut ResourceTable, listener_rid: ResourceId) -> Result { $( if let Some(resource) = NetworkListenerResource::<$listener>::take(resource_table, listener_rid)? 
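// Aside: two smaller patterns from the net/quic hunks above, sketched under
// the assumption of the updated deno_core resource-table API used in this
// patch; the op name and resource type are illustrative.
use std::rc::Rc;

use deno_core::error::ResourceError;
use deno_core::{op2, OpState, Resource, ResourceId};
use deno_error::JsErrorBox;

struct ExampleStreamResource;
impl Resource for ExampleStreamResource {}

// 1. Ops that only touch the resource table can return the concrete
//    ResourceError instead of AnyError; `?` works because get()/take()
//    now yield ResourceError directly.
#[op2(fast)]
pub fn op_example_close(
  state: &mut OpState,
  #[smi] rid: ResourceId,
) -> Result<(), ResourceError> {
  let _res: Rc<ExampleStreamResource> =
    state.resource_table.take::<ExampleStreamResource>(rid)?;
  Ok(())
}

// 2. Errors that do not implement JsErrorClass themselves (the quinn stream
//    errors above are the motivating case) are routed through std::io::Error,
//    which does, before boxing.
fn box_via_io_error<E>(err: E) -> JsErrorBox
where
  std::io::Error: From<E>,
{
  JsErrorBox::from_err(std::io::Error::from(err))
}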
{ return Ok(resource) } )* - Err(bad_resource_id()) + Err(JsErrorBox::from_err(ResourceError::BadResourceId)) } } }; @@ -322,12 +322,36 @@ impl From for NetworkStreamAddress { } } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum TakeNetworkStreamError { + #[class("Busy")] + #[error("TCP stream is currently in use")] + TcpBusy, + #[class("Busy")] + #[error("TLS stream is currently in use")] + TlsBusy, + #[cfg(unix)] + #[class("Busy")] + #[error("Unix socket is currently in use")] + UnixBusy, + #[class(generic)] + #[error(transparent)] + ReuniteTcp(#[from] tokio::net::tcp::ReuniteError), + #[cfg(unix)] + #[class(generic)] + #[error(transparent)] + ReuniteUnix(#[from] tokio::net::unix::ReuniteError), + #[class(inherit)] + #[error(transparent)] + Resource(deno_core::error::ResourceError), +} + /// In some cases it may be more efficient to extract the resource from the resource table and use it directly (for example, an HTTP server). /// This method will extract a stream from the resource table and return it, unwrapped. pub fn take_network_stream_resource( resource_table: &mut ResourceTable, stream_rid: ResourceId, -) -> Result { +) -> Result { // The stream we're attempting to unwrap may be in use somewhere else. If that's the case, we cannot proceed // with the process of unwrapping this connection, so we just return a bad resource error. // See also: https://github.com/denoland/deno/pull/16242 @@ -336,7 +360,7 @@ pub fn take_network_stream_resource( { // This TCP connection might be used somewhere else. let resource = Rc::try_unwrap(resource_rc) - .map_err(|_| custom_error("Busy", "TCP stream is currently in use"))?; + .map_err(|_| TakeNetworkStreamError::TcpBusy)?; let (read_half, write_half) = resource.into_inner(); let tcp_stream = read_half.reunite(write_half)?; return Ok(NetworkStream::Tcp(tcp_stream)); @@ -346,7 +370,7 @@ pub fn take_network_stream_resource( { // This TLS connection might be used somewhere else. let resource = Rc::try_unwrap(resource_rc) - .map_err(|_| custom_error("Busy", "TLS stream is currently in use"))?; + .map_err(|_| TakeNetworkStreamError::TlsBusy)?; let (read_half, write_half) = resource.into_inner(); let tls_stream = read_half.unsplit(write_half); return Ok(NetworkStream::Tls(tls_stream)); @@ -358,13 +382,15 @@ pub fn take_network_stream_resource( { // This UNIX socket might be used somewhere else. let resource = Rc::try_unwrap(resource_rc) - .map_err(|_| custom_error("Busy", "Unix socket is currently in use"))?; + .map_err(|_| TakeNetworkStreamError::UnixBusy)?; let (read_half, write_half) = resource.into_inner(); let unix_stream = read_half.reunite(write_half)?; return Ok(NetworkStream::Unix(unix_stream)); } - Err(bad_resource_id()) + Err(TakeNetworkStreamError::Resource( + ResourceError::BadResourceId, + )) } /// In some cases it may be more efficient to extract the resource from the resource table and use it directly (for example, an HTTP server). 
@@ -372,6 +398,6 @@ pub fn take_network_stream_resource( pub fn take_network_stream_listener_resource( resource_table: &mut ResourceTable, listener_rid: ResourceId, -) -> Result { +) -> Result { NetworkStreamListener::take_resource(resource_table, listener_rid) } diff --git a/ext/node/Cargo.toml b/ext/node/Cargo.toml index 0f6ae1d7ab8a68..19936b74f10739 100644 --- a/ext/node/Cargo.toml +++ b/ext/node/Cargo.toml @@ -29,6 +29,7 @@ cbc.workspace = true const-oid = "0.9.5" data-encoding.workspace = true deno_core.workspace = true +deno_error.workspace = true deno_fetch.workspace = true deno_fs.workspace = true deno_io.workspace = true diff --git a/ext/node/lib.rs b/ext/node/lib.rs index d7aa82430da02a..64b6c006a14581 100644 --- a/ext/node/lib.rs +++ b/ext/node/lib.rs @@ -8,12 +8,12 @@ use std::collections::HashSet; use std::path::Path; use std::path::PathBuf; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::url::Url; #[allow(unused_imports)] use deno_core::v8; use deno_core::v8::ExternalReference; +use deno_error::JsErrorBox; use node_resolver::errors::ClosestPkgJsonError; use node_resolver::IsBuiltInNodeModuleChecker; use node_resolver::NpmPackageFolderResolverRc; @@ -157,12 +157,12 @@ pub trait NodeRequireLoader { &self, permissions: &mut dyn NodePermissions, path: &'a Path, - ) -> Result, AnyError>; + ) -> Result, JsErrorBox>; fn load_text_file_lossy( &self, path: &Path, - ) -> Result, AnyError>; + ) -> Result, JsErrorBox>; /// Get if the module kind is maybe CJS and loading should determine /// if its CJS or ESM. diff --git a/ext/node/ops/blocklist.rs b/ext/node/ops/blocklist.rs index bcb36fc97b4c5b..16bda73fe0df4c 100644 --- a/ext/node/ops/blocklist.rs +++ b/ext/node/ops/blocklist.rs @@ -23,7 +23,8 @@ impl deno_core::GarbageCollected for BlockListResource {} #[derive(Serialize)] struct SocketAddressSerialization(String, String); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(generic)] pub enum BlocklistError { #[error("{0}")] AddrParse(#[from] std::net::AddrParseError), diff --git a/ext/node/ops/buffer.rs b/ext/node/ops/buffer.rs index e3ae2b2391fa69..0e8cff5cc05596 100644 --- a/ext/node/ops/buffer.rs +++ b/ext/node/ops/buffer.rs @@ -1,8 +1,7 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
-use deno_core::anyhow::anyhow; -use deno_core::anyhow::Result; use deno_core::op2; +use deno_error::JsErrorBox; #[op2(fast)] pub fn op_is_ascii(#[buffer] buf: &[u8]) -> bool { @@ -20,7 +19,7 @@ pub fn op_transcode( #[buffer] source: &[u8], #[string] from_encoding: &str, #[string] to_encoding: &str, -) -> Result> { +) -> Result, JsErrorBox> { match (from_encoding, to_encoding) { ("utf8", "ascii") => Ok(utf8_to_ascii(source)), ("utf8", "latin1") => Ok(utf8_to_latin1(source)), @@ -29,7 +28,9 @@ pub fn op_transcode( ("latin1", "utf16le") | ("ascii", "utf16le") => { Ok(latin1_ascii_to_utf16le(source)) } - (from, to) => Err(anyhow!("Unable to transcode Buffer {from}->{to}")), + (from, to) => Err(JsErrorBox::generic(format!( + "Unable to transcode Buffer {from}->{to}" + ))), } } @@ -42,18 +43,19 @@ fn latin1_ascii_to_utf16le(source: &[u8]) -> Vec { result } -fn utf16le_to_utf8(source: &[u8]) -> Result> { +fn utf16le_to_utf8(source: &[u8]) -> Result, JsErrorBox> { let ucs2_vec: Vec = source .chunks(2) .map(|chunk| u16::from_le_bytes([chunk[0], chunk[1]])) .collect(); String::from_utf16(&ucs2_vec) .map(|utf8_string| utf8_string.into_bytes()) - .map_err(|e| anyhow!("Invalid UTF-16 sequence: {}", e)) + .map_err(|e| JsErrorBox::generic(format!("Invalid UTF-16 sequence: {}", e))) } -fn utf8_to_utf16le(source: &[u8]) -> Result> { - let utf8_string = std::str::from_utf8(source)?; +fn utf8_to_utf16le(source: &[u8]) -> Result, JsErrorBox> { + let utf8_string = + std::str::from_utf8(source).map_err(JsErrorBox::from_err)?; let ucs2_vec: Vec = utf8_string.encode_utf16().collect(); let bytes: Vec = ucs2_vec.iter().flat_map(|&x| x.to_le_bytes()).collect(); Ok(bytes) diff --git a/ext/node/ops/crypto/cipher.rs b/ext/node/ops/crypto/cipher.rs index 500d8d1b4b95e2..a12f36e04acd80 100644 --- a/ext/node/ops/crypto/cipher.rs +++ b/ext/node/ops/crypto/cipher.rs @@ -47,12 +47,15 @@ pub struct DecipherContext { decipher: Rc>, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CipherContextError { + #[class(type)] #[error("Cipher context is already in use")] ContextInUse, + #[class(inherit)] #[error("{0}")] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(inherit)] #[error(transparent)] Cipher(#[from] CipherError), } @@ -94,12 +97,15 @@ impl CipherContext { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum DecipherContextError { + #[class(type)] #[error("Decipher context is already in use")] ContextInUse, + #[class(inherit)] #[error("{0}")] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(inherit)] #[error(transparent)] Decipher(#[from] DecipherError), } @@ -150,16 +156,21 @@ impl Resource for DecipherContext { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CipherError { + #[class(type)] #[error("IV length must be 12 bytes")] InvalidIvLength, + #[class(range)] #[error("Invalid key length")] InvalidKeyLength, + #[class(type)] #[error("Invalid initialization vector")] InvalidInitializationVector, + #[class(type)] #[error("Cannot pad the input data")] CannotPadInputData, + #[class(type)] #[error("Unknown cipher {0}")] UnknownCipher(String), } @@ -352,22 +363,30 @@ impl Cipher { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum DecipherError { + #[class(type)] #[error("IV length must be 12 bytes")] 
InvalidIvLength, + #[class(range)] #[error("Invalid key length")] InvalidKeyLength, + #[class(type)] #[error("Invalid initialization vector")] InvalidInitializationVector, + #[class(type)] #[error("Cannot unpad the input data")] CannotUnpadInputData, + #[class(type)] #[error("Failed to authenticate data")] DataAuthenticationFailed, + #[class(type)] #[error("setAutoPadding(false) not supported for Aes128Gcm yet")] SetAutoPaddingFalseAes128GcmUnsupported, + #[class(type)] #[error("setAutoPadding(false) not supported for Aes256Gcm yet")] SetAutoPaddingFalseAes256GcmUnsupported, + #[class(type)] #[error("Unknown cipher {0}")] UnknownCipher(String), } diff --git a/ext/node/ops/crypto/digest.rs b/ext/node/ops/crypto/digest.rs index f9810e8c673bee..5f15dace30c11e 100644 --- a/ext/node/ops/crypto/digest.rs +++ b/ext/node/ops/crypto/digest.rs @@ -183,7 +183,8 @@ pub enum Hash { use Hash::*; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(generic)] pub enum HashError { #[error("Output length mismatch for non-extendable algorithm")] OutputLengthMismatch, diff --git a/ext/node/ops/crypto/keys.rs b/ext/node/ops/crypto/keys.rs index d982d96a8c18ad..79b09faa267a7d 100644 --- a/ext/node/ops/crypto/keys.rs +++ b/ext/node/ops/crypto/keys.rs @@ -4,12 +4,12 @@ use std::borrow::Cow; use std::cell::RefCell; use base64::Engine; -use deno_core::error::type_error; use deno_core::op2; use deno_core::serde_v8::BigInt as V8BigInt; use deno_core::unsync::spawn_blocking; use deno_core::GarbageCollected; use deno_core::ToJsBuffer; +use deno_error::JsErrorBox; use ed25519_dalek::pkcs8::BitStringRef; use elliptic_curve::JwkEcKey; use num_bigint::BigInt; @@ -375,55 +375,72 @@ impl<'a> TryFrom> for RsaPssParameters<'a> { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum X509PublicKeyError { + #[class(generic)] #[error(transparent)] - X509(#[from] x509_parser::error::X509Error), + X509(#[from] X509Error), + #[class(generic)] #[error(transparent)] Rsa(#[from] rsa::Error), + #[class(generic)] #[error(transparent)] Asn1(#[from] x509_parser::der_parser::asn1_rs::Error), + #[class(generic)] #[error(transparent)] Ec(#[from] elliptic_curve::Error), + #[class(type)] #[error("unsupported ec named curve")] UnsupportedEcNamedCurve, + #[class(type)] #[error("missing ec parameters")] MissingEcParameters, + #[class(type)] #[error("malformed DSS public key")] MalformedDssPublicKey, + #[class(type)] #[error("unsupported x509 public key type")] UnsupportedX509KeyType, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum RsaJwkError { + #[class(generic)] #[error(transparent)] Base64(#[from] base64::DecodeError), + #[class(generic)] #[error(transparent)] Rsa(#[from] rsa::Error), + #[class(type)] #[error("missing RSA private component")] MissingRsaPrivateComponent, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum EcJwkError { + #[class(generic)] #[error(transparent)] Ec(#[from] elliptic_curve::Error), + #[class(type)] #[error("unsupported curve: {0}")] UnsupportedCurve(String), } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum EdRawError { + #[class(generic)] #[error(transparent)] Ed25519Signature(#[from] ed25519_dalek::SignatureError), + #[class(type)] #[error("invalid Ed25519 key")] InvalidEd25519Key, + #[class(type)] #[error("unsupported curve")] UnsupportedCurve, } 
-#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum AsymmetricPrivateKeyError { #[error("invalid PEM private key: not valid utf8 starting at byte {0}")] InvalidPemPrivateKeyInvalidUtf8(usize), @@ -439,8 +456,13 @@ pub enum AsymmetricPrivateKeyError { InvalidSec1PrivateKey, #[error("unsupported PEM label: {0}")] UnsupportedPemLabel(String), + #[class(inherit)] #[error(transparent)] - RsaPssParamsParse(#[from] RsaPssParamsParseError), + RsaPssParamsParse( + #[from] + #[inherit] + RsaPssParamsParseError, + ), #[error("invalid encrypted PKCS#8 private key")] InvalidEncryptedPkcs8PrivateKey, #[error("invalid PKCS#8 private key")] @@ -473,58 +495,96 @@ pub enum AsymmetricPrivateKeyError { UnsupportedPrivateKeyOid, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum AsymmetricPublicKeyError { + #[class(type)] #[error("invalid PEM private key: not valid utf8 starting at byte {0}")] InvalidPemPrivateKeyInvalidUtf8(usize), + #[class(type)] #[error("invalid PEM public key")] InvalidPemPublicKey, + #[class(type)] #[error("invalid PKCS#1 public key")] InvalidPkcs1PublicKey, + #[class(inherit)] #[error(transparent)] - AsymmetricPrivateKey(#[from] AsymmetricPrivateKeyError), + AsymmetricPrivateKey( + #[from] + #[inherit] + AsymmetricPrivateKeyError, + ), + #[class(type)] #[error("invalid x509 certificate")] InvalidX509Certificate, + #[class(generic)] #[error(transparent)] X509(#[from] x509_parser::nom::Err), + #[class(inherit)] #[error(transparent)] - X509PublicKey(#[from] X509PublicKeyError), + X509PublicKey( + #[from] + #[inherit] + X509PublicKeyError, + ), + #[class(type)] #[error("unsupported PEM label: {0}")] UnsupportedPemLabel(String), + #[class(type)] #[error("invalid SPKI public key")] InvalidSpkiPublicKey, + #[class(type)] #[error("unsupported key type: {0}")] UnsupportedKeyType(String), + #[class(type)] #[error("unsupported key format: {0}")] UnsupportedKeyFormat(String), + #[class(generic)] #[error(transparent)] Spki(#[from] spki::Error), + #[class(generic)] #[error(transparent)] Pkcs1(#[from] rsa::pkcs1::Error), + #[class(inherit)] #[error(transparent)] - RsaPssParamsParse(#[from] RsaPssParamsParseError), + RsaPssParamsParse( + #[from] + #[inherit] + RsaPssParamsParseError, + ), + #[class(type)] #[error("malformed DSS public key")] MalformedDssPublicKey, + #[class(type)] #[error("malformed or missing named curve in ec parameters")] MalformedOrMissingNamedCurveInEcParameters, + #[class(type)] #[error("malformed or missing public key in ec spki")] MalformedOrMissingPublicKeyInEcSpki, + #[class(generic)] #[error(transparent)] Ec(#[from] elliptic_curve::Error), + #[class(type)] #[error("unsupported ec named curve")] UnsupportedEcNamedCurve, + #[class(type)] #[error("malformed or missing public key in x25519 spki")] MalformedOrMissingPublicKeyInX25519Spki, + #[class(type)] #[error("x25519 public key is too short")] X25519PublicKeyIsTooShort, + #[class(type)] #[error("invalid Ed25519 public key")] InvalidEd25519PublicKey, + #[class(type)] #[error("missing dh parameters")] MissingDhParameters, + #[class(type)] #[error("malformed dh parameters")] MalformedDhParameters, + #[class(type)] #[error("malformed or missing public key in dh spki")] MalformedOrMissingPublicKeyInDhSpki, + #[class(type)] #[error("unsupported private key oid")] UnsupportedPrivateKeyOid, } @@ -1043,7 +1103,8 @@ impl KeyObjectHandle { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, 
deno_error::JsError)] +#[class(type)] pub enum RsaPssParamsParseError { #[error("malformed pss private key parameters")] MalformedPssPrivateKeyParameters, @@ -1118,7 +1179,8 @@ fn bytes_to_b64(bytes: &[u8]) -> String { BASE64_URL_SAFE_NO_PAD.encode(bytes) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum AsymmetricPrivateKeyJwkError { #[error("key is not an asymmetric private key")] KeyIsNotAsymmetricPrivateKey, @@ -1128,7 +1190,8 @@ pub enum AsymmetricPrivateKeyJwkError { JwkExportNotImplementedForKeyType, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum AsymmetricPublicKeyJwkError { #[error("key is not an asymmetric public key")] KeyIsNotAsymmetricPublicKey, @@ -1138,7 +1201,8 @@ pub enum AsymmetricPublicKeyJwkError { JwkExportNotImplementedForKeyType, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum AsymmetricPublicKeyDerError { #[error("key is not an asymmetric public key")] KeyIsNotAsymmetricPublicKey, @@ -1323,7 +1387,8 @@ impl AsymmetricPublicKey { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum AsymmetricPrivateKeyDerError { #[error("key is not an asymmetric private key")] KeyIsNotAsymmetricPrivateKey, @@ -1335,6 +1400,7 @@ pub enum AsymmetricPrivateKeyDerError { InvalidEcPrivateKey, #[error("exporting non-EC private key as SEC1 is not supported")] ExportingNonEcPrivateKeyAsSec1Unsupported, + #[class(type)] #[error("exporting RSA-PSS private key as PKCS#8 is not supported yet")] ExportingNonRsaPssPrivateKeyAsPkcs8Unsupported, #[error("invalid DSA private key")] @@ -1615,7 +1681,7 @@ pub fn op_node_create_secret_key( #[string] pub fn op_node_get_asymmetric_key_type( #[cppgc] handle: &KeyObjectHandle, -) -> Result<&'static str, deno_core::error::AnyError> { +) -> Result<&'static str, JsErrorBox> { match handle { KeyObjectHandle::AsymmetricPrivate(AsymmetricPrivateKey::Rsa(_)) | KeyObjectHandle::AsymmetricPublic(AsymmetricPublicKey::Rsa(_)) => { @@ -1641,9 +1707,9 @@ pub fn op_node_get_asymmetric_key_type( } KeyObjectHandle::AsymmetricPrivate(AsymmetricPrivateKey::Dh(_)) | KeyObjectHandle::AsymmetricPublic(AsymmetricPublicKey::Dh(_)) => Ok("dh"), - KeyObjectHandle::Secret(_) => { - Err(type_error("symmetric key is not an asymmetric key")) - } + KeyObjectHandle::Secret(_) => Err(JsErrorBox::type_error( + "symmetric key is not an asymmetric key", + )), } } @@ -1686,7 +1752,7 @@ pub enum AsymmetricKeyDetails { #[serde] pub fn op_node_get_asymmetric_key_details( #[cppgc] handle: &KeyObjectHandle, -) -> Result { +) -> Result { match handle { KeyObjectHandle::AsymmetricPrivate(private_key) => match private_key { AsymmetricPrivateKey::Rsa(key) => { @@ -1794,9 +1860,9 @@ pub fn op_node_get_asymmetric_key_details( AsymmetricPublicKey::Ed25519(_) => Ok(AsymmetricKeyDetails::Ed25519), AsymmetricPublicKey::Dh(_) => Ok(AsymmetricKeyDetails::Dh), }, - KeyObjectHandle::Secret(_) => { - Err(type_error("symmetric key is not an asymmetric key")) - } + KeyObjectHandle::Secret(_) => Err(JsErrorBox::type_error( + "symmetric key is not an asymmetric key", + )), } } @@ -1804,12 +1870,12 @@ pub fn op_node_get_asymmetric_key_details( #[smi] pub fn op_node_get_symmetric_key_size( #[cppgc] handle: &KeyObjectHandle, -) -> Result { +) -> Result { match handle { KeyObjectHandle::AsymmetricPrivate(_) - | 
KeyObjectHandle::AsymmetricPublic(_) => { - Err(type_error("asymmetric key is not a symmetric key")) - } + | KeyObjectHandle::AsymmetricPublic(_) => Err(JsErrorBox::type_error( + "asymmetric key is not a symmetric key", + )), KeyObjectHandle::Secret(key) => Ok(key.len() * 8), } } @@ -1912,7 +1978,8 @@ pub async fn op_node_generate_rsa_key_async( .unwrap() } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] #[error("digest not allowed for RSA-PSS keys{}", .0.as_ref().map(|digest| format!(": {digest}")).unwrap_or_default())] pub struct GenerateRsaPssError(Option); @@ -2016,7 +2083,7 @@ pub async fn op_node_generate_rsa_pss_key_async( fn dsa_generate( modulus_length: usize, divisor_length: usize, -) -> Result { +) -> Result { let mut rng = rand::thread_rng(); use dsa::Components; use dsa::KeySize; @@ -2029,7 +2096,7 @@ fn dsa_generate( (2048, 256) => KeySize::DSA_2048_256, (3072, 256) => KeySize::DSA_3072_256, _ => { - return Err(type_error( + return Err(JsErrorBox::type_error( "Invalid modulusLength+divisorLength combination", )) } @@ -2047,7 +2114,7 @@ fn dsa_generate( pub fn op_node_generate_dsa_key( #[smi] modulus_length: usize, #[smi] divisor_length: usize, -) -> Result { +) -> Result { dsa_generate(modulus_length, divisor_length) } @@ -2056,15 +2123,13 @@ pub fn op_node_generate_dsa_key( pub async fn op_node_generate_dsa_key_async( #[smi] modulus_length: usize, #[smi] divisor_length: usize, -) -> Result { +) -> Result { spawn_blocking(move || dsa_generate(modulus_length, divisor_length)) .await .unwrap() } -fn ec_generate( - named_curve: &str, -) -> Result { +fn ec_generate(named_curve: &str) -> Result { let mut rng = rand::thread_rng(); // TODO(@littledivy): Support public key point encoding. // Default is uncompressed. 
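// ---------------------------------------------------------------------------
// Editor's sketch (not part of the diff): the hunks above all apply the same
// mechanical migration -- each thiserror enum additionally derives
// deno_error::JsError, every variant gets a #[class(...)] attribute choosing
// the JavaScript error class (type, generic, range, inherit, or a string
// name), and ad-hoc type_error(...) calls become JsErrorBox::type_error(...).
// The enum and function below are invented purely to illustrate that pattern;
// only the derive, the attributes, and the JsErrorBox calls mirror the real
// code in this change.
use deno_error::JsErrorBox;

#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum DemoError {
  // Surfaces in JS as a TypeError.
  #[class(type)]
  #[error("unsupported input: {0}")]
  Unsupported(String),
  // Inherits the JS class of the wrapped error instead of fixing one here.
  #[class(inherit)]
  #[error(transparent)]
  Io(
    #[from]
    #[inherit]
    std::io::Error,
  ),
}

// One-off failures can still be boxed directly, as the ops above now do.
fn demo_parse_len(input: &str) -> Result<usize, JsErrorBox> {
  input
    .parse()
    .map_err(|_| JsErrorBox::type_error(format!("invalid length: {input}")))
}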
@@ -2082,7 +2147,7 @@ fn ec_generate( AsymmetricPrivateKey::Ec(EcPrivateKey::P384(key)) } _ => { - return Err(type_error(format!( + return Err(JsErrorBox::type_error(format!( "unsupported named curve: {}", named_curve ))) @@ -2096,7 +2161,7 @@ fn ec_generate( #[cppgc] pub fn op_node_generate_ec_key( #[string] named_curve: &str, -) -> Result { +) -> Result { ec_generate(named_curve) } @@ -2104,7 +2169,7 @@ pub fn op_node_generate_ec_key( #[cppgc] pub async fn op_node_generate_ec_key_async( #[string] named_curve: String, -) -> Result { +) -> Result { spawn_blocking(move || ec_generate(&named_curve)) .await .unwrap() @@ -2160,7 +2225,7 @@ fn u32_slice_to_u8_slice(slice: &[u32]) -> &[u8] { fn dh_group_generate( group_name: &str, -) -> Result { +) -> Result { let (dh, prime, generator) = match group_name { "modp5" => ( dh::DiffieHellman::group::(), @@ -2192,7 +2257,7 @@ fn dh_group_generate( dh::Modp8192::MODULUS, dh::Modp8192::GENERATOR, ), - _ => return Err(type_error("Unsupported group name")), + _ => return Err(JsErrorBox::type_error("Unsupported group name")), }; let params = DhParameter { prime: asn1::Int::new(u32_slice_to_u8_slice(prime)).unwrap(), @@ -2215,7 +2280,7 @@ fn dh_group_generate( #[cppgc] pub fn op_node_generate_dh_group_key( #[string] group_name: &str, -) -> Result { +) -> Result { dh_group_generate(group_name) } @@ -2223,7 +2288,7 @@ pub fn op_node_generate_dh_group_key( #[cppgc] pub async fn op_node_generate_dh_group_key_async( #[string] group_name: String, -) -> Result { +) -> Result { spawn_blocking(move || dh_group_generate(&group_name)) .await .unwrap() @@ -2297,10 +2362,10 @@ pub fn op_node_dh_keys_generate_and_export( #[buffer] pub fn op_node_export_secret_key( #[cppgc] handle: &KeyObjectHandle, -) -> Result, deno_core::error::AnyError> { +) -> Result, JsErrorBox> { let key = handle .as_secret_key() - .ok_or_else(|| type_error("key is not a secret key"))?; + .ok_or_else(|| JsErrorBox::type_error("key is not a secret key"))?; Ok(key.to_vec().into_boxed_slice()) } @@ -2308,10 +2373,10 @@ pub fn op_node_export_secret_key( #[string] pub fn op_node_export_secret_key_b64url( #[cppgc] handle: &KeyObjectHandle, -) -> Result { +) -> Result { let key = handle .as_secret_key() - .ok_or_else(|| type_error("key is not a secret key"))?; + .ok_or_else(|| JsErrorBox::type_error("key is not a secret key"))?; Ok(base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(key)) } @@ -2327,12 +2392,19 @@ pub fn op_node_export_public_key_jwk( public_key.export_jwk() } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum ExportPublicKeyPemError { + #[class(inherit)] #[error(transparent)] - AsymmetricPublicKeyDer(#[from] AsymmetricPublicKeyDerError), + AsymmetricPublicKeyDer( + #[from] + #[inherit] + AsymmetricPublicKeyDerError, + ), + #[class(type)] #[error("very large data")] VeryLargeData, + #[class(generic)] #[error(transparent)] Der(#[from] der::Error), } @@ -2377,12 +2449,19 @@ pub fn op_node_export_public_key_der( public_key.export_der(typ) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum ExportPrivateKeyPemError { + #[class(inherit)] #[error(transparent)] - AsymmetricPublicKeyDer(#[from] AsymmetricPrivateKeyDerError), + AsymmetricPublicKeyDer( + #[from] + #[inherit] + AsymmetricPrivateKeyDerError, + ), + #[class(type)] #[error("very large data")] VeryLargeData, + #[class(generic)] #[error(transparent)] Der(#[from] der::Error), } @@ -2416,12 +2495,15 @@ pub fn 
op_node_export_private_key_pem( Ok(String::from_utf8(out).expect("invalid pem is not possible")) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum ExportPrivateKeyJwkError { + #[class(inherit)] #[error(transparent)] AsymmetricPublicKeyJwk(#[from] AsymmetricPrivateKeyJwkError), + #[class(type)] #[error("very large data")] VeryLargeData, + #[class(generic)] #[error(transparent)] Der(#[from] der::Error), } @@ -2464,9 +2546,9 @@ pub fn op_node_key_type(#[cppgc] handle: &KeyObjectHandle) -> &'static str { #[cppgc] pub fn op_node_derive_public_key_from_private_key( #[cppgc] handle: &KeyObjectHandle, -) -> Result { +) -> Result { let Some(private_key) = handle.as_private_key() else { - return Err(type_error("expected private key")); + return Err(JsErrorBox::type_error("expected private key")); }; Ok(KeyObjectHandle::AsymmetricPublic( diff --git a/ext/node/ops/crypto/mod.rs b/ext/node/ops/crypto/mod.rs index 19a0bbfc401e4e..8c6b5713165cfd 100644 --- a/ext/node/ops/crypto/mod.rs +++ b/ext/node/ops/crypto/mod.rs @@ -2,14 +2,13 @@ use std::future::Future; use std::rc::Rc; -use deno_core::error::generic_error; -use deno_core::error::type_error; use deno_core::op2; use deno_core::unsync::spawn_blocking; use deno_core::JsBuffer; use deno_core::OpState; use deno_core::StringOrBuffer; use deno_core::ToJsBuffer; +use deno_error::JsErrorBox; use elliptic_curve::sec1::ToEncodedPoint; use hkdf::Hkdf; use keys::AsymmetricPrivateKey; @@ -141,16 +140,21 @@ pub fn op_node_hash_clone( hasher.clone_inner(output_length.map(|l| l as usize)) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum PrivateEncryptDecryptError { + #[class(generic)] #[error(transparent)] Pkcs8(#[from] pkcs8::Error), + #[class(generic)] #[error(transparent)] Spki(#[from] spki::Error), + #[class(generic)] #[error(transparent)] Utf8(#[from] std::str::Utf8Error), + #[class(generic)] #[error(transparent)] Rsa(#[from] rsa::Error), + #[class(type)] #[error("Unknown padding")] UnknownPadding, } @@ -269,10 +273,7 @@ pub fn op_node_cipheriv_final( #[buffer] input: &[u8], #[anybuffer] output: &mut [u8], ) -> Result>, cipher::CipherContextError> { - let context = state - .resource_table - .take::(rid) - .map_err(cipher::CipherContextError::Resource)?; + let context = state.resource_table.take::(rid)?; let context = Rc::try_unwrap(context) .map_err(|_| cipher::CipherContextError::ContextInUse)?; context.r#final(auto_pad, input, output).map_err(Into::into) @@ -284,10 +285,7 @@ pub fn op_node_cipheriv_take( state: &mut OpState, #[smi] rid: u32, ) -> Result>, cipher::CipherContextError> { - let context = state - .resource_table - .take::(rid) - .map_err(cipher::CipherContextError::Resource)?; + let context = state.resource_table.take::(rid)?; let context = Rc::try_unwrap(context) .map_err(|_| cipher::CipherContextError::ContextInUse)?; Ok(context.take_tag()) @@ -339,10 +337,7 @@ pub fn op_node_decipheriv_take( state: &mut OpState, #[smi] rid: u32, ) -> Result<(), cipher::DecipherContextError> { - let context = state - .resource_table - .take::(rid) - .map_err(cipher::DecipherContextError::Resource)?; + let context = state.resource_table.take::(rid)?; Rc::try_unwrap(context) .map_err(|_| cipher::DecipherContextError::ContextInUse)?; Ok(()) @@ -357,10 +352,7 @@ pub fn op_node_decipheriv_final( #[anybuffer] output: &mut [u8], #[buffer] auth_tag: &[u8], ) -> Result<(), cipher::DecipherContextError> { - let context = state - .resource_table - .take::(rid) - 
.map_err(cipher::DecipherContextError::Resource)?; + let context = state.resource_table.take::(rid)?; let context = Rc::try_unwrap(context) .map_err(|_| cipher::DecipherContextError::ContextInUse)?; context @@ -403,10 +395,12 @@ pub fn op_node_verify( ) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum Pbkdf2Error { + #[class(type)] #[error("unsupported digest: {0}")] UnsupportedDigest(String), + #[class(inherit)] #[error(transparent)] Join(#[from] tokio::task::JoinError), } @@ -475,14 +469,18 @@ pub async fn op_node_fill_random_async(#[smi] len: i32) -> ToJsBuffer { .unwrap() } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum HkdfError { + #[class(type)] #[error("expected secret key")] ExpectedSecretKey, + #[class(type)] #[error("HKDF-Expand failed")] HkdfExpandFailed, + #[class(type)] #[error("Unsupported digest: {0}")] UnsupportedDigest(String), + #[class(inherit)] #[error(transparent)] Join(#[from] tokio::task::JoinError), } @@ -576,7 +574,7 @@ fn scrypt( parallelization: u32, _maxmem: u32, output_buffer: &mut [u8], -) -> Result<(), deno_core::error::AnyError> { +) -> Result<(), JsErrorBox> { // Construct Params let params = scrypt::Params::new( cost as u8, @@ -592,7 +590,7 @@ fn scrypt( Ok(()) } else { // TODO(lev): key derivation failed, so what? - Err(generic_error("scrypt key derivation failed")) + Err(JsErrorBox::generic("scrypt key derivation failed")) } } @@ -607,7 +605,7 @@ pub fn op_node_scrypt_sync( #[smi] parallelization: u32, #[smi] maxmem: u32, #[anybuffer] output_buffer: &mut [u8], -) -> Result<(), deno_core::error::AnyError> { +) -> Result<(), JsErrorBox> { scrypt( password, salt, @@ -620,12 +618,14 @@ pub fn op_node_scrypt_sync( ) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum ScryptAsyncError { + #[class(inherit)] #[error(transparent)] Join(#[from] tokio::task::JoinError), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Other(JsErrorBox), } #[op2(async)] @@ -658,12 +658,15 @@ pub async fn op_node_scrypt_async( .await? 
} -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum EcdhEncodePubKey { + #[class(type)] #[error("Invalid public key")] InvalidPublicKey, + #[class(type)] #[error("Unsupported curve")] UnsupportedCurve, + #[class(generic)] #[error(transparent)] Sec1(#[from] sec1::Error), } @@ -743,7 +746,7 @@ pub fn op_node_ecdh_generate_keys( #[buffer] pubbuf: &mut [u8], #[buffer] privbuf: &mut [u8], #[string] format: &str, -) -> Result<(), deno_core::error::AnyError> { +) -> Result<(), JsErrorBox> { let mut rng = rand::thread_rng(); let compress = format == "compressed"; match curve { @@ -780,7 +783,10 @@ pub fn op_node_ecdh_generate_keys( Ok(()) } - &_ => Err(type_error(format!("Unsupported curve: {}", curve))), + &_ => Err(JsErrorBox::type_error(format!( + "Unsupported curve: {}", + curve + ))), } } @@ -913,7 +919,8 @@ pub async fn op_node_gen_prime_async( spawn_blocking(move || gen_prime(size)).await } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum DiffieHellmanError { #[error("Expected private key")] ExpectedPrivateKey, @@ -1005,7 +1012,8 @@ pub fn op_node_diffie_hellman( Ok(res) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum SignEd25519Error { #[error("Expected private key")] ExpectedPrivateKey, @@ -1037,7 +1045,8 @@ pub fn op_node_sign_ed25519( Ok(()) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum VerifyEd25519Error { #[error("Expected public key")] ExpectedPublicKey, diff --git a/ext/node/ops/crypto/sign.rs b/ext/node/ops/crypto/sign.rs index e7c15d16b755d9..74ed50eb2b2ef5 100644 --- a/ext/node/ops/crypto/sign.rs +++ b/ext/node/ops/crypto/sign.rs @@ -39,7 +39,8 @@ where } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] pub enum KeyObjectHandlePrehashedSignAndVerifyError { #[error("invalid DSA signature encoding")] InvalidDsaSignatureEncoding, @@ -47,10 +48,12 @@ pub enum KeyObjectHandlePrehashedSignAndVerifyError { KeyIsNotPrivate, #[error("digest not allowed for RSA signature: {0}")] DigestNotAllowedForRsaSignature(String), + #[class(generic)] #[error("failed to sign digest with RSA")] FailedToSignDigestWithRsa, #[error("digest not allowed for RSA-PSS signature: {0}")] DigestNotAllowedForRsaPssSignature(String), + #[class(generic)] #[error("failed to sign digest with RSA-PSS")] FailedToSignDigestWithRsaPss, #[error("failed to sign digest with DSA")] diff --git a/ext/node/ops/crypto/x509.rs b/ext/node/ops/crypto/x509.rs index 23b19720e33fed..ad931f01ff734d 100644 --- a/ext/node/ops/crypto/x509.rs +++ b/ext/node/ops/crypto/x509.rs @@ -59,11 +59,13 @@ impl<'a> Deref for CertificateView<'a> { } } +deno_error::js_error_wrapper!(X509Error, JsX509Error, "Error"); + #[op2] #[cppgc] pub fn op_node_x509_parse( #[buffer] buf: &[u8], -) -> Result { +) -> Result { let source = match pem::parse_x509_pem(buf) { Ok((_, pem)) => CertificateSources::Pem(pem), Err(_) => CertificateSources::Der(buf.to_vec().into_boxed_slice()), @@ -154,18 +156,18 @@ pub fn op_node_x509_fingerprint512( #[string] pub fn op_node_x509_get_issuer( #[cppgc] cert: &Certificate, -) -> Result { +) -> Result { let cert = cert.inner.get().deref(); - x509name_to_string(cert.issuer(), oid_registry()) + x509name_to_string(cert.issuer(), oid_registry()).map_err(Into::into) } #[op2] #[string] pub fn 
op_node_x509_get_subject( #[cppgc] cert: &Certificate, -) -> Result { +) -> Result { let cert = cert.inner.get().deref(); - x509name_to_string(cert.subject(), oid_registry()) + x509name_to_string(cert.subject(), oid_registry()).map_err(Into::into) } #[op2] diff --git a/ext/node/ops/fs.rs b/ext/node/ops/fs.rs index 73db3ea2e562fa..0e9310375c7a32 100644 --- a/ext/node/ops/fs.rs +++ b/ext/node/ops/fs.rs @@ -10,27 +10,40 @@ use serde::Serialize; use crate::NodePermissions; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum FsError { + #[class(inherit)] #[error(transparent)] Permission(#[from] deno_permissions::PermissionCheckError), + #[class(inherit)] #[error("{0}")] - Io(#[from] std::io::Error), + Io( + #[from] + #[inherit] + std::io::Error, + ), #[cfg(windows)] + #[class(generic)] #[error("Path has no root.")] PathHasNoRoot, #[cfg(not(any(unix, windows)))] + #[class(generic)] #[error("Unsupported platform.")] UnsupportedPlatform, + #[class(inherit)] #[error(transparent)] - Fs(#[from] deno_io::fs::FsError), + Fs( + #[from] + #[inherit] + deno_io::fs::FsError, + ), } #[op2(fast, stack_trace)] pub fn op_node_fs_exists_sync
<P>
( state: &mut OpState, #[string] path: String, -) -> Result +) -> Result where P: NodePermissions + 'static, { diff --git a/ext/node/ops/http.rs b/ext/node/ops/http.rs index ad6217b6a68132..9723b0d3be6685 100644 --- a/ext/node/ops/http.rs +++ b/ext/node/ops/http.rs @@ -10,8 +10,7 @@ use std::task::Context; use std::task::Poll; use bytes::Bytes; -use deno_core::error::bad_resource; -use deno_core::error::type_error; +use deno_core::error::ResourceError; use deno_core::futures::stream::Peekable; use deno_core::futures::Future; use deno_core::futures::FutureExt; @@ -33,6 +32,8 @@ use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; +use deno_error::JsError; +use deno_error::JsErrorBox; use deno_fetch::FetchCancelHandle; use deno_fetch::FetchReturn; use deno_fetch::ResBody; @@ -88,32 +89,45 @@ impl deno_core::Resource for NodeHttpClientResponse { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, JsError)] pub enum ConnError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(ResourceError), + #[class(inherit)] #[error(transparent)] Permission(#[from] PermissionCheckError), + #[class(type)] #[error("Invalid URL {0}")] InvalidUrl(Url), + #[class(type)] #[error(transparent)] InvalidHeaderName(#[from] http::header::InvalidHeaderName), + #[class(type)] #[error(transparent)] InvalidHeaderValue(#[from] http::header::InvalidHeaderValue), + #[class(inherit)] #[error(transparent)] Url(#[from] url::ParseError), + #[class(type)] #[error(transparent)] Method(#[from] http::method::InvalidMethod), + #[class(inherit)] #[error(transparent)] Io(#[from] std::io::Error), + #[class("Busy")] #[error("TLS stream is currently in use")] TlsStreamBusy, + #[class("Busy")] #[error("TCP stream is currently in use")] TcpStreamBusy, + #[class(generic)] #[error(transparent)] ReuniteTcp(#[from] tokio::net::tcp::ReuniteError), + #[class(inherit)] #[error(transparent)] Canceled(#[from] deno_core::Canceled), + #[class("Http")] #[error(transparent)] Hyper(#[from] hyper::Error), } @@ -274,8 +288,11 @@ pub async fn op_node_http_await_response( .resource_table .take::(rid) .map_err(ConnError::Resource)?; - let resource = Rc::try_unwrap(resource) - .map_err(|_| ConnError::Resource(bad_resource("NodeHttpClientResponse")))?; + let resource = Rc::try_unwrap(resource).map_err(|_| { + ConnError::Resource(ResourceError::Other( + "NodeHttpClientResponse".to_string(), + )) + })?; let res = resource.response.await??; let status = res.status(); @@ -296,7 +313,7 @@ pub async fn op_node_http_await_response( }; let (parts, body) = res.into_parts(); - let body = body.map_err(deno_core::anyhow::Error::from); + let body = body.map_err(|e| JsErrorBox::new("Http", e.to_string())); let body = body.boxed(); let res = http::Response::from_parts(parts, body); @@ -523,7 +540,7 @@ impl Resource for NodeHttpResponseResource { // safely call `await` on it without creating a race condition. 
Some(_) => match reader.as_mut().next().await.unwrap() { Ok(chunk) => assert!(chunk.is_empty()), - Err(err) => break Err(type_error(err.to_string())), + Err(err) => break Err(JsErrorBox::type_error(err.to_string())), }, None => break Ok(BufView::empty()), } @@ -547,9 +564,7 @@ impl Resource for NodeHttpResponseResource { #[allow(clippy::type_complexity)] pub struct NodeHttpResourceToBodyAdapter( Rc, - Option< - Pin>>>, - >, + Option>>>>, ); impl NodeHttpResourceToBodyAdapter { @@ -565,7 +580,7 @@ unsafe impl Send for NodeHttpResourceToBodyAdapter {} unsafe impl Sync for NodeHttpResourceToBodyAdapter {} impl Stream for NodeHttpResourceToBodyAdapter { - type Item = Result; + type Item = Result; fn poll_next( self: Pin<&mut Self>, @@ -596,7 +611,7 @@ impl Stream for NodeHttpResourceToBodyAdapter { impl hyper::body::Body for NodeHttpResourceToBodyAdapter { type Data = Bytes; - type Error = deno_core::anyhow::Error; + type Error = JsErrorBox; fn poll_frame( self: Pin<&mut Self>, diff --git a/ext/node/ops/http2.rs b/ext/node/ops/http2.rs index c6c6484477ff00..2308ca82540ec0 100644 --- a/ext/node/ops/http2.rs +++ b/ext/node/ops/http2.rs @@ -7,6 +7,7 @@ use std::rc::Rc; use std::task::Poll; use bytes::Bytes; +use deno_core::error::ResourceError; use deno_core::futures::future::poll_fn; use deno_core::op2; use deno_core::serde::Serialize; @@ -109,14 +110,32 @@ impl Resource for Http2ServerSendResponse { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum Http2Error { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource( + #[from] + #[inherit] + ResourceError, + ), + #[class(inherit)] #[error(transparent)] - UrlParse(#[from] url::ParseError), + UrlParse( + #[from] + #[inherit] + url::ParseError, + ), + #[class(generic)] #[error(transparent)] H2(#[from] h2::Error), + #[class(inherit)] + #[error(transparent)] + TakeNetworkStream( + #[from] + #[inherit] + deno_net::raw::TakeNetworkStreamError, + ), } #[op2(async)] @@ -129,8 +148,7 @@ pub async fn op_http2_connect( // No permission check necessary because we're using an existing connection let network_stream = { let mut state = state.borrow_mut(); - take_network_stream_resource(&mut state.resource_table, rid) - .map_err(Http2Error::Resource)? + take_network_stream_resource(&mut state.resource_table, rid)? 
}; let url = Url::parse(&url)?; @@ -156,8 +174,7 @@ pub async fn op_http2_listen( #[smi] rid: ResourceId, ) -> Result { let stream = - take_network_stream_resource(&mut state.borrow_mut().resource_table, rid) - .map_err(Http2Error::Resource)?; + take_network_stream_resource(&mut state.borrow_mut().resource_table, rid)?; let conn = h2::server::Builder::new().handshake(stream).await?; Ok( @@ -182,8 +199,7 @@ pub async fn op_http2_accept( let resource = state .borrow() .resource_table - .get::(rid) - .map_err(Http2Error::Resource)?; + .get::(rid)?; let mut conn = RcRef::map(&resource, |r| &r.conn).borrow_mut().await; if let Some(res) = conn.accept().await { let (req, resp) = res?; @@ -249,8 +265,7 @@ pub async fn op_http2_send_response( let resource = state .borrow() .resource_table - .get::(rid) - .map_err(Http2Error::Resource)?; + .get::(rid)?; let mut send_response = RcRef::map(resource, |r| &r.send_response) .borrow_mut() .await; @@ -276,11 +291,7 @@ pub async fn op_http2_poll_client_connection( state: Rc>, #[smi] rid: ResourceId, ) -> Result<(), Http2Error> { - let resource = state - .borrow() - .resource_table - .get::(rid) - .map_err(Http2Error::Resource)?; + let resource = state.borrow().resource_table.get::(rid)?; let cancel_handle = RcRef::map(resource.clone(), |this| &this.cancel_handle); let mut conn = RcRef::map(resource, |this| &this.conn).borrow_mut().await; @@ -310,8 +321,7 @@ pub async fn op_http2_client_request( let resource = state .borrow() .resource_table - .get::(client_rid) - .map_err(Http2Error::Resource)?; + .get::(client_rid)?; let url = resource.url.clone(); @@ -344,10 +354,7 @@ pub async fn op_http2_client_request( let resource = { let state = state.borrow(); - state - .resource_table - .get::(client_rid) - .map_err(Http2Error::Resource)? + state.resource_table.get::(client_rid)? 
}; let mut client = RcRef::map(&resource, |r| &r.client).borrow_mut().await; poll_fn(|cx| client.poll_ready(cx)).await?; @@ -370,8 +377,7 @@ pub async fn op_http2_client_send_data( let resource = state .borrow() .resource_table - .get::(stream_rid) - .map_err(Http2Error::Resource)?; + .get::(stream_rid)?; let mut stream = RcRef::map(&resource, |r| &r.stream).borrow_mut().await; stream.send_data(data.to_vec().into(), end_of_stream)?; @@ -383,7 +389,7 @@ pub async fn op_http2_client_reset_stream( state: Rc>, #[smi] stream_rid: ResourceId, #[smi] code: u32, -) -> Result<(), deno_core::error::AnyError> { +) -> Result<(), ResourceError> { let resource = state .borrow() .resource_table @@ -402,8 +408,7 @@ pub async fn op_http2_client_send_trailers( let resource = state .borrow() .resource_table - .get::(stream_rid) - .map_err(Http2Error::Resource)?; + .get::(stream_rid)?; let mut stream = RcRef::map(&resource, |r| &r.stream).borrow_mut().await; let mut trailers_map = http::HeaderMap::new(); @@ -435,8 +440,7 @@ pub async fn op_http2_client_get_response( let resource = state .borrow() .resource_table - .get::(stream_rid) - .map_err(Http2Error::Resource)?; + .get::(stream_rid)?; let mut response_future = RcRef::map(&resource, |r| &r.response).borrow_mut().await; @@ -506,8 +510,7 @@ pub async fn op_http2_client_get_response_body_chunk( let resource = state .borrow() .resource_table - .get::(body_rid) - .map_err(Http2Error::Resource)?; + .get::(body_rid)?; let mut body = RcRef::map(&resource, |r| &r.body).borrow_mut().await; loop { @@ -550,7 +553,7 @@ pub async fn op_http2_client_get_response_body_chunk( pub async fn op_http2_client_get_response_trailers( state: Rc>, #[smi] body_rid: ResourceId, -) -> Result>, deno_core::error::AnyError> { +) -> Result>, ResourceError> { let resource = state .borrow() .resource_table diff --git a/ext/node/ops/idna.rs b/ext/node/ops/idna.rs index 4ae1ce3954adae..24bcb97c633dad 100644 --- a/ext/node/ops/idna.rs +++ b/ext/node/ops/idna.rs @@ -9,16 +9,21 @@ use deno_core::op2; const PUNY_PREFIX: &str = "xn--"; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum IdnaError { + #[class(range)] #[error("Invalid input")] InvalidInput, + #[class(generic)] #[error("Input would take more than 63 characters to encode")] InputTooLong, + #[class(range)] #[error("Illegal input >= 0x80 (not a basic code point)")] IllegalInput, } +deno_error::js_error_wrapper!(idna::Errors, JsIdnaErrors, "Error"); + /// map a domain by mapping each label with the given function fn map_domain( domain: &str, @@ -113,8 +118,8 @@ pub fn op_node_idna_punycode_to_unicode( #[string] pub fn op_node_idna_domain_to_ascii( #[string] domain: String, -) -> Result { - idna::domain_to_ascii(&domain) +) -> Result { + idna::domain_to_ascii(&domain).map_err(Into::into) } /// Converts a domain to Unicode as per the IDNA spec diff --git a/ext/node/ops/inspector.rs b/ext/node/ops/inspector.rs index 03cfed4592333f..c462523715890d 100644 --- a/ext/node/ops/inspector.rs +++ b/ext/node/ops/inspector.rs @@ -3,8 +3,6 @@ use std::cell::RefCell; use std::rc::Rc; -use deno_core::anyhow::Error; -use deno_core::error::generic_error; use deno_core::futures::channel::mpsc; use deno_core::op2; use deno_core::v8; @@ -13,6 +11,7 @@ use deno_core::InspectorSessionKind; use deno_core::InspectorSessionOptions; use deno_core::JsRuntimeInspector; use deno_core::OpState; +use deno_error::JsErrorBox; use crate::NodePermissions; @@ -27,7 +26,7 @@ pub fn op_inspector_open
<P>
( _state: &mut OpState, _port: Option, #[string] _host: Option, -) -> Result<(), Error> +) -> Result<(), JsErrorBox> where P: NodePermissions + 'static, { @@ -87,6 +86,20 @@ struct JSInspectorSession { impl GarbageCollected for JSInspectorSession {} +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum InspectorConnectError { + #[class(inherit)] + #[error(transparent)] + Permission( + #[from] + #[inherit] + deno_permissions::PermissionCheckError, + ), + #[class(generic)] + #[error("connectToMainThread not supported")] + ConnectToMainThreadUnsupported, +} + #[op2(stack_trace)] #[cppgc] pub fn op_inspector_connect<'s, P>( @@ -95,7 +108,7 @@ pub fn op_inspector_connect<'s, P>( state: &mut OpState, connect_to_main_thread: bool, callback: v8::Local<'s, v8::Function>, -) -> Result +) -> Result where P: NodePermissions + 'static, { @@ -104,7 +117,7 @@ where .check_sys("inspector", "inspector.Session.connect")?; if connect_to_main_thread { - return Err(generic_error("connectToMainThread not supported")); + return Err(InspectorConnectError::ConnectToMainThreadUnsupported); } let context = scope.get_current_context(); diff --git a/ext/node/ops/ipc.rs b/ext/node/ops/ipc.rs index 0eb3ae6aae162a..cf5e1e97efc3d8 100644 --- a/ext/node/ops/ipc.rs +++ b/ext/node/ops/ipc.rs @@ -30,6 +30,7 @@ mod impl_ { use deno_core::RcRef; use deno_core::ResourceId; use deno_core::ToV8; + use deno_error::JsErrorBox; use deno_io::BiPipe; use deno_io::BiPipeRead; use deno_io::BiPipeWrite; @@ -79,7 +80,7 @@ mod impl_ { } else if value.is_string_object() { let str = deno_core::serde_v8::to_utf8( value.to_string(scope).ok_or_else(|| { - S::Error::custom(deno_core::error::generic_error( + S::Error::custom(deno_error::JsErrorBox::generic( "toString on string object failed", )) })?, @@ -152,7 +153,7 @@ mod impl_ { map.end() } else { // TODO(nathanwhit): better error message - Err(S::Error::custom(deno_core::error::type_error(format!( + Err(S::Error::custom(JsErrorBox::type_error(format!( "Unsupported type: {}", value.type_repr() )))) @@ -177,14 +178,18 @@ mod impl_ { )) } - #[derive(Debug, thiserror::Error)] + #[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum IpcError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(inherit)] #[error(transparent)] IpcJsonStream(#[from] IpcJsonStreamError), + #[class(inherit)] #[error(transparent)] Canceled(#[from] deno_core::Canceled), + #[class(inherit)] #[error("failed to serialize json value: {0}")] SerdeJson(serde_json::Error), } @@ -210,8 +215,7 @@ mod impl_ { let stream = state .borrow() .resource_table - .get::(rid) - .map_err(IpcError::Resource)?; + .get::(rid)?; let old = stream .queued_bytes .fetch_add(serialized.len(), std::sync::atomic::Ordering::Relaxed); @@ -255,8 +259,7 @@ mod impl_ { let stream = state .borrow() .resource_table - .get::(rid) - .map_err(IpcError::Resource)?; + .get::(rid)?; let cancel = stream.cancel.clone(); let mut stream = RcRef::map(stream, |r| &r.read_half).borrow_mut().await; @@ -467,10 +470,12 @@ mod impl_ { } } - #[derive(Debug, thiserror::Error)] + #[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum IpcJsonStreamError { + #[class(inherit)] #[error("{0}")] Io(#[source] std::io::Error), + #[class(generic)] #[error("{0}")] SimdJson(#[source] simd_json::Error), } diff --git a/ext/node/ops/os/mod.rs b/ext/node/ops/os/mod.rs index 944f9506079b3c..ad0be8200e4da6 100644 --- a/ext/node/ops/os/mod.rs +++ b/ext/node/ops/os/mod.rs 
@@ -4,6 +4,7 @@ use std::mem::MaybeUninit; use deno_core::op2; use deno_core::OpState; +use deno_permissions::PermissionCheckError; use sys_traits::EnvHomeDir; use crate::NodePermissions; @@ -11,16 +12,28 @@ use crate::NodePermissions; mod cpus; pub mod priority; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum OsError { + #[class(inherit)] #[error(transparent)] - Priority(priority::PriorityError), + Priority(#[inherit] priority::PriorityError), + #[class(inherit)] #[error(transparent)] - Permission(#[from] deno_permissions::PermissionCheckError), + Permission( + #[from] + #[inherit] + PermissionCheckError, + ), + #[class(type)] #[error("Failed to get cpu info")] FailedToGetCpuInfo, + #[class(inherit)] #[error("Failed to get user info")] - FailedToGetUserInfo(#[source] std::io::Error), + FailedToGetUserInfo( + #[source] + #[inherit] + std::io::Error, + ), } #[op2(fast, stack_trace)] @@ -215,9 +228,7 @@ where } #[op2(fast, stack_trace)] -pub fn op_geteuid
<P>
( - state: &mut OpState, -) -> Result +pub fn op_geteuid
<P>
(state: &mut OpState) -> Result where P: NodePermissions + 'static, { @@ -236,9 +247,7 @@ where } #[op2(fast, stack_trace)] -pub fn op_getegid
<P>
( - state: &mut OpState, -) -> Result +pub fn op_getegid
<P>
(state: &mut OpState) -> Result where P: NodePermissions + 'static, { @@ -274,7 +283,7 @@ where #[string] pub fn op_homedir
<P>
( state: &mut OpState, -) -> Result, deno_core::error::AnyError> +) -> Result, PermissionCheckError> where P: NodePermissions + 'static, { diff --git a/ext/node/ops/os/priority.rs b/ext/node/ops/os/priority.rs index f9e686ceba3fab..10640e4942be71 100644 --- a/ext/node/ops/os/priority.rs +++ b/ext/node/ops/os/priority.rs @@ -2,11 +2,13 @@ pub use impl_::*; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum PriorityError { + #[class(inherit)] #[error("{0}")] Io(#[from] std::io::Error), #[cfg(windows)] + #[class(type)] #[error("Invalid priority")] InvalidPriority, } diff --git a/ext/node/ops/perf_hooks.rs b/ext/node/ops/perf_hooks.rs index eca5fe2fa8531b..9c0fd01385c00d 100644 --- a/ext/node/ops/perf_hooks.rs +++ b/ext/node/ops/perf_hooks.rs @@ -5,8 +5,9 @@ use std::cell::Cell; use deno_core::op2; use deno_core::GarbageCollected; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum PerfHooksError { + #[class(generic)] #[error(transparent)] TokioEld(#[from] tokio_eld::Error), } diff --git a/ext/node/ops/process.rs b/ext/node/ops/process.rs index 0ef360af8c9660..f28e45243768a3 100644 --- a/ext/node/ops/process.rs +++ b/ext/node/ops/process.rs @@ -50,7 +50,7 @@ pub fn op_node_process_kill( state: &mut OpState, #[smi] pid: i32, #[smi] sig: i32, -) -> Result { +) -> Result { state .borrow_mut::() .check_run_all("process.kill")?; diff --git a/ext/node/ops/require.rs b/ext/node/ops/require.rs index 218079d4e1a0fa..3135ba1e864a92 100644 --- a/ext/node/ops/require.rs +++ b/ext/node/ops/require.rs @@ -7,13 +7,13 @@ use std::path::PathBuf; use std::rc::Rc; use boxed_error::Boxed; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::url::Url; use deno_core::v8; use deno_core::FastString; use deno_core::JsRuntimeInspector; use deno_core::OpState; +use deno_error::JsErrorBox; use deno_package_json::PackageJsonRc; use deno_path_util::normalize_path; use deno_path_util::url_from_file_path; @@ -37,7 +37,7 @@ use crate::PackageJsonResolverRc; fn ensure_read_permission<'a, P>( state: &mut OpState, file_path: &'a Path, -) -> Result, deno_core::error::AnyError> +) -> Result, JsErrorBox> where P: NodePermissions + 'static, { @@ -46,37 +46,67 @@ where loader.ensure_read_permission(permissions, file_path) } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, deno_error::JsError)] pub struct RequireError(pub Box); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum RequireErrorKind { + #[class(inherit)] #[error(transparent)] - UrlParse(#[from] url::ParseError), + UrlParse( + #[from] + #[inherit] + url::ParseError, + ), + #[class(inherit)] #[error(transparent)] - Permission(deno_core::error::AnyError), + Permission(#[inherit] JsErrorBox), + #[class(generic)] #[error(transparent)] PackageExportsResolve( #[from] node_resolver::errors::PackageExportsResolveError, ), + #[class(generic)] #[error(transparent)] PackageJsonLoad(#[from] node_resolver::errors::PackageJsonLoadError), + #[class(generic)] #[error(transparent)] - ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError), + ClosestPkgJson(#[from] ClosestPkgJsonError), + #[class(generic)] #[error(transparent)] PackageImportsResolve( #[from] node_resolver::errors::PackageImportsResolveError, ), + #[class(generic)] #[error(transparent)] FilePathConversion(#[from] deno_path_util::UrlToFilePathError), + #[class(generic)] #[error(transparent)] UrlConversion(#[from] deno_path_util::PathToUrlError), + 
#[class(inherit)] #[error(transparent)] - Fs(#[from] std::io::Error), + Fs( + #[from] + #[inherit] + deno_io::fs::FsError, + ), + #[class(inherit)] #[error(transparent)] - ReadModule(deno_core::error::AnyError), + Io( + #[from] + #[inherit] + std::io::Error, + ), + #[class(inherit)] + #[error(transparent)] + ReadModule( + #[from] + #[inherit] + JsErrorBox, + ), + #[class(inherit)] #[error("Unable to get CWD: {0}")] - UnableToGetCwd(std::io::Error), + UnableToGetCwd(#[inherit] std::io::Error), } #[op2] @@ -230,8 +260,9 @@ pub fn op_require_resolve_deno_dir( state: &mut OpState, #[string] request: String, #[string] parent_filename: String, -) -> Result, AnyError> { +) -> Result, deno_path_util::PathToUrlError> { let resolver = state.borrow::(); + Ok( resolver .resolve_package_folder_from_package( @@ -309,7 +340,7 @@ pub fn op_require_stat< >( state: &mut OpState, #[string] path: String, -) -> Result { +) -> Result { let path = PathBuf::from(path); let path = ensure_read_permission::
<P>
(state, &path)?; let sys = state.borrow::(); @@ -337,8 +368,9 @@ pub fn op_require_real_path< let path = ensure_read_permission::
<P>
(state, &path) .map_err(RequireErrorKind::Permission)?; let sys = state.borrow::(); - let canonicalized_path = - deno_path_util::strip_unc_prefix(sys.fs_canonicalize(&path)?); + let canonicalized_path = deno_path_util::strip_unc_prefix( + sys.fs_canonicalize(&path).map_err(RequireErrorKind::Io)?, + ); Ok(canonicalized_path.to_string_lossy().into_owned()) } @@ -362,14 +394,12 @@ pub fn op_require_path_resolve(#[serde] parts: Vec) -> String { #[string] pub fn op_require_path_dirname( #[string] request: String, -) -> Result { +) -> Result { let p = PathBuf::from(request); if let Some(parent) = p.parent() { Ok(parent.to_string_lossy().into_owned()) } else { - Err(deno_core::error::generic_error( - "Path doesn't have a parent", - )) + Err(JsErrorBox::generic("Path doesn't have a parent")) } } @@ -377,14 +407,12 @@ pub fn op_require_path_dirname( #[string] pub fn op_require_path_basename( #[string] request: String, -) -> Result { +) -> Result { let p = PathBuf::from(request); if let Some(path) = p.file_name() { Ok(path.to_string_lossy().into_owned()) } else { - Err(deno_core::error::generic_error( - "Path doesn't have a file name", - )) + Err(JsErrorBox::generic("Path doesn't have a file name")) } } @@ -398,7 +426,7 @@ pub fn op_require_try_self_parent_path< has_parent: bool, #[string] maybe_parent_filename: Option, #[string] maybe_parent_id: Option, -) -> Result, deno_core::error::AnyError> { +) -> Result, JsErrorBox> { if !has_parent { return Ok(None); } @@ -583,17 +611,23 @@ pub fn op_require_resolve_exports< })) } +deno_error::js_error_wrapper!( + ClosestPkgJsonError, + JsClosestPkgJsonError, + "Error" +); + #[op2(fast)] pub fn op_require_is_maybe_cjs( state: &mut OpState, #[string] filename: String, -) -> Result { +) -> Result { let filename = PathBuf::from(filename); let Ok(url) = url_from_file_path(&filename) else { return Ok(false); }; let loader = state.borrow::(); - loader.is_maybe_cjs(&url) + loader.is_maybe_cjs(&url).map_err(Into::into) } #[op2(stack_trace)] diff --git a/ext/node/ops/util.rs b/ext/node/ops/util.rs index bc1d2c0588e12d..1af4f7edbd9206 100644 --- a/ext/node/ops/util.rs +++ b/ext/node/ops/util.rs @@ -21,7 +21,7 @@ enum HandleType { pub fn op_node_guess_handle_type( state: &mut OpState, rid: u32, -) -> Result { +) -> Result { let handle = state.resource_table.get_handle(rid)?; let handle_type = match handle { diff --git a/ext/node/ops/v8.rs b/ext/node/ops/v8.rs index 8f4a70dccf405c..c268d419258bc8 100644 --- a/ext/node/ops/v8.rs +++ b/ext/node/ops/v8.rs @@ -7,6 +7,7 @@ use deno_core::v8; use deno_core::FastString; use deno_core::GarbageCollected; use deno_core::ToJsBuffer; +use deno_error::JsErrorBox; use v8::ValueDeserializerHelper; use v8::ValueSerializerHelper; @@ -274,13 +275,11 @@ pub fn op_v8_new_deserializer( scope: &mut v8::HandleScope, obj: v8::Local, buffer: v8::Local, -) -> Result, deno_core::error::AnyError> { +) -> Result, JsErrorBox> { let offset = buffer.byte_offset(); let len = buffer.byte_length(); let backing_store = buffer.get_backing_store().ok_or_else(|| { - deno_core::error::generic_error( - "deserialization buffer has no backing store", - ) + JsErrorBox::generic("deserialization buffer has no backing store") })?; let (buf_slice, buf_ptr) = if let Some(data) = backing_store.data() { // SAFETY: the offset is valid for the underlying buffer because we're getting it directly from v8 @@ -322,10 +321,10 @@ pub fn op_v8_transfer_array_buffer_de( #[op2(fast)] pub fn op_v8_read_double( #[cppgc] deser: &Deserializer, -) -> Result { +) -> Result { let mut 
double = 0f64; if !deser.inner.read_double(&mut double) { - return Err(deno_core::error::type_error("ReadDouble() failed")); + return Err(JsErrorBox::type_error("ReadDouble() failed")); } Ok(double) } @@ -360,10 +359,10 @@ pub fn op_v8_read_raw_bytes( #[op2(fast)] pub fn op_v8_read_uint32( #[cppgc] deser: &Deserializer, -) -> Result { +) -> Result { let mut value = 0; if !deser.inner.read_uint32(&mut value) { - return Err(deno_core::error::type_error("ReadUint32() failed")); + return Err(JsErrorBox::type_error("ReadUint32() failed")); } Ok(value) @@ -373,10 +372,10 @@ pub fn op_v8_read_uint32( #[serde] pub fn op_v8_read_uint64( #[cppgc] deser: &Deserializer, -) -> Result<(u32, u32), deno_core::error::AnyError> { +) -> Result<(u32, u32), JsErrorBox> { let mut val = 0; if !deser.inner.read_uint64(&mut val) { - return Err(deno_core::error::type_error("ReadUint64() failed")); + return Err(JsErrorBox::type_error("ReadUint64() failed")); } Ok(((val >> 32) as u32, val as u32)) diff --git a/ext/node/ops/vm_internal.rs b/ext/node/ops/vm_internal.rs index e8c1cc02f02ba9..2219d05cd0c1b2 100644 --- a/ext/node/ops/vm_internal.rs +++ b/ext/node/ops/vm_internal.rs @@ -1,9 +1,8 @@ // Copyright 2018-2025 the Deno authors. MIT license. -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::v8; use deno_core::v8::MapFnTo; +use deno_error::JsErrorBox; use crate::create_host_defined_options; @@ -20,7 +19,7 @@ impl ContextifyScript { pub fn new( scope: &mut v8::HandleScope, source_str: v8::Local, - ) -> Result { + ) -> Result { let resource_name = v8::undefined(scope); let host_defined_options = create_host_defined_options(scope); let origin = v8::ScriptOrigin::new( @@ -45,7 +44,7 @@ impl ContextifyScript { v8::script_compiler::CompileOptions::NoCompileOptions, v8::script_compiler::NoCacheReason::NoReason, ) - .ok_or_else(|| type_error("Failed to compile script"))?; + .ok_or_else(|| JsErrorBox::type_error("Failed to compile script"))?; let script = v8::Global::new(scope, unbound_script); Ok(Self { script }) } diff --git a/ext/node/ops/worker_threads.rs b/ext/node/ops/worker_threads.rs index f7aa8c71cbea9e..ae3c28ef35a23d 100644 --- a/ext/node/ops/worker_threads.rs +++ b/ext/node/ops/worker_threads.rs @@ -7,6 +7,7 @@ use std::path::PathBuf; use deno_core::op2; use deno_core::url::Url; use deno_core::OpState; +use deno_error::JsErrorBox; use sys_traits::FsCanonicalize; use sys_traits::FsMetadata; @@ -18,7 +19,7 @@ use crate::NodeRequireLoaderRc; fn ensure_read_permission<'a, P>( state: &mut OpState, file_path: &'a Path, -) -> Result, deno_core::error::AnyError> +) -> Result, JsErrorBox> where P: NodePermissions + 'static, { @@ -27,24 +28,47 @@ where loader.ensure_read_permission(permissions, file_path) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum WorkerThreadsFilenameError { + #[class(inherit)] #[error(transparent)] - Permission(deno_core::error::AnyError), + Permission(JsErrorBox), + #[class(inherit)] #[error("{0}")] - UrlParse(#[from] url::ParseError), + UrlParse( + #[from] + #[inherit] + url::ParseError, + ), + #[class(generic)] #[error("Relative path entries must start with '.' 
or '..'")] InvalidRelativeUrl, + #[class(generic)] #[error("URL from Path-String")] UrlFromPathString, + #[class(generic)] #[error("URL to Path-String")] UrlToPathString, + #[class(generic)] #[error("URL to Path")] UrlToPath, + #[class(generic)] #[error("File not found [{0:?}]")] FileNotFound(PathBuf), + #[class(inherit)] #[error(transparent)] - Fs(#[from] std::io::Error), + Fs( + #[from] + #[inherit] + deno_io::fs::FsError, + ), + #[class(inherit)] + #[error(transparent)] + Io( + #[from] + #[inherit] + std::io::Error, + ), } // todo(dsherret): we should remove this and do all this work inside op_create_worker diff --git a/ext/node/ops/zlib/brotli.rs b/ext/node/ops/zlib/brotli.rs index 5d15df559f7ce3..5e4c1d16e68f96 100644 --- a/ext/node/ops/zlib/brotli.rs +++ b/ext/node/ops/zlib/brotli.rs @@ -18,20 +18,34 @@ use deno_core::OpState; use deno_core::Resource; use deno_core::ToJsBuffer; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum BrotliError { + #[class(type)] #[error("Invalid encoder mode")] InvalidEncoderMode, + #[class(type)] #[error("Failed to compress")] CompressFailed, + #[class(type)] #[error("Failed to decompress")] DecompressFailed, + #[class(inherit)] #[error(transparent)] - Join(#[from] tokio::task::JoinError), + Join( + #[from] + #[inherit] + tokio::task::JoinError, + ), + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource( + #[from] + #[inherit] + deno_core::error::ResourceError, + ), + #[class(inherit)] #[error("{0}")] - Io(std::io::Error), + Io(#[inherit] std::io::Error), } fn encoder_mode(mode: u32) -> Result { @@ -167,10 +181,7 @@ pub fn op_brotli_compress_stream( #[buffer] input: &[u8], #[buffer] output: &mut [u8], ) -> Result { - let ctx = state - .resource_table - .get::(rid) - .map_err(BrotliError::Resource)?; + let ctx = state.resource_table.get::(rid)?; let mut inst = ctx.inst.borrow_mut(); let mut output_offset = 0; @@ -199,10 +210,7 @@ pub fn op_brotli_compress_stream_end( #[smi] rid: u32, #[buffer] output: &mut [u8], ) -> Result { - let ctx = state - .resource_table - .get::(rid) - .map_err(BrotliError::Resource)?; + let ctx = state.resource_table.get::(rid)?; let mut inst = ctx.inst.borrow_mut(); let mut output_offset = 0; @@ -277,10 +285,7 @@ pub fn op_brotli_decompress_stream( #[buffer] input: &[u8], #[buffer] output: &mut [u8], ) -> Result { - let ctx = state - .resource_table - .get::(rid) - .map_err(BrotliError::Resource)?; + let ctx = state.resource_table.get::(rid)?; let mut inst = ctx.inst.borrow_mut(); let mut output_offset = 0; @@ -308,10 +313,7 @@ pub fn op_brotli_decompress_stream_end( #[smi] rid: u32, #[buffer] output: &mut [u8], ) -> Result { - let ctx = state - .resource_table - .get::(rid) - .map_err(BrotliError::Resource)?; + let ctx = state.resource_table.get::(rid)?; let mut inst = ctx.inst.borrow_mut(); let mut output_offset = 0; diff --git a/ext/node/ops/zlib/mod.rs b/ext/node/ops/zlib/mod.rs index b5277e7a342c30..892944bcea35f2 100644 --- a/ext/node/ops/zlib/mod.rs +++ b/ext/node/ops/zlib/mod.rs @@ -4,6 +4,7 @@ use std::borrow::Cow; use std::cell::RefCell; use deno_core::op2; +use deno_error::JsErrorBox; use libc::c_ulong; use zlib::*; @@ -18,11 +19,11 @@ use mode::Mode; use self::stream::StreamWrapper; #[inline] -fn check(condition: bool, msg: &str) -> Result<(), deno_core::error::AnyError> { +fn check(condition: bool, msg: &str) -> Result<(), JsErrorBox> { if condition { Ok(()) } else { - Err(deno_core::error::type_error(msg.to_string())) + 
Err(JsErrorBox::type_error(msg.to_string())) } } @@ -57,7 +58,7 @@ impl ZlibInner { out_off: u32, out_len: u32, flush: Flush, - ) -> Result<(), deno_core::error::AnyError> { + ) -> Result<(), JsErrorBox> { check(self.init_done, "write before init")?; check(!self.write_in_progress, "write already in progress")?; check(!self.pending_close, "close already in progress")?; @@ -66,11 +67,11 @@ impl ZlibInner { let next_in = input .get(in_off as usize..in_off as usize + in_len as usize) - .ok_or_else(|| deno_core::error::type_error("invalid input range"))? + .ok_or_else(|| JsErrorBox::type_error("invalid input range"))? .as_ptr() as *mut _; let next_out = out .get_mut(out_off as usize..out_off as usize + out_len as usize) - .ok_or_else(|| deno_core::error::type_error("invalid output range"))? + .ok_or_else(|| JsErrorBox::type_error("invalid output range"))? .as_mut_ptr(); self.strm.avail_in = in_len; @@ -82,10 +83,7 @@ impl ZlibInner { Ok(()) } - fn do_write( - &mut self, - flush: Flush, - ) -> Result<(), deno_core::error::AnyError> { + fn do_write(&mut self, flush: Flush) -> Result<(), JsErrorBox> { self.flush = flush; match self.mode { Mode::Deflate | Mode::Gzip | Mode::DeflateRaw => { @@ -131,7 +129,7 @@ impl ZlibInner { self.mode = Mode::Inflate; } } else if next_expected_header_byte.is_some() { - return Err(deno_core::error::type_error( + return Err(JsErrorBox::type_error( "invalid number of gzip magic number bytes read", )); } @@ -185,7 +183,7 @@ impl ZlibInner { Ok(()) } - fn init_stream(&mut self) -> Result<(), deno_core::error::AnyError> { + fn init_stream(&mut self) -> Result<(), JsErrorBox> { match self.mode { Mode::Gzip | Mode::Gunzip => self.window_bits += 16, Mode::Unzip => self.window_bits += 32, @@ -203,7 +201,7 @@ impl ZlibInner { Mode::Inflate | Mode::Gunzip | Mode::InflateRaw | Mode::Unzip => { self.strm.inflate_init(self.window_bits) } - Mode::None => return Err(deno_core::error::type_error("Unknown mode")), + Mode::None => return Err(JsErrorBox::type_error("Unknown mode")), }; self.write_in_progress = false; @@ -212,7 +210,7 @@ impl ZlibInner { Ok(()) } - fn close(&mut self) -> Result { + fn close(&mut self) -> Result { if self.write_in_progress { self.pending_close = true; return Ok(false); @@ -258,14 +256,25 @@ pub fn op_zlib_new(#[smi] mode: i32) -> Result { }) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum ZlibError { + #[class(type)] #[error("zlib not initialized")] NotInitialized, + #[class(inherit)] #[error(transparent)] - Mode(#[from] mode::ModeError), + Mode( + #[from] + #[inherit] + mode::ModeError, + ), + #[class(inherit)] #[error(transparent)] - Other(#[from] deno_core::error::AnyError), + Other( + #[from] + #[inherit] + JsErrorBox, + ), } #[op2(fast)] diff --git a/ext/node/ops/zlib/mode.rs b/ext/node/ops/zlib/mode.rs index c0660a7c7a406f..5fa2e501dc2225 100644 --- a/ext/node/ops/zlib/mode.rs +++ b/ext/node/ops/zlib/mode.rs @@ -1,6 +1,7 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
-#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(generic)] #[error("bad argument")] pub struct ModeError; diff --git a/ext/telemetry/Cargo.toml b/ext/telemetry/Cargo.toml index 4328b707d328e9..484a90eeb11942 100644 --- a/ext/telemetry/Cargo.toml +++ b/ext/telemetry/Cargo.toml @@ -16,6 +16,7 @@ path = "lib.rs" [dependencies] async-trait.workspace = true deno_core.workspace = true +deno_error.workspace = true http-body-util.workspace = true hyper.workspace = true hyper-util.workspace = true @@ -28,4 +29,5 @@ opentelemetry-semantic-conventions.workspace = true opentelemetry_sdk.workspace = true pin-project.workspace = true serde.workspace = true +thiserror.workspace = true tokio.workspace = true diff --git a/ext/telemetry/lib.rs b/ext/telemetry/lib.rs index 261e93124d1743..ce3f34a0af9177 100644 --- a/ext/telemetry/lib.rs +++ b/ext/telemetry/lib.rs @@ -18,10 +18,6 @@ use std::thread; use std::time::Duration; use std::time::SystemTime; -use deno_core::anyhow; -use deno_core::anyhow::anyhow; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::futures::channel::mpsc; use deno_core::futures::channel::mpsc::UnboundedSender; use deno_core::futures::future::BoxFuture; @@ -31,8 +27,11 @@ use deno_core::futures::Stream; use deno_core::futures::StreamExt; use deno_core::op2; use deno_core::v8; +use deno_core::v8::DataError; use deno_core::GarbageCollected; use deno_core::OpState; +use deno_error::JsError; +use deno_error::JsErrorBox; use once_cell::sync::Lazy; use once_cell::sync::OnceCell; use opentelemetry::logs::AnyValue; @@ -83,6 +82,7 @@ use opentelemetry_semantic_conventions::resource::TELEMETRY_SDK_NAME; use opentelemetry_semantic_conventions::resource::TELEMETRY_SDK_VERSION; use serde::Deserialize; use serde::Serialize; +use thiserror::Error; use tokio::sync::oneshot; use tokio::task::JoinSet; @@ -563,7 +563,7 @@ static OTEL_GLOBALS: OnceCell = OnceCell::new(); pub fn init( rt_config: OtelRuntimeConfig, config: &OtelConfig, -) -> anyhow::Result<()> { +) -> deno_core::anyhow::Result<()> { // Parse the `OTEL_EXPORTER_OTLP_PROTOCOL` variable. The opentelemetry_* // crates don't do this automatically. // TODO(piscisaureus): enable GRPC support. @@ -572,13 +572,13 @@ pub fn init( Ok("http/json") => Protocol::HttpJson, Ok("") | Err(env::VarError::NotPresent) => Protocol::HttpBinary, Ok(protocol) => { - return Err(anyhow!( + return Err(deno_core::anyhow::anyhow!( "Env var OTEL_EXPORTER_OTLP_PROTOCOL specifies an unsupported protocol: {}", protocol )); } Err(err) => { - return Err(anyhow!( + return Err(deno_core::anyhow::anyhow!( "Failed to read env var OTEL_EXPORTER_OTLP_PROTOCOL: {}", err )); @@ -645,7 +645,7 @@ pub fn init( Some("delta") => Temporality::Delta, Some("lowmemory") => Temporality::LowMemory, Some(other) => { - return Err(anyhow!( + return Err(deno_core::anyhow::anyhow!( "Invalid value for OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE: {}", other )); @@ -688,7 +688,7 @@ pub fn init( meter_provider, builtin_instrumentation_scope, }) - .map_err(|_| anyhow!("failed to set otel globals"))?; + .map_err(|_| deno_core::anyhow::anyhow!("failed to set otel globals"))?; Ok(()) } @@ -1100,7 +1100,7 @@ impl OtelTracer { #[smi] span_kind: u8, start_time: Option, #[smi] attribute_count: usize, - ) -> Result { + ) -> Result { let OtelGlobals { id_generator, .. 
} = OTEL_GLOBALS.get().unwrap(); let span_context; let parent_span_id; @@ -1131,20 +1131,25 @@ impl OtelTracer { parent_span_id = SpanId::INVALID; } } - let name = owned_string(scope, name.try_cast()?); + let name = owned_string( + scope, + name + .try_cast() + .map_err(|e: DataError| JsErrorBox::generic(e.to_string()))?, + ); let span_kind = match span_kind { 0 => SpanKind::Internal, 1 => SpanKind::Server, 2 => SpanKind::Client, 3 => SpanKind::Producer, 4 => SpanKind::Consumer, - _ => return Err(anyhow!("invalid span kind")), + _ => return Err(JsErrorBox::generic("invalid span kind")), }; let start_time = start_time .map(|start_time| { SystemTime::UNIX_EPOCH .checked_add(std::time::Duration::from_secs_f64(start_time)) - .ok_or_else(|| anyhow!("invalid start time")) + .ok_or_else(|| JsErrorBox::generic("invalid start time")) }) .unwrap_or_else(|| Ok(SystemTime::now()))?; let span_data = SpanData { @@ -1176,14 +1181,14 @@ impl OtelTracer { #[smi] span_kind: u8, start_time: Option, #[smi] attribute_count: usize, - ) -> Result { + ) -> Result { let parent_trace_id = parse_trace_id(scope, parent_trace_id); if parent_trace_id == TraceId::INVALID { - return Err(anyhow!("invalid trace id")); + return Err(JsErrorBox::generic("invalid trace id")); }; let parent_span_id = parse_span_id(scope, parent_span_id); if parent_span_id == SpanId::INVALID { - return Err(anyhow!("invalid span id")); + return Err(JsErrorBox::generic("invalid span id")); }; let OtelGlobals { id_generator, .. } = OTEL_GLOBALS.get().unwrap(); let span_context = SpanContext::new( @@ -1193,20 +1198,25 @@ impl OtelTracer { false, TraceState::NONE, ); - let name = owned_string(scope, name.try_cast()?); + let name = owned_string( + scope, + name + .try_cast() + .map_err(|e: DataError| JsErrorBox::generic(e.to_string()))?, + ); let span_kind = match span_kind { 0 => SpanKind::Internal, 1 => SpanKind::Server, 2 => SpanKind::Client, 3 => SpanKind::Producer, 4 => SpanKind::Consumer, - _ => return Err(anyhow!("invalid span kind")), + _ => return Err(JsErrorBox::generic("invalid span kind")), }; let start_time = start_time .map(|start_time| { SystemTime::UNIX_EPOCH .checked_add(std::time::Duration::from_secs_f64(start_time)) - .ok_or_else(|| anyhow!("invalid start time")) + .ok_or_else(|| JsErrorBox::generic("invalid start time")) }) .unwrap_or_else(|| Ok(SystemTime::now()))?; let span_data = SpanData { @@ -1237,6 +1247,16 @@ struct JsSpanContext { trace_flags: u8, } +#[derive(Debug, Error, JsError)] +#[error("OtelSpan cannot be constructed.")] +#[class(type)] +struct OtelSpanCannotBeConstructedError; + +#[derive(Debug, Error, JsError)] +#[error("invalid span status code")] +#[class(type)] +struct InvalidSpanStatusCodeError; + // boxed because of https://github.com/denoland/rusty_v8/issues/1676 #[derive(Debug)] struct OtelSpan(RefCell>); @@ -1254,8 +1274,8 @@ impl deno_core::GarbageCollected for OtelSpan {} impl OtelSpan { #[constructor] #[cppgc] - fn new() -> Result { - Err(type_error("OtelSpan can not be constructed.")) + fn new() -> Result { + Err(OtelSpanCannotBeConstructedError) } #[serde] @@ -1277,7 +1297,7 @@ impl OtelSpan { &self, #[smi] status: u8, #[string] error_description: String, - ) -> Result<(), AnyError> { + ) -> Result<(), InvalidSpanStatusCodeError> { let mut state = self.0.borrow_mut(); let OtelSpanState::Recording(span) = &mut **state else { return Ok(()); @@ -1288,7 +1308,7 @@ impl OtelSpan { 2 => SpanStatus::Error { description: Cow::Owned(error_description), }, - _ => return Err(type_error("invalid span status 
code")), + _ => return Err(InvalidSpanStatusCodeError), }; Ok(()) } @@ -1464,7 +1484,7 @@ impl OtelMeter { name: v8::Local<'s, v8::Value>, description: v8::Local<'s, v8::Value>, unit: v8::Local<'s, v8::Value>, - ) -> Result { + ) -> Result { create_instrument( |name| self.0.f64_counter(name), |i| Instrument::Counter(i.build()), @@ -1473,6 +1493,7 @@ impl OtelMeter { description, unit, ) + .map_err(|e| JsErrorBox::generic(e.to_string())) } #[cppgc] @@ -1482,7 +1503,7 @@ impl OtelMeter { name: v8::Local<'s, v8::Value>, description: v8::Local<'s, v8::Value>, unit: v8::Local<'s, v8::Value>, - ) -> Result { + ) -> Result { create_instrument( |name| self.0.f64_up_down_counter(name), |i| Instrument::UpDownCounter(i.build()), @@ -1491,6 +1512,7 @@ impl OtelMeter { description, unit, ) + .map_err(|e| JsErrorBox::generic(e.to_string())) } #[cppgc] @@ -1500,7 +1522,7 @@ impl OtelMeter { name: v8::Local<'s, v8::Value>, description: v8::Local<'s, v8::Value>, unit: v8::Local<'s, v8::Value>, - ) -> Result { + ) -> Result { create_instrument( |name| self.0.f64_gauge(name), |i| Instrument::Gauge(i.build()), @@ -1509,6 +1531,7 @@ impl OtelMeter { description, unit, ) + .map_err(|e| JsErrorBox::generic(e.to_string())) } #[cppgc] @@ -1519,15 +1542,30 @@ impl OtelMeter { description: v8::Local<'s, v8::Value>, unit: v8::Local<'s, v8::Value>, #[serde] boundaries: Option>, - ) -> Result { - let name = owned_string(scope, name.try_cast()?); + ) -> Result { + let name = owned_string( + scope, + name + .try_cast() + .map_err(|e: DataError| JsErrorBox::generic(e.to_string()))?, + ); let mut builder = self.0.f64_histogram(name); if !description.is_null_or_undefined() { - let description = owned_string(scope, description.try_cast()?); + let description = owned_string( + scope, + description + .try_cast() + .map_err(|e: DataError| JsErrorBox::generic(e.to_string()))?, + ); builder = builder.with_description(description); }; if !unit.is_null_or_undefined() { - let unit = owned_string(scope, unit.try_cast()?); + let unit = owned_string( + scope, + unit + .try_cast() + .map_err(|e: DataError| JsErrorBox::generic(e.to_string()))?, + ); builder = builder.with_unit(unit); }; if let Some(boundaries) = boundaries { @@ -1544,7 +1582,7 @@ impl OtelMeter { name: v8::Local<'s, v8::Value>, description: v8::Local<'s, v8::Value>, unit: v8::Local<'s, v8::Value>, - ) -> Result { + ) -> Result { create_async_instrument( |name| self.0.f64_observable_counter(name), |i| { @@ -1555,6 +1593,7 @@ impl OtelMeter { description, unit, ) + .map_err(|e| JsErrorBox::generic(e.to_string())) } #[cppgc] @@ -1564,7 +1603,7 @@ impl OtelMeter { name: v8::Local<'s, v8::Value>, description: v8::Local<'s, v8::Value>, unit: v8::Local<'s, v8::Value>, - ) -> Result { + ) -> Result { create_async_instrument( |name| self.0.f64_observable_up_down_counter(name), |i| { @@ -1575,6 +1614,7 @@ impl OtelMeter { description, unit, ) + .map_err(|e| JsErrorBox::generic(e.to_string())) } #[cppgc] @@ -1584,7 +1624,7 @@ impl OtelMeter { name: v8::Local<'s, v8::Value>, description: v8::Local<'s, v8::Value>, unit: v8::Local<'s, v8::Value>, - ) -> Result { + ) -> Result { create_async_instrument( |name| self.0.f64_observable_gauge(name), |i| { @@ -1595,6 +1635,7 @@ impl OtelMeter { description, unit, ) + .map_err(|e| JsErrorBox::generic(e.to_string())) } } @@ -1615,7 +1656,7 @@ fn create_instrument<'a, 'b, T>( name: v8::Local<'a, v8::Value>, description: v8::Local<'a, v8::Value>, unit: v8::Local<'a, v8::Value>, -) -> Result { +) -> Result { let name = owned_string(scope, 
name.try_cast()?); let mut builder = cb(name); if !description.is_null_or_undefined() { @@ -1637,7 +1678,7 @@ fn create_async_instrument<'a, 'b, T>( name: v8::Local<'a, v8::Value>, description: v8::Local<'a, v8::Value>, unit: v8::Local<'a, v8::Value>, -) -> Result { +) -> Result { let name = owned_string(scope, name.try_cast()?); let mut builder = cb(name); if !description.is_null_or_undefined() { diff --git a/ext/tls/Cargo.toml b/ext/tls/Cargo.toml index 53d85ba83cf266..1e804bd5386089 100644 --- a/ext/tls/Cargo.toml +++ b/ext/tls/Cargo.toml @@ -15,6 +15,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true +deno_error.workspace = true deno_native_certs = "0.3.0" rustls.workspace = true rustls-pemfile.workspace = true diff --git a/ext/tls/lib.rs b/ext/tls/lib.rs index 63e78e3f834241..a3e386052ef873 100644 --- a/ext/tls/lib.rs +++ b/ext/tls/lib.rs @@ -5,6 +5,7 @@ use std::io::Cursor; use std::net::IpAddr; use std::sync::Arc; +use deno_error::JsErrorBox; pub use deno_native_certs; pub use rustls; use rustls::client::danger::HandshakeSignatureValid; @@ -30,18 +31,24 @@ pub use webpki_roots; mod tls_key; pub use tls_key::*; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum TlsError { + #[class(generic)] #[error(transparent)] Rustls(#[from] rustls::Error), + #[class(inherit)] #[error("Unable to add pem file to certificate store: {0}")] UnableAddPemFileToCert(std::io::Error), + #[class("InvalidData")] #[error("Unable to decode certificate")] CertInvalid, + #[class("InvalidData")] #[error("No certificates found in certificate data")] CertsNotFound, + #[class("InvalidData")] #[error("No keys found in key data")] KeysNotFound, + #[class("InvalidData")] #[error("Unable to decode key")] KeyDecode, } @@ -51,9 +58,7 @@ pub enum TlsError { /// This was done because the root cert store is not needed in all cases /// and takes a bit of time to initialize. pub trait RootCertStoreProvider: Send + Sync { - fn get_or_try_init( - &self, - ) -> Result<&RootCertStore, deno_core::error::AnyError>; + fn get_or_try_init(&self) -> Result<&RootCertStore, JsErrorBox>; } // This extension has no runtime apis, it only exports some shared native functions. 
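// --- Illustrative sketch (not part of this diff) ---
// The ext/tls hunks above show the migration pattern this PR applies crate by crate:
// a thiserror enum additionally derives `deno_error::JsError`, each variant gets a
// `#[class(...)]` attribute naming the JavaScript error class it surfaces as, and
// ad-hoc failures that previously went through `anyhow!(..)` or `type_error(..)`
// are built as a `JsErrorBox`. The names below (`ExampleError`, `parse_flag`) are
// hypothetical; the sketch only assumes the deno_error API already used elsewhere
// in this diff.
use deno_error::JsErrorBox;

#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ExampleError {
  // surfaces as a TypeError on the JS side
  #[class(type)]
  #[error("unsupported input")]
  UnsupportedInput,
  // reuses the JS class carried by the wrapped error
  #[class(inherit)]
  #[error(transparent)]
  Io(#[from] std::io::Error),
  // maps to a specific DOMException subclass by name
  #[class("DOMExceptionInvalidCharacterError")]
  #[error("Failed to decode base64")]
  Base64Decode,
}

// One-off errors that used to be `anyhow!("...")` or
// `deno_core::error::type_error("...")` become JsErrorBox constructors.
fn parse_flag(value: u8) -> Result<bool, JsErrorBox> {
  match value {
    0 => Ok(false),
    1 => Ok(true),
    _ => Err(JsErrorBox::type_error("invalid flag")),
  }
}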
diff --git a/ext/url/Cargo.toml b/ext/url/Cargo.toml index 4dcb6f42ca2ca1..1f7f7b36c6101c 100644 --- a/ext/url/Cargo.toml +++ b/ext/url/Cargo.toml @@ -15,6 +15,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true +deno_error.workspace = true thiserror.workspace = true urlpattern = "0.3.0" diff --git a/ext/url/lib.rs b/ext/url/lib.rs index 0e9ca5839a7218..dd74239d93bce3 100644 --- a/ext/url/lib.rs +++ b/ext/url/lib.rs @@ -4,15 +4,13 @@ mod urlpattern; use std::path::PathBuf; -use deno_core::error::type_error; -use deno_core::error::AnyError; use deno_core::op2; use deno_core::url::form_urlencoded; use deno_core::url::quirks; use deno_core::url::Url; use deno_core::JsBuffer; use deno_core::OpState; -pub use urlpattern::UrlPatternError; +use deno_error::JsErrorBox; use crate::urlpattern::op_urlpattern_parse; use crate::urlpattern::op_urlpattern_process_match_input; @@ -220,7 +218,7 @@ pub fn op_url_reparse( pub fn op_url_parse_search_params( #[string] args: Option, #[buffer] zero_copy: Option, -) -> Result, AnyError> { +) -> Result, JsErrorBox> { let params = match (args, zero_copy) { (None, Some(zero_copy)) => form_urlencoded::parse(&zero_copy) .into_iter() @@ -230,7 +228,7 @@ pub fn op_url_parse_search_params( .into_iter() .map(|(k, v)| (k.as_ref().to_owned(), v.as_ref().to_owned())) .collect(), - _ => return Err(type_error("invalid parameters")), + _ => return Err(JsErrorBox::type_error("invalid parameters")), }; Ok(params) } diff --git a/ext/url/urlpattern.rs b/ext/url/urlpattern.rs index 88564625dab970..02034332cfb6cc 100644 --- a/ext/url/urlpattern.rs +++ b/ext/url/urlpattern.rs @@ -6,9 +6,7 @@ use urlpattern::quirks::MatchInput; use urlpattern::quirks::StringOrInit; use urlpattern::quirks::UrlPattern; -#[derive(Debug, thiserror::Error)] -#[error(transparent)] -pub struct UrlPatternError(urlpattern::Error); +deno_error::js_error_wrapper!(urlpattern::Error, UrlPatternError, "TypeError"); #[op2] #[serde] @@ -18,11 +16,9 @@ pub fn op_urlpattern_parse( #[serde] options: urlpattern::UrlPatternOptions, ) -> Result { let init = - quirks::process_construct_pattern_input(input, base_url.as_deref()) - .map_err(UrlPatternError)?; + quirks::process_construct_pattern_input(input, base_url.as_deref())?; - let pattern = - quirks::parse_pattern(init, options).map_err(UrlPatternError)?; + let pattern = quirks::parse_pattern(init, options)?; Ok(pattern) } @@ -33,8 +29,7 @@ pub fn op_urlpattern_process_match_input( #[serde] input: StringOrInit, #[string] base_url: Option, ) -> Result, UrlPatternError> { - let res = quirks::process_match_input(input, base_url.as_deref()) - .map_err(UrlPatternError)?; + let res = quirks::process_match_input(input, base_url.as_deref())?; let (input, inputs) = match res { Some((input, inputs)) => (input, inputs), diff --git a/ext/web/Cargo.toml b/ext/web/Cargo.toml index dd521ad03768f4..d5dbbdecca0b70 100644 --- a/ext/web/Cargo.toml +++ b/ext/web/Cargo.toml @@ -18,6 +18,7 @@ async-trait.workspace = true base64-simd = "0.8" bytes.workspace = true deno_core.workspace = true +deno_error.workspace = true deno_permissions.workspace = true encoding_rs.workspace = true flate2 = { workspace = true, features = ["default"] } diff --git a/ext/web/blob.rs b/ext/web/blob.rs index 872333709383e4..555e6da1cfd184 100644 --- a/ext/web/blob.rs +++ b/ext/web/blob.rs @@ -17,14 +17,18 @@ use serde::Deserialize; use serde::Serialize; use uuid::Uuid; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum BlobError { + #[class(type)] 
#[error("Blob part not found")] BlobPartNotFound, + #[class(type)] #[error("start + len can not be larger than blob part size")] SizeLargerThanBlobPart, + #[class(type)] #[error("Blob URLs are not supported in this context")] BlobURLsNotSupported, + #[class(generic)] #[error(transparent)] Url(#[from] deno_core::url::ParseError), } diff --git a/ext/web/compression.rs b/ext/web/compression.rs index 650cc84085b9a4..66662de74a4ddd 100644 --- a/ext/web/compression.rs +++ b/ext/web/compression.rs @@ -12,14 +12,18 @@ use flate2::write::ZlibDecoder; use flate2::write::ZlibEncoder; use flate2::Compression; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CompressionError { + #[class(type)] #[error("Unsupported format")] UnsupportedFormat, + #[class(type)] #[error("resource is closed")] ResourceClosed, + #[class(type)] #[error(transparent)] IoTypeError(std::io::Error), + #[class(inherit)] #[error(transparent)] Io(std::io::Error), } diff --git a/ext/web/lib.rs b/ext/web/lib.rs index 07b00f0049be3f..7d22fa3b2ac856 100644 --- a/ext/web/lib.rs +++ b/ext/web/lib.rs @@ -126,20 +126,27 @@ deno_core::extension!(deno_web, } ); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum WebError { + #[class("DOMExceptionInvalidCharacterError")] #[error("Failed to decode base64")] Base64Decode, + #[class(range)] #[error("The encoding label provided ('{0}') is invalid.")] InvalidEncodingLabel(String), + #[class(type)] #[error("buffer exceeds maximum length")] BufferTooLong, + #[class(range)] #[error("Value too large to decode")] ValueTooLarge, + #[class(range)] #[error("Provided buffer too small")] BufferTooSmall, + #[class(type)] #[error("The encoded data is not valid")] DataInvalid, + #[class(generic)] #[error(transparent)] DataError(#[from] v8::DataError), } diff --git a/ext/web/message_port.rs b/ext/web/message_port.rs index b2aad6776f9389..3d656fdea20cc7 100644 --- a/ext/web/message_port.rs +++ b/ext/web/message_port.rs @@ -20,18 +20,23 @@ use tokio::sync::mpsc::unbounded_channel; use tokio::sync::mpsc::UnboundedReceiver; use tokio::sync::mpsc::UnboundedSender; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum MessagePortError { + #[class(type)] #[error("Invalid message port transfer")] InvalidTransfer, + #[class(type)] #[error("Message port is not ready for transfer")] NotReady, + #[class(type)] #[error("Can not transfer self message port")] TransferSelf, + #[class(inherit)] #[error(transparent)] Canceled(#[from] deno_core::Canceled), + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(deno_core::error::ResourceError), } pub enum Transferable { diff --git a/ext/web/stream_resource.rs b/ext/web/stream_resource.rs index 5613f57384ffb5..edc842ff4d38d5 100644 --- a/ext/web/stream_resource.rs +++ b/ext/web/stream_resource.rs @@ -31,10 +31,12 @@ use deno_core::ResourceId; use futures::future::poll_fn; use futures::TryFutureExt; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum StreamResourceError { + #[class(inherit)] #[error(transparent)] Canceled(#[from] deno_core::Canceled), + #[class(type)] #[error("{0}")] Js(String), } @@ -404,7 +406,10 @@ impl Resource for ReadableStreamResource { } fn read(self: Rc, limit: usize) -> AsyncResult { - Box::pin(ReadableStreamResource::read(self, limit).map_err(|e| e.into())) + Box::pin( + ReadableStreamResource::read(self, limit) + 
.map_err(deno_error::JsErrorBox::from_err), + ) } fn close(self: Rc) { diff --git a/ext/webgpu/Cargo.toml b/ext/webgpu/Cargo.toml index 9aa1b2370ce3d2..4bb9fa9a418ce7 100644 --- a/ext/webgpu/Cargo.toml +++ b/ext/webgpu/Cargo.toml @@ -21,6 +21,7 @@ vulkan-portability = [] # so the whole workspace can built as wasm. [target.'cfg(not(target_arch = "wasm32"))'.dependencies] deno_core.workspace = true +deno_error.workspace = true serde = { workspace = true, features = ["derive"] } tokio = { workspace = true, features = ["full"] } wgpu-types = { workspace = true, features = ["serde"] } diff --git a/ext/webgpu/binding.rs b/ext/webgpu/binding.rs index 2849cf9bfea12f..c8441c64afd937 100644 --- a/ext/webgpu/binding.rs +++ b/ext/webgpu/binding.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use std::rc::Rc; -use deno_core::error::AnyError; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; @@ -170,7 +170,7 @@ pub fn op_webgpu_create_bind_group_layout( #[smi] device_rid: ResourceId, #[string] label: Cow, #[serde] entries: Vec, -) -> Result { +) -> Result { let instance = state.borrow::(); let device_resource = state .resource_table @@ -209,7 +209,7 @@ pub fn op_webgpu_create_pipeline_layout( #[smi] device_rid: ResourceId, #[string] label: Cow, #[serde] bind_group_layouts: Vec, -) -> Result { +) -> Result { let instance = state.borrow::(); let device_resource = state .resource_table @@ -223,7 +223,7 @@ pub fn op_webgpu_create_pipeline_layout( state.resource_table.get::(rid)?; Ok(bind_group_layout.1) }) - .collect::, AnyError>>()?; + .collect::, ResourceError>>()?; let descriptor = wgpu_core::binding_model::PipelineLayoutDescriptor { label: Some(label), @@ -256,7 +256,7 @@ pub fn op_webgpu_create_bind_group( #[string] label: Cow, #[smi] layout: ResourceId, #[serde] entries: Vec, -) -> Result { +) -> Result { let instance = state.borrow::(); let device_resource = state .resource_table @@ -304,7 +304,7 @@ pub fn op_webgpu_create_bind_group( }, }) }) - .collect::, AnyError>>()?; + .collect::, ResourceError>>()?; let bind_group_layout = state.resource_table.get::(layout)?; diff --git a/ext/webgpu/buffer.rs b/ext/webgpu/buffer.rs index e8e33244c96df7..25a5606e12efbe 100644 --- a/ext/webgpu/buffer.rs +++ b/ext/webgpu/buffer.rs @@ -14,12 +14,19 @@ use deno_core::ResourceId; use super::error::WebGpuResult; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum BufferError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource( + #[from] + #[inherit] + deno_core::error::ResourceError, + ), + #[class(type)] #[error("usage is not valid")] InvalidUsage, + #[class("DOMExceptionOperationError")] #[error(transparent)] Access(wgpu_core::resource::BufferAccessError), } @@ -58,8 +65,7 @@ pub fn op_webgpu_create_buffer( let instance = state.borrow::(); let device_resource = state .resource_table - .get::(device_rid) - .map_err(BufferError::Resource)?; + .get::(device_rid)?; let device = device_resource.1; let descriptor = wgpu_core::resource::BufferDescriptor { @@ -92,15 +98,12 @@ pub async fn op_webgpu_buffer_get_map_async( { let state_ = state.borrow(); let instance = state_.borrow::(); - let buffer_resource = state_ - .resource_table - .get::(buffer_rid) - .map_err(BufferError::Resource)?; + let buffer_resource = + state_.resource_table.get::(buffer_rid)?; let buffer = buffer_resource.1; let device_resource = state_ .resource_table - .get::(device_rid) - 
.map_err(BufferError::Resource)?; + .get::(device_rid)?; device = device_resource.1; let done_ = done.clone(); @@ -155,10 +158,7 @@ pub fn op_webgpu_buffer_get_mapped_range( #[buffer] buf: &mut [u8], ) -> Result { let instance = state.borrow::(); - let buffer_resource = state - .resource_table - .get::(buffer_rid) - .map_err(BufferError::Resource)?; + let buffer_resource = state.resource_table.get::(buffer_rid)?; let buffer = buffer_resource.1; let (slice_pointer, range_size) = @@ -192,13 +192,9 @@ pub fn op_webgpu_buffer_unmap( ) -> Result { let mapped_resource = state .resource_table - .take::(mapped_rid) - .map_err(BufferError::Resource)?; + .take::(mapped_rid)?; let instance = state.borrow::(); - let buffer_resource = state - .resource_table - .get::(buffer_rid) - .map_err(BufferError::Resource)?; + let buffer_resource = state.resource_table.get::(buffer_rid)?; let buffer = buffer_resource.1; if let Some(buf) = buf { diff --git a/ext/webgpu/bundle.rs b/ext/webgpu/bundle.rs index 5fc147a44a2c44..73c3c9f2213515 100644 --- a/ext/webgpu/bundle.rs +++ b/ext/webgpu/bundle.rs @@ -4,6 +4,7 @@ use std::borrow::Cow; use std::cell::RefCell; use std::rc::Rc; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; @@ -12,10 +13,16 @@ use serde::Deserialize; use super::error::WebGpuResult; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum BundleError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource( + #[from] + #[inherit] + ResourceError, + ), + #[class(type)] #[error("size must be larger than 0")] InvalidSize, } @@ -60,7 +67,7 @@ pub struct CreateRenderBundleEncoderArgs { pub fn op_webgpu_create_render_bundle_encoder( state: &mut OpState, #[serde] args: CreateRenderBundleEncoderArgs, -) -> Result { +) -> Result { let device_resource = state .resource_table .get::(args.device_rid)?; @@ -107,7 +114,7 @@ pub fn op_webgpu_render_bundle_encoder_finish( state: &mut OpState, #[smi] render_bundle_encoder_rid: ResourceId, #[string] label: Cow, -) -> Result { +) -> Result { let render_bundle_encoder_resource = state .resource_table @@ -138,7 +145,7 @@ pub fn op_webgpu_render_bundle_encoder_set_bind_group( #[buffer] dynamic_offsets_data: &[u32], #[number] dynamic_offsets_data_start: usize, #[number] dynamic_offsets_data_length: usize, -) -> Result { +) -> Result { let bind_group_resource = state .resource_table @@ -178,7 +185,7 @@ pub fn op_webgpu_render_bundle_encoder_push_debug_group( state: &mut OpState, #[smi] render_bundle_encoder_rid: ResourceId, #[string] group_label: &str, -) -> Result { +) -> Result { let render_bundle_encoder_resource = state .resource_table @@ -202,7 +209,7 @@ pub fn op_webgpu_render_bundle_encoder_push_debug_group( pub fn op_webgpu_render_bundle_encoder_pop_debug_group( state: &mut OpState, #[smi] render_bundle_encoder_rid: ResourceId, -) -> Result { +) -> Result { let render_bundle_encoder_resource = state .resource_table @@ -221,7 +228,7 @@ pub fn op_webgpu_render_bundle_encoder_insert_debug_marker( state: &mut OpState, #[smi] render_bundle_encoder_rid: ResourceId, #[string] marker_label: &str, -) -> Result { +) -> Result { let render_bundle_encoder_resource = state .resource_table @@ -246,7 +253,7 @@ pub fn op_webgpu_render_bundle_encoder_set_pipeline( state: &mut OpState, #[smi] render_bundle_encoder_rid: ResourceId, #[smi] pipeline: ResourceId, -) -> Result { +) -> Result { let render_pipeline_resource = state .resource_table @@ 
-276,12 +283,11 @@ pub fn op_webgpu_render_bundle_encoder_set_index_buffer( ) -> Result { let buffer_resource = state .resource_table - .get::(buffer) - .map_err(BundleError::Resource)?; - let render_bundle_encoder_resource = state - .resource_table - .get::(render_bundle_encoder_rid) - .map_err(BundleError::Resource)?; + .get::(buffer)?; + let render_bundle_encoder_resource = + state + .resource_table + .get::(render_bundle_encoder_rid)?; let size = Some(std::num::NonZeroU64::new(size).ok_or(BundleError::InvalidSize)?); @@ -305,12 +311,11 @@ pub fn op_webgpu_render_bundle_encoder_set_vertex_buffer( ) -> Result { let buffer_resource = state .resource_table - .get::(buffer) - .map_err(BundleError::Resource)?; - let render_bundle_encoder_resource = state - .resource_table - .get::(render_bundle_encoder_rid) - .map_err(BundleError::Resource)?; + .get::(buffer)?; + let render_bundle_encoder_resource = + state + .resource_table + .get::(render_bundle_encoder_rid)?; let size = if let Some(size) = size { Some(std::num::NonZeroU64::new(size).ok_or(BundleError::InvalidSize)?) } else { @@ -337,7 +342,7 @@ pub fn op_webgpu_render_bundle_encoder_draw( instance_count: u32, first_vertex: u32, first_instance: u32, -) -> Result { +) -> Result { let render_bundle_encoder_resource = state .resource_table @@ -364,7 +369,7 @@ pub fn op_webgpu_render_bundle_encoder_draw_indexed( first_index: u32, base_vertex: i32, first_instance: u32, -) -> Result { +) -> Result { let render_bundle_encoder_resource = state .resource_table @@ -389,7 +394,7 @@ pub fn op_webgpu_render_bundle_encoder_draw_indirect( #[smi] render_bundle_encoder_rid: ResourceId, #[smi] indirect_buffer: ResourceId, #[number] indirect_offset: u64, -) -> Result { +) -> Result { let buffer_resource = state .resource_table .get::(indirect_buffer)?; diff --git a/ext/webgpu/byow.rs b/ext/webgpu/byow.rs index 7c16c8a0d2be02..e911e1402bc3d3 100644 --- a/ext/webgpu/byow.rs +++ b/ext/webgpu/byow.rs @@ -15,18 +15,23 @@ use deno_core::ResourceId; use crate::surface::WebGpuSurface; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum ByowError { + #[class(type)] #[error("Cannot create surface outside of WebGPU context. 
Did you forget to call `navigator.gpu.requestAdapter()`?")] WebGPUNotInitiated, + #[class(type)] #[error("Invalid parameters")] InvalidParameters, + #[class(generic)] #[error(transparent)] CreateSurface(wgpu_core::instance::CreateSurfaceError), #[cfg(target_os = "windows")] + #[class(type)] #[error("Invalid system on Windows")] InvalidSystem, #[cfg(target_os = "macos")] + #[class(type)] #[error("Invalid system on macOS")] InvalidSystem, #[cfg(any( @@ -34,6 +39,7 @@ pub enum ByowError { target_os = "freebsd", target_os = "openbsd" ))] + #[class(type)] #[error("Invalid system on Linux/BSD")] InvalidSystem, #[cfg(any( @@ -42,6 +48,7 @@ pub enum ByowError { target_os = "freebsd", target_os = "openbsd" ))] + #[class(type)] #[error("window is null")] NullWindow, #[cfg(any( @@ -49,9 +56,11 @@ pub enum ByowError { target_os = "freebsd", target_os = "openbsd" ))] + #[class(type)] #[error("display is null")] NullDisplay, #[cfg(target_os = "macos")] + #[class(type)] #[error("ns_view is null")] NSViewDisplay, } @@ -199,6 +208,6 @@ fn raw_window( _system: &str, _window: *const c_void, _display: *const c_void, -) -> Result { - Err(deno_core::error::type_error("Unsupported platform")) +) -> Result { + Err(deno_error::JsErrorBox::type_error("Unsupported platform")) } diff --git a/ext/webgpu/command_encoder.rs b/ext/webgpu/command_encoder.rs index 4b14345a2c44d2..9b6bb44ae8f6da 100644 --- a/ext/webgpu/command_encoder.rs +++ b/ext/webgpu/command_encoder.rs @@ -4,7 +4,7 @@ use std::borrow::Cow; use std::cell::RefCell; use std::rc::Rc; -use deno_core::error::AnyError; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; @@ -50,7 +50,7 @@ pub fn op_webgpu_create_command_encoder( state: &mut OpState, #[smi] device_rid: ResourceId, #[string] label: Cow, -) -> Result { +) -> Result { let instance = state.borrow::(); let device_resource = state .resource_table @@ -110,7 +110,7 @@ pub fn op_webgpu_command_encoder_begin_render_pass( >, #[smi] occlusion_query_set: Option, #[serde] timestamp_writes: Option, -) -> Result { +) -> Result { let command_encoder_resource = state .resource_table .get::(command_encoder_rid)?; @@ -149,7 +149,7 @@ pub fn op_webgpu_command_encoder_begin_render_pass( }; Ok(rp_at) }) - .collect::, AnyError>>()?; + .collect::, ResourceError>>()?; let mut processed_depth_stencil_attachment = None; @@ -245,7 +245,7 @@ pub fn op_webgpu_command_encoder_begin_compute_pass( #[smi] command_encoder_rid: ResourceId, #[string] label: Cow, #[serde] timestamp_writes: Option, -) -> Result { +) -> Result { let command_encoder_resource = state .resource_table .get::(command_encoder_rid)?; @@ -295,7 +295,7 @@ pub fn op_webgpu_command_encoder_copy_buffer_to_buffer( #[smi] destination: ResourceId, #[number] destination_offset: u64, #[number] size: u64, -) -> Result { +) -> Result { let instance = state.borrow::(); let command_encoder_resource = state .resource_table @@ -347,7 +347,7 @@ pub fn op_webgpu_command_encoder_copy_buffer_to_texture( #[serde] source: GpuImageCopyBuffer, #[serde] destination: GpuImageCopyTexture, #[serde] copy_size: wgpu_types::Extent3d, -) -> Result { +) -> Result { let instance = state.borrow::(); let command_encoder_resource = state .resource_table @@ -392,7 +392,7 @@ pub fn op_webgpu_command_encoder_copy_texture_to_buffer( #[serde] source: GpuImageCopyTexture, #[serde] destination: GpuImageCopyBuffer, #[serde] copy_size: wgpu_types::Extent3d, -) -> Result { +) -> Result { let instance = state.borrow::(); let command_encoder_resource = 
state .resource_table @@ -437,7 +437,7 @@ pub fn op_webgpu_command_encoder_copy_texture_to_texture( #[serde] source: GpuImageCopyTexture, #[serde] destination: GpuImageCopyTexture, #[serde] copy_size: wgpu_types::Extent3d, -) -> Result { +) -> Result { let instance = state.borrow::(); let command_encoder_resource = state .resource_table @@ -480,7 +480,7 @@ pub fn op_webgpu_command_encoder_clear_buffer( #[smi] buffer_rid: ResourceId, #[number] offset: u64, #[number] size: u64, -) -> Result { +) -> Result { let instance = state.borrow::(); let command_encoder_resource = state .resource_table @@ -504,7 +504,7 @@ pub fn op_webgpu_command_encoder_push_debug_group( state: &mut OpState, #[smi] command_encoder_rid: ResourceId, #[string] group_label: &str, -) -> Result { +) -> Result { let instance = state.borrow::(); let command_encoder_resource = state .resource_table @@ -519,7 +519,7 @@ pub fn op_webgpu_command_encoder_push_debug_group( pub fn op_webgpu_command_encoder_pop_debug_group( state: &mut OpState, #[smi] command_encoder_rid: ResourceId, -) -> Result { +) -> Result { let instance = state.borrow::(); let command_encoder_resource = state .resource_table @@ -535,7 +535,7 @@ pub fn op_webgpu_command_encoder_insert_debug_marker( state: &mut OpState, #[smi] command_encoder_rid: ResourceId, #[string] marker_label: &str, -) -> Result { +) -> Result { let instance = state.borrow::(); let command_encoder_resource = state .resource_table @@ -555,7 +555,7 @@ pub fn op_webgpu_command_encoder_write_timestamp( #[smi] command_encoder_rid: ResourceId, #[smi] query_set: ResourceId, query_index: u32, -) -> Result { +) -> Result { let instance = state.borrow::(); let command_encoder_resource = state .resource_table @@ -582,7 +582,7 @@ pub fn op_webgpu_command_encoder_resolve_query_set( query_count: u32, #[smi] destination: ResourceId, #[number] destination_offset: u64, -) -> Result { +) -> Result { let instance = state.borrow::(); let command_encoder_resource = state .resource_table @@ -611,7 +611,7 @@ pub fn op_webgpu_command_encoder_finish( state: &mut OpState, #[smi] command_encoder_rid: ResourceId, #[string] label: Cow, -) -> Result { +) -> Result { let command_encoder_resource = state .resource_table .take::(command_encoder_rid)?; diff --git a/ext/webgpu/compute_pass.rs b/ext/webgpu/compute_pass.rs index 22cd522d8a120f..afa19b3faced52 100644 --- a/ext/webgpu/compute_pass.rs +++ b/ext/webgpu/compute_pass.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use std::cell::RefCell; -use deno_core::error::AnyError; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; @@ -26,7 +26,7 @@ pub fn op_webgpu_compute_pass_set_pipeline( state: &mut OpState, #[smi] compute_pass_rid: ResourceId, #[smi] pipeline: ResourceId, -) -> Result { +) -> Result { let compute_pipeline_resource = state .resource_table @@ -51,7 +51,7 @@ pub fn op_webgpu_compute_pass_dispatch_workgroups( x: u32, y: u32, z: u32, -) -> Result { +) -> Result { let compute_pass_resource = state .resource_table .get::(compute_pass_rid)?; @@ -73,7 +73,7 @@ pub fn op_webgpu_compute_pass_dispatch_workgroups_indirect( #[smi] compute_pass_rid: ResourceId, #[smi] indirect_buffer: ResourceId, #[number] indirect_offset: u64, -) -> Result { +) -> Result { let buffer_resource = state .resource_table .get::(indirect_buffer)?; @@ -96,7 +96,7 @@ pub fn op_webgpu_compute_pass_end( state: &mut OpState, #[smi] command_encoder_rid: ResourceId, #[smi] compute_pass_rid: ResourceId, -) -> Result { +) -> Result { let 
command_encoder_resource = state .resource_table .get::( @@ -125,7 +125,7 @@ pub fn op_webgpu_compute_pass_set_bind_group( #[buffer] dynamic_offsets_data: &[u32], #[number] dynamic_offsets_data_start: usize, #[number] dynamic_offsets_data_length: usize, -) -> Result { +) -> Result { let bind_group_resource = state .resource_table @@ -159,7 +159,7 @@ pub fn op_webgpu_compute_pass_push_debug_group( state: &mut OpState, #[smi] compute_pass_rid: ResourceId, #[string] group_label: &str, -) -> Result { +) -> Result { let compute_pass_resource = state .resource_table .get::(compute_pass_rid)?; @@ -178,7 +178,7 @@ pub fn op_webgpu_compute_pass_push_debug_group( pub fn op_webgpu_compute_pass_pop_debug_group( state: &mut OpState, #[smi] compute_pass_rid: ResourceId, -) -> Result { +) -> Result { let compute_pass_resource = state .resource_table .get::(compute_pass_rid)?; @@ -196,7 +196,7 @@ pub fn op_webgpu_compute_pass_insert_debug_marker( state: &mut OpState, #[smi] compute_pass_rid: ResourceId, #[string] marker_label: &str, -) -> Result { +) -> Result { let compute_pass_resource = state .resource_table .get::(compute_pass_rid)?; diff --git a/ext/webgpu/lib.rs b/ext/webgpu/lib.rs index bdf0f39b63c6cb..afcd808f74349a 100644 --- a/ext/webgpu/lib.rs +++ b/ext/webgpu/lib.rs @@ -83,14 +83,22 @@ pub mod shader; pub mod surface; pub mod texture; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum InitError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource( + #[from] + #[inherit] + deno_core::error::ResourceError, + ), + #[class(generic)] #[error(transparent)] InvalidAdapter(wgpu_core::instance::InvalidAdapter), + #[class("DOMExceptionOperationError")] #[error(transparent)] RequestDevice(wgpu_core::instance::RequestDeviceError), + #[class(generic)] #[error(transparent)] InvalidDevice(wgpu_core::device::InvalidDevice), } @@ -676,10 +684,8 @@ pub fn op_webgpu_request_device( #[serde] required_limits: Option, ) -> Result { let mut state = state.borrow_mut(); - let adapter_resource = state - .resource_table - .take::(adapter_rid) - .map_err(InitError::Resource)?; + let adapter_resource = + state.resource_table.take::(adapter_rid)?; let adapter = adapter_resource.1; let instance = state.borrow::(); @@ -738,10 +744,8 @@ pub fn op_webgpu_request_adapter_info( #[smi] adapter_rid: ResourceId, ) -> Result { let state = state.borrow_mut(); - let adapter_resource = state - .resource_table - .get::(adapter_rid) - .map_err(InitError::Resource)?; + let adapter_resource = + state.resource_table.get::(adapter_rid)?; let adapter = adapter_resource.1; let instance = state.borrow::(); @@ -788,10 +792,8 @@ pub fn op_webgpu_create_query_set( state: &mut OpState, #[serde] args: CreateQuerySetArgs, ) -> Result { - let device_resource = state - .resource_table - .get::(args.device_rid) - .map_err(InitError::Resource)?; + let device_resource = + state.resource_table.get::(args.device_rid)?; let device = device_resource.1; let instance = state.borrow::(); diff --git a/ext/webgpu/pipeline.rs b/ext/webgpu/pipeline.rs index 87b1610ad7b69c..07452a1cafcada 100644 --- a/ext/webgpu/pipeline.rs +++ b/ext/webgpu/pipeline.rs @@ -4,7 +4,7 @@ use std::borrow::Cow; use std::collections::HashMap; use std::rc::Rc; -use deno_core::error::AnyError; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; @@ -88,7 +88,7 @@ pub fn op_webgpu_create_compute_pipeline( #[string] label: Cow, #[serde] layout: 
GPUPipelineLayoutOrGPUAutoLayoutMode, #[serde] compute: GpuProgrammableStage, -) -> Result { +) -> Result { let instance = state.borrow::(); let device_resource = state .resource_table @@ -156,7 +156,7 @@ pub fn op_webgpu_compute_pipeline_get_bind_group_layout( state: &mut OpState, #[smi] compute_pipeline_rid: ResourceId, index: u32, -) -> Result { +) -> Result { let instance = state.borrow::(); let compute_pipeline_resource = state .resource_table @@ -335,7 +335,7 @@ pub struct CreateRenderPipelineArgs { pub fn op_webgpu_create_render_pipeline( state: &mut OpState, #[serde] args: CreateRenderPipelineArgs, -) -> Result { +) -> Result { let instance = state.borrow::(); let device_resource = state .resource_table @@ -434,7 +434,7 @@ pub fn op_webgpu_render_pipeline_get_bind_group_layout( state: &mut OpState, #[smi] render_pipeline_rid: ResourceId, index: u32, -) -> Result { +) -> Result { let instance = state.borrow::(); let render_pipeline_resource = state .resource_table diff --git a/ext/webgpu/queue.rs b/ext/webgpu/queue.rs index 4f367f54692551..51f4c4e009aeb9 100644 --- a/ext/webgpu/queue.rs +++ b/ext/webgpu/queue.rs @@ -3,7 +3,7 @@ use std::borrow::Cow; use std::rc::Rc; -use deno_core::error::AnyError; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; @@ -31,7 +31,7 @@ pub fn op_webgpu_queue_submit( state: &mut OpState, #[smi] queue_rid: ResourceId, #[serde] command_buffers: Vec, -) -> Result { +) -> Result { let instance = state.borrow::(); let queue_resource = state.resource_table.get::(queue_rid)?; let queue = queue_resource.1; @@ -44,7 +44,7 @@ pub fn op_webgpu_queue_submit( let mut id = buffer_resource.1.borrow_mut(); Ok(id.take().unwrap()) }) - .collect::, AnyError>>()?; + .collect::, ResourceError>>()?; let maybe_err = gfx_select!(queue => instance.queue_submit(queue, &ids)).err(); @@ -85,7 +85,7 @@ pub fn op_webgpu_write_buffer( #[number] data_offset: usize, #[number] size: Option, #[buffer] buf: &[u8], -) -> Result { +) -> Result { let instance = state.borrow::(); let buffer_resource = state .resource_table @@ -118,7 +118,7 @@ pub fn op_webgpu_write_texture( #[serde] data_layout: GpuImageDataLayout, #[serde] size: wgpu_types::Extent3d, #[buffer] buf: &[u8], -) -> Result { +) -> Result { let instance = state.borrow::(); let texture_resource = state .resource_table diff --git a/ext/webgpu/render_pass.rs b/ext/webgpu/render_pass.rs index 41d610c0f9e34b..43c4cae8460aa8 100644 --- a/ext/webgpu/render_pass.rs +++ b/ext/webgpu/render_pass.rs @@ -3,6 +3,7 @@ use std::borrow::Cow; use std::cell::RefCell; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; @@ -11,10 +12,16 @@ use serde::Deserialize; use super::error::WebGpuResult; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum RenderPassError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource( + #[from] + #[inherit] + ResourceError, + ), + #[class(type)] #[error("size must be larger than 0")] InvalidSize, } @@ -45,7 +52,7 @@ pub struct RenderPassSetViewportArgs { pub fn op_webgpu_render_pass_set_viewport( state: &mut OpState, #[serde] args: RenderPassSetViewportArgs, -) -> Result { +) -> Result { let render_pass_resource = state .resource_table .get::(args.render_pass_rid)?; @@ -72,7 +79,7 @@ pub fn op_webgpu_render_pass_set_scissor_rect( y: u32, width: u32, height: u32, -) -> Result { +) -> Result { let render_pass_resource = 
state .resource_table .get::(render_pass_rid)?; @@ -94,7 +101,7 @@ pub fn op_webgpu_render_pass_set_blend_constant( state: &mut OpState, #[smi] render_pass_rid: ResourceId, #[serde] color: wgpu_types::Color, -) -> Result { +) -> Result { let render_pass_resource = state .resource_table .get::(render_pass_rid)?; @@ -113,7 +120,7 @@ pub fn op_webgpu_render_pass_set_stencil_reference( state: &mut OpState, #[smi] render_pass_rid: ResourceId, reference: u32, -) -> Result { +) -> Result { let render_pass_resource = state .resource_table .get::(render_pass_rid)?; @@ -132,7 +139,7 @@ pub fn op_webgpu_render_pass_begin_occlusion_query( state: &mut OpState, #[smi] render_pass_rid: ResourceId, query_index: u32, -) -> Result { +) -> Result { let render_pass_resource = state .resource_table .get::(render_pass_rid)?; @@ -150,7 +157,7 @@ pub fn op_webgpu_render_pass_begin_occlusion_query( pub fn op_webgpu_render_pass_end_occlusion_query( state: &mut OpState, #[smi] render_pass_rid: ResourceId, -) -> Result { +) -> Result { let render_pass_resource = state .resource_table .get::(render_pass_rid)?; @@ -168,7 +175,7 @@ pub fn op_webgpu_render_pass_execute_bundles( state: &mut OpState, #[smi] render_pass_rid: ResourceId, #[serde] bundles: Vec, -) -> Result { +) -> Result { let bundles = bundles .iter() .map(|rid| { @@ -178,7 +185,7 @@ pub fn op_webgpu_render_pass_execute_bundles( .get::(*rid)?; Ok(render_bundle_resource.1) }) - .collect::, deno_core::error::AnyError>>()?; + .collect::, ResourceError>>()?; let render_pass_resource = state .resource_table @@ -198,7 +205,7 @@ pub fn op_webgpu_render_pass_end( state: &mut OpState, #[smi] command_encoder_rid: ResourceId, #[smi] render_pass_rid: ResourceId, -) -> Result { +) -> Result { let command_encoder_resource = state .resource_table .get::( @@ -224,7 +231,7 @@ pub fn op_webgpu_render_pass_set_bind_group( #[buffer] dynamic_offsets_data: &[u32], #[number] dynamic_offsets_data_start: usize, #[number] dynamic_offsets_data_length: usize, -) -> Result { +) -> Result { let bind_group_resource = state .resource_table @@ -258,7 +265,7 @@ pub fn op_webgpu_render_pass_push_debug_group( state: &mut OpState, #[smi] render_pass_rid: ResourceId, #[string] group_label: &str, -) -> Result { +) -> Result { let render_pass_resource = state .resource_table .get::(render_pass_rid)?; @@ -277,7 +284,7 @@ pub fn op_webgpu_render_pass_push_debug_group( pub fn op_webgpu_render_pass_pop_debug_group( state: &mut OpState, #[smi] render_pass_rid: ResourceId, -) -> Result { +) -> Result { let render_pass_resource = state .resource_table .get::(render_pass_rid)?; @@ -295,7 +302,7 @@ pub fn op_webgpu_render_pass_insert_debug_marker( state: &mut OpState, #[smi] render_pass_rid: ResourceId, #[string] marker_label: &str, -) -> Result { +) -> Result { let render_pass_resource = state .resource_table .get::(render_pass_rid)?; @@ -315,7 +322,7 @@ pub fn op_webgpu_render_pass_set_pipeline( state: &mut OpState, #[smi] render_pass_rid: ResourceId, pipeline: u32, -) -> Result { +) -> Result { let render_pipeline_resource = state .resource_table @@ -344,12 +351,10 @@ pub fn op_webgpu_render_pass_set_index_buffer( ) -> Result { let buffer_resource = state .resource_table - .get::(buffer) - .map_err(RenderPassError::Resource)?; + .get::(buffer)?; let render_pass_resource = state .resource_table - .get::(render_pass_rid) - .map_err(RenderPassError::Resource)?; + .get::(render_pass_rid)?; let size = if let Some(size) = size { Some(std::num::NonZeroU64::new(size).ok_or(RenderPassError::InvalidSize)?) 
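// --- Illustrative sketch (not part of this diff) ---
// The webgpu hunks above repeat one refactor: the old catch-all
// `Resource(deno_core::error::AnyError)` variant forced an explicit
// `.map_err(FooError::Resource)` after every resource_table lookup, while the new
// `#[from] #[inherit] ResourceError` variant lets `?` convert automatically and
// inherit ResourceError's JS class. The names below (`ExamplePassError`,
// `ExamplePass`, `end_example_pass`) are hypothetical, and the sketch assumes
// `ResourceTable::get` returns `Result<_, ResourceError>` as these hunks imply.
use deno_core::error::ResourceError;
use deno_core::{OpState, Resource, ResourceId};

#[derive(Debug, thiserror::Error, deno_error::JsError)]
pub enum ExamplePassError {
  #[class(inherit)]
  #[error(transparent)]
  Resource(
    #[from]
    #[inherit]
    ResourceError,
  ),
  #[class(type)]
  #[error("size must be larger than 0")]
  InvalidSize,
}

#[derive(Debug)]
struct ExamplePass;
impl Resource for ExamplePass {}

fn end_example_pass(
  state: &mut OpState,
  rid: ResourceId,
) -> Result<(), ExamplePassError> {
  // Before: .get::<ExamplePass>(rid).map_err(ExamplePassError::Resource)?
  // After: `?` converts through the #[from] ResourceError variant directly.
  let _pass = state.resource_table.get::<ExamplePass>(rid)?;
  Ok(())
}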
@@ -379,12 +384,10 @@ pub fn op_webgpu_render_pass_set_vertex_buffer( ) -> Result { let buffer_resource = state .resource_table - .get::(buffer) - .map_err(RenderPassError::Resource)?; + .get::(buffer)?; let render_pass_resource = state .resource_table - .get::(render_pass_rid) - .map_err(RenderPassError::Resource)?; + .get::(render_pass_rid)?; let size = if let Some(size) = size { Some(std::num::NonZeroU64::new(size).ok_or(RenderPassError::InvalidSize)?) @@ -412,7 +415,7 @@ pub fn op_webgpu_render_pass_draw( instance_count: u32, first_vertex: u32, first_instance: u32, -) -> Result { +) -> Result { let render_pass_resource = state .resource_table .get::(render_pass_rid)?; @@ -438,7 +441,7 @@ pub fn op_webgpu_render_pass_draw_indexed( first_index: u32, base_vertex: i32, first_instance: u32, -) -> Result { +) -> Result { let render_pass_resource = state .resource_table .get::(render_pass_rid)?; @@ -462,7 +465,7 @@ pub fn op_webgpu_render_pass_draw_indirect( #[smi] render_pass_rid: ResourceId, indirect_buffer: u32, #[number] indirect_offset: u64, -) -> Result { +) -> Result { let buffer_resource = state .resource_table .get::(indirect_buffer)?; @@ -486,7 +489,7 @@ pub fn op_webgpu_render_pass_draw_indexed_indirect( #[smi] render_pass_rid: ResourceId, indirect_buffer: u32, #[number] indirect_offset: u64, -) -> Result { +) -> Result { let buffer_resource = state .resource_table .get::(indirect_buffer)?; diff --git a/ext/webgpu/sampler.rs b/ext/webgpu/sampler.rs index e4f73e93ac437a..88c1947e635534 100644 --- a/ext/webgpu/sampler.rs +++ b/ext/webgpu/sampler.rs @@ -47,7 +47,7 @@ pub struct CreateSamplerArgs { pub fn op_webgpu_create_sampler( state: &mut OpState, #[serde] args: CreateSamplerArgs, -) -> Result { +) -> Result { let instance = state.borrow::(); let device_resource = state .resource_table diff --git a/ext/webgpu/shader.rs b/ext/webgpu/shader.rs index f57e24fa26c5c3..84615ea6f6a543 100644 --- a/ext/webgpu/shader.rs +++ b/ext/webgpu/shader.rs @@ -31,7 +31,7 @@ pub fn op_webgpu_create_shader_module( #[smi] device_rid: ResourceId, #[string] label: Cow, #[string] code: Cow, -) -> Result { +) -> Result { let instance = state.borrow::(); let device_resource = state .resource_table diff --git a/ext/webgpu/surface.rs b/ext/webgpu/surface.rs index e23c5f182b4561..23e617c7de2cd5 100644 --- a/ext/webgpu/surface.rs +++ b/ext/webgpu/surface.rs @@ -3,6 +3,7 @@ use std::borrow::Cow; use std::rc::Rc; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::OpState; use deno_core::Resource; @@ -10,14 +11,21 @@ use deno_core::ResourceId; use serde::Deserialize; use wgpu_types::SurfaceStatus; -use super::WebGpuResult; +use crate::error::WebGpuResult; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum SurfaceError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource( + #[from] + #[inherit] + ResourceError, + ), + #[class(generic)] #[error("Invalid Surface Status")] InvalidStatus, + #[class(generic)] #[error(transparent)] Surface(wgpu_core::present::SurfaceError), } @@ -52,7 +60,7 @@ pub struct SurfaceConfigureArgs { pub fn op_webgpu_surface_configure( state: &mut OpState, #[serde] args: SurfaceConfigureArgs, -) -> Result { +) -> Result { let instance = state.borrow::(); let device_resource = state .resource_table @@ -90,13 +98,10 @@ pub fn op_webgpu_surface_get_current_texture( let instance = state.borrow::(); let device_resource = state .resource_table - .get::(device_rid) - 
.map_err(SurfaceError::Resource)?; + .get::(device_rid)?; let device = device_resource.1; - let surface_resource = state - .resource_table - .get::(surface_rid) - .map_err(SurfaceError::Resource)?; + let surface_resource = + state.resource_table.get::(surface_rid)?; let surface = surface_resource.1; let output = @@ -126,13 +131,10 @@ pub fn op_webgpu_surface_present( let instance = state.borrow::(); let device_resource = state .resource_table - .get::(device_rid) - .map_err(SurfaceError::Resource)?; + .get::(device_rid)?; let device = device_resource.1; - let surface_resource = state - .resource_table - .get::(surface_rid) - .map_err(SurfaceError::Resource)?; + let surface_resource = + state.resource_table.get::(surface_rid)?; let surface = surface_resource.1; let _ = gfx_select!(device => instance.surface_present(surface)) diff --git a/ext/webgpu/texture.rs b/ext/webgpu/texture.rs index a354567de84f78..10c5d902ee03b8 100644 --- a/ext/webgpu/texture.rs +++ b/ext/webgpu/texture.rs @@ -62,7 +62,7 @@ pub struct CreateTextureArgs { pub fn op_webgpu_create_texture( state: &mut OpState, #[serde] args: CreateTextureArgs, -) -> Result { +) -> Result { let instance = state.borrow::(); let device_resource = state .resource_table @@ -111,7 +111,7 @@ pub struct CreateTextureViewArgs { pub fn op_webgpu_create_texture_view( state: &mut OpState, #[serde] args: CreateTextureViewArgs, -) -> Result { +) -> Result { let instance = state.borrow::(); let texture_resource = state .resource_table diff --git a/ext/websocket/Cargo.toml b/ext/websocket/Cargo.toml index a1f1c98ba18613..dc24d52e1690ad 100644 --- a/ext/websocket/Cargo.toml +++ b/ext/websocket/Cargo.toml @@ -16,6 +16,7 @@ path = "lib.rs" [dependencies] bytes.workspace = true deno_core.workspace = true +deno_error.workspace = true deno_net.workspace = true deno_permissions.workspace = true deno_tls.workspace = true diff --git a/ext/websocket/lib.rs b/ext/websocket/lib.rs index b47dbef3e17a42..deb424c9be4712 100644 --- a/ext/websocket/lib.rs +++ b/ext/websocket/lib.rs @@ -24,6 +24,7 @@ use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; use deno_core::ToJsBuffer; +use deno_error::JsErrorBox; use deno_net::raw::NetworkStream; use deno_permissions::PermissionCheckError; use deno_tls::create_client_config; @@ -72,22 +73,30 @@ static USE_WRITEV: Lazy = Lazy::new(|| { false }); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum WebsocketError { + #[class(inherit)] #[error(transparent)] Url(url::ParseError), + #[class(inherit)] #[error(transparent)] Permission(#[from] PermissionCheckError), + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(generic)] #[error(transparent)] Uri(#[from] http::uri::InvalidUri), + #[class(inherit)] #[error("{0}")] Io(#[from] std::io::Error), + #[class(type)] #[error(transparent)] WebSocket(#[from] fastwebsockets::WebSocketError), + #[class("DOMExceptionNetworkError")] #[error("failed to connect to WebSocket: {0}")] ConnectionFailed(#[from] HandshakeError), + #[class(inherit)] #[error(transparent)] Canceled(#[from] deno_core::Canceled), } @@ -96,9 +105,7 @@ pub enum WebsocketError { pub struct WsRootStoreProvider(Option>); impl WsRootStoreProvider { - pub fn get_or_try_init( - &self, - ) -> Result, deno_core::error::AnyError> { + pub fn get_or_try_init(&self) -> Result, JsErrorBox> { Ok(match &self.0 { Some(provider) => Some(provider.get_or_try_init()?.clone()), None 
=> None, @@ -183,32 +190,45 @@ pub struct CreateResponse { extensions: String, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum HandshakeError { + #[class(type)] #[error("Missing path in url")] MissingPath, + #[class(generic)] #[error("Invalid status code {0}")] InvalidStatusCode(StatusCode), + #[class(generic)] #[error(transparent)] Http(#[from] http::Error), + #[class(type)] #[error(transparent)] WebSocket(#[from] fastwebsockets::WebSocketError), + #[class(generic)] #[error("Didn't receive h2 alpn, aborting connection")] NoH2Alpn, + #[class(generic)] #[error(transparent)] Rustls(#[from] deno_tls::rustls::Error), + #[class(inherit)] #[error(transparent)] Io(#[from] std::io::Error), + #[class(generic)] #[error(transparent)] H2(#[from] h2::Error), + #[class(type)] #[error("Invalid hostname: '{0}'")] InvalidHostname(String), + #[class(inherit)] #[error(transparent)] - RootStoreError(deno_core::error::AnyError), + RootStoreError(JsErrorBox), + #[class(inherit)] #[error(transparent)] Tls(deno_tls::TlsError), + #[class(type)] #[error(transparent)] HeaderName(#[from] http::header::InvalidHeaderName), + #[class(type)] #[error(transparent)] HeaderValue(#[from] http::header::InvalidHeaderValue), } @@ -473,8 +493,7 @@ where let r = state .borrow_mut() .resource_table - .get::(cancel_rid) - .map_err(WebsocketError::Resource)?; + .get::(cancel_rid)?; Some(r.0.clone()) } else { None @@ -678,8 +697,7 @@ pub async fn op_ws_send_binary_async( let resource = state .borrow_mut() .resource_table - .get::(rid) - .map_err(WebsocketError::Resource)?; + .get::(rid)?; let data = data.to_vec(); let lock = resource.reserve_lock(); resource @@ -697,8 +715,7 @@ pub async fn op_ws_send_text_async( let resource = state .borrow_mut() .resource_table - .get::(rid) - .map_err(WebsocketError::Resource)?; + .get::(rid)?; let lock = resource.reserve_lock(); resource .write_frame( @@ -732,8 +749,7 @@ pub async fn op_ws_send_ping( let resource = state .borrow_mut() .resource_table - .get::(rid) - .map_err(WebsocketError::Resource)?; + .get::(rid)?; let lock = resource.reserve_lock(); resource .write_frame( diff --git a/ext/webstorage/Cargo.toml b/ext/webstorage/Cargo.toml index 2cae0d8e011ce1..044aa41f6a71bf 100644 --- a/ext/webstorage/Cargo.toml +++ b/ext/webstorage/Cargo.toml @@ -15,6 +15,7 @@ path = "lib.rs" [dependencies] deno_core.workspace = true +deno_error.workspace = true deno_web.workspace = true rusqlite.workspace = true thiserror.workspace = true diff --git a/ext/webstorage/lib.rs b/ext/webstorage/lib.rs index ca6b43a8279f8a..4653e1b9486d70 100644 --- a/ext/webstorage/lib.rs +++ b/ext/webstorage/lib.rs @@ -12,14 +12,18 @@ use rusqlite::params; use rusqlite::Connection; use rusqlite::OptionalExtension; -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum WebStorageError { + #[class("DOMExceptionNotSupportedError")] #[error("LocalStorage is not supported in this context.")] ContextNotSupported, + #[class(generic)] #[error(transparent)] Sqlite(#[from] rusqlite::Error), + #[class(inherit)] #[error(transparent)] Io(std::io::Error), + #[class("DOMExceptionQuotaExceededError")] #[error("Exceeded maximum storage size")] StorageExceeded, } diff --git a/resolvers/deno/Cargo.toml b/resolvers/deno/Cargo.toml index 71e75d5a1d257a..0eeec14e77c7f0 100644 --- a/resolvers/deno/Cargo.toml +++ b/resolvers/deno/Cargo.toml @@ -22,6 +22,7 @@ base32.workspace = true boxed_error.workspace = true dashmap = { workspace = true, optional = 
true } deno_config.workspace = true +deno_error.workspace = true deno_media_type.workspace = true deno_package_json.workspace = true deno_package_json.features = ["sync"] diff --git a/resolvers/deno/lib.rs b/resolvers/deno/lib.rs index ab01f397fb7bda..49b2cf4d1b9406 100644 --- a/resolvers/deno/lib.rs +++ b/resolvers/deno/lib.rs @@ -11,6 +11,7 @@ use deno_config::workspace::MappedResolutionDiagnostic; use deno_config::workspace::MappedResolutionError; use deno_config::workspace::WorkspaceResolvePkgJsonFolderError; use deno_config::workspace::WorkspaceResolver; +use deno_error::JsError; use deno_package_json::PackageJsonDepValue; use deno_package_json::PackageJsonDepValueParseError; use deno_semver::npm::NpmPackageReqReference; @@ -53,29 +54,39 @@ pub struct DenoResolution { pub found_package_json_dep: bool, } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct DenoResolveError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum DenoResolveErrorKind { + #[class(type)] #[error("Importing from the vendor directory is not permitted. Use a remote specifier instead or disable vendoring.")] InvalidVendorFolderImport, + #[class(inherit)] #[error(transparent)] MappedResolution(#[from] MappedResolutionError), + #[class(inherit)] #[error(transparent)] MissingPackageNodeModulesFolder(#[from] MissingPackageNodeModulesFolderError), + #[class(inherit)] #[error(transparent)] Node(#[from] NodeResolveError), + #[class(inherit)] #[error(transparent)] NodeModulesOutOfDate(#[from] NodeModulesOutOfDateError), + #[class(inherit)] #[error(transparent)] PackageJsonDepValueParse(#[from] PackageJsonDepValueParseError), + #[class(inherit)] #[error(transparent)] PackageJsonDepValueUrlParse(url::ParseError), + #[class(inherit)] #[error(transparent)] PackageSubpathResolve(#[from] PackageSubpathResolveError), + #[class(inherit)] #[error(transparent)] ResolvePkgFolderFromDenoReq(#[from] ResolvePkgFolderFromDenoReqError), + #[class(inherit)] #[error(transparent)] WorkspaceResolvePkgJsonFolder(#[from] WorkspaceResolvePkgJsonFolderError), } diff --git a/resolvers/deno/npm/byonm.rs b/resolvers/deno/npm/byonm.rs index 3ceec368add81f..e9aad66e3fbdd3 100644 --- a/resolvers/deno/npm/byonm.rs +++ b/resolvers/deno/npm/byonm.rs @@ -30,14 +30,18 @@ use super::local::normalize_pkg_name_for_node_modules_deno_folder; use super::CliNpmReqResolver; use super::ResolvePkgFolderFromDenoReqError; -#[derive(Debug, Error)] +#[derive(Debug, Error, deno_error::JsError)] pub enum ByonmResolvePkgFolderFromDenoReqError { + #[class(generic)] #[error("Could not find \"{}\" in a node_modules folder. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?", .0)] MissingAlias(StackString), + #[class(inherit)] #[error(transparent)] PackageJson(#[from] PackageJsonLoadError), + #[class(generic)] #[error("Could not find a matching package for 'npm:{}' in the node_modules directory. Ensure you have all your JSR and npm dependencies listed in your deno.json or package.json, then run `deno install`. 
Alternatively, turn on auto-install by specifying `\"nodeModulesDir\": \"auto\"` in your deno.json file.", .0)] UnmatchedReq(PackageReq), + #[class(inherit)] #[error(transparent)] Io(#[from] std::io::Error), } diff --git a/resolvers/deno/npm/mod.rs b/resolvers/deno/npm/mod.rs index 1501c059416a39..2e7c4c888b1b58 100644 --- a/resolvers/deno/npm/mod.rs +++ b/resolvers/deno/npm/mod.rs @@ -9,6 +9,7 @@ pub use byonm::ByonmNpmResolver; pub use byonm::ByonmNpmResolverCreateOptions; pub use byonm::ByonmNpmResolverRc; pub use byonm::ByonmResolvePkgFolderFromDenoReqError; +use deno_error::JsError; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageReq; pub use local::normalize_pkg_name_for_node_modules_deno_folder; @@ -35,49 +36,57 @@ use url::Url; mod byonm; mod local; -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] +#[class(generic)] #[error("Could not resolve \"{}\", but found it in a package.json. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?", specifier)] pub struct NodeModulesOutOfDateError { pub specifier: String, } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] +#[class(generic)] #[error("Could not find '{}'. Deno expects the node_modules/ directory to be up to date. Did you forget to run `deno install`?", package_json_path.display())] pub struct MissingPackageNodeModulesFolderError { pub package_json_path: PathBuf, } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct ResolveIfForNpmPackageError( pub Box, ); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum ResolveIfForNpmPackageErrorKind { + #[class(inherit)] #[error(transparent)] NodeResolve(#[from] NodeResolveError), + #[class(inherit)] #[error(transparent)] NodeModulesOutOfDate(#[from] NodeModulesOutOfDateError), } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct ResolveReqWithSubPathError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum ResolveReqWithSubPathErrorKind { + #[class(inherit)] #[error(transparent)] MissingPackageNodeModulesFolder(#[from] MissingPackageNodeModulesFolderError), + #[class(inherit)] #[error(transparent)] ResolvePkgFolderFromDenoReq(#[from] ResolvePkgFolderFromDenoReqError), + #[class(inherit)] #[error(transparent)] PackageSubpathResolve(#[from] PackageSubpathResolveError), } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum ResolvePkgFolderFromDenoReqError { - // todo(dsherret): don't use anyhow here - #[error(transparent)] - Managed(anyhow::Error), + #[class(inherit)] + #[error("{0}")] + Managed(Box), + #[class(inherit)] #[error(transparent)] Byonm(#[from] ByonmResolvePkgFolderFromDenoReqError), } diff --git a/resolvers/node/Cargo.toml b/resolvers/node/Cargo.toml index bdb0ba2ab1b920..31bca50a310355 100644 --- a/resolvers/node/Cargo.toml +++ b/resolvers/node/Cargo.toml @@ -20,6 +20,7 @@ sync = ["deno_package_json/sync"] anyhow.workspace = true async-trait.workspace = true boxed_error.workspace = true +deno_error.workspace = true deno_media_type.workspace = true deno_package_json.workspace = true deno_path_util.workspace = true diff --git a/resolvers/node/errors.rs b/resolvers/node/errors.rs index 4157bd4c85ae07..1b4ce460d13990 100644 --- a/resolvers/node/errors.rs +++ b/resolvers/node/errors.rs @@ -5,6 +5,7 @@ use std::fmt::Write; use std::path::PathBuf; use boxed_error::Boxed; +use deno_error::JsError; use thiserror::Error; use url::Url; @@ -55,8 +56,7 @@ pub trait NodeJsErrorCoded { fn code(&self) -> 
NodeJsErrorCode; } -// todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError -#[derive(Debug, Clone, Error)] +#[derive(Debug, Clone, Error, JsError)] #[error( "[{}] Invalid module '{}' {}{}", self.code(), @@ -64,6 +64,7 @@ pub trait NodeJsErrorCoded { reason, maybe_referrer.as_ref().map(|referrer| format!(" imported from '{}'", referrer)).unwrap_or_default() )] +#[class(type)] pub struct InvalidModuleSpecifierError { pub request: String, pub reason: Cow<'static, str>, @@ -76,13 +77,15 @@ impl NodeJsErrorCoded for InvalidModuleSpecifierError { } } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct LegacyResolveError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum LegacyResolveErrorKind { + #[class(inherit)] #[error(transparent)] TypesNotFound(#[from] TypesNotFoundError), + #[class(inherit)] #[error(transparent)] ModuleNotFound(#[from] ModuleNotFoundError), } @@ -96,13 +99,14 @@ impl NodeJsErrorCoded for LegacyResolveError { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] #[error( "Could not find package '{}' from referrer '{}'{}.", package_name, referrer, referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default() )] +#[class(generic)] pub struct PackageNotFoundError { pub package_name: String, pub referrer: Url, @@ -116,12 +120,13 @@ impl NodeJsErrorCoded for PackageNotFoundError { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] #[error( "Could not find referrer npm package '{}'{}.", referrer, referrer_extra.as_ref().map(|r| format!(" ({})", r)).unwrap_or_default() )] +#[class(generic)] pub struct ReferrerNotFoundError { pub referrer: Url, /// Extra information about the referrer. @@ -134,12 +139,14 @@ impl NodeJsErrorCoded for ReferrerNotFoundError { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] +#[class(inherit)] #[error("Failed resolving '{package_name}' from referrer '{referrer}'.")] pub struct PackageFolderResolveIoError { pub package_name: String, pub referrer: Url, #[source] + #[inherit] pub source: std::io::Error, } @@ -159,15 +166,18 @@ impl NodeJsErrorCoded for PackageFolderResolveError { } } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct PackageFolderResolveError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum PackageFolderResolveErrorKind { + #[class(inherit)] #[error(transparent)] PackageNotFound(#[from] PackageNotFoundError), + #[class(inherit)] #[error(transparent)] ReferrerNotFound(#[from] ReferrerNotFoundError), + #[class(inherit)] #[error(transparent)] Io(#[from] PackageFolderResolveIoError), } @@ -182,20 +192,24 @@ impl NodeJsErrorCoded for PackageSubpathResolveError { } } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct PackageSubpathResolveError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum PackageSubpathResolveErrorKind { + #[class(inherit)] #[error(transparent)] PkgJsonLoad(#[from] PackageJsonLoadError), + #[class(inherit)] #[error(transparent)] Exports(PackageExportsResolveError), + #[class(inherit)] #[error(transparent)] LegacyResolve(LegacyResolveError), } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] +#[class(generic)] #[error( "Target '{}' not found from '{}'{}{}.", target, @@ -241,19 +255,24 @@ impl NodeJsErrorCoded for PackageTargetResolveError { } } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct PackageTargetResolveError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] 
pub enum PackageTargetResolveErrorKind { + #[class(inherit)] #[error(transparent)] NotFound(#[from] PackageTargetNotFoundError), + #[class(inherit)] #[error(transparent)] InvalidPackageTarget(#[from] InvalidPackageTargetError), + #[class(inherit)] #[error(transparent)] InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError), + #[class(inherit)] #[error(transparent)] PackageResolve(#[from] PackageResolveError), + #[class(inherit)] #[error(transparent)] TypesNotFound(#[from] TypesNotFoundError), } @@ -267,24 +286,27 @@ impl NodeJsErrorCoded for PackageExportsResolveError { } } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct PackageExportsResolveError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum PackageExportsResolveErrorKind { + #[class(inherit)] #[error(transparent)] PackagePathNotExported(#[from] PackagePathNotExportedError), + #[class(inherit)] #[error(transparent)] PackageTargetResolve(#[from] PackageTargetResolveError), } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] #[error( "[{}] Could not find types for '{}'{}", self.code(), self.0.code_specifier, self.0.maybe_referrer.as_ref().map(|r| format!(" imported from '{}'", r)).unwrap_or_default(), )] +#[class(generic)] pub struct TypesNotFoundError(pub Box); #[derive(Debug)] @@ -299,7 +321,7 @@ impl NodeJsErrorCoded for TypesNotFoundError { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] #[error( "[{}] Invalid package config. {}", self.code(), @@ -325,17 +347,18 @@ impl NodeJsErrorCoded for ClosestPkgJsonError { } } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct ClosestPkgJsonError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum ClosestPkgJsonErrorKind { + #[class(inherit)] #[error(transparent)] Load(#[from] PackageJsonLoadError), } -// todo(https://github.com/denoland/deno_core/issues/810): make this a TypeError -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] +#[class(type)] #[error( "[{}] Package import specifier \"{}\" is not defined{}{}", self.code(), @@ -355,17 +378,21 @@ impl NodeJsErrorCoded for PackageImportNotDefinedError { } } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct PackageImportsResolveError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum PackageImportsResolveErrorKind { + #[class(inherit)] #[error(transparent)] ClosestPkgJson(ClosestPkgJsonError), + #[class(inherit)] #[error(transparent)] InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError), + #[class(inherit)] #[error(transparent)] NotDefined(#[from] PackageImportNotDefinedError), + #[class(inherit)] #[error(transparent)] Target(#[from] PackageTargetResolveError), } @@ -393,24 +420,30 @@ impl NodeJsErrorCoded for PackageResolveError { } } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct PackageResolveError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum PackageResolveErrorKind { + #[class(inherit)] #[error(transparent)] ClosestPkgJson(#[from] ClosestPkgJsonError), + #[class(inherit)] #[error(transparent)] InvalidModuleSpecifier(#[from] InvalidModuleSpecifierError), + #[class(inherit)] #[error(transparent)] PackageFolderResolve(#[from] PackageFolderResolveError), + #[class(inherit)] #[error(transparent)] ExportsResolve(#[from] PackageExportsResolveError), + #[class(inherit)] #[error(transparent)] SubpathResolve(#[from] PackageSubpathResolveError), } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] 
+#[class(generic)] #[error("Failed joining '{path}' from '{base}'.")] pub struct NodeResolveRelativeJoinError { pub path: String, @@ -419,43 +452,54 @@ pub struct NodeResolveRelativeJoinError { pub source: url::ParseError, } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] +#[class(generic)] #[error("Failed resolving specifier from data url referrer.")] pub struct DataUrlReferrerError { #[source] pub source: url::ParseError, } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct NodeResolveError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum NodeResolveErrorKind { + #[class(inherit)] #[error(transparent)] RelativeJoin(#[from] NodeResolveRelativeJoinError), + #[class(inherit)] #[error(transparent)] PackageImportsResolve(#[from] PackageImportsResolveError), + #[class(inherit)] #[error(transparent)] UnsupportedEsmUrlScheme(#[from] UnsupportedEsmUrlSchemeError), + #[class(inherit)] #[error(transparent)] DataUrlReferrer(#[from] DataUrlReferrerError), + #[class(inherit)] #[error(transparent)] PackageResolve(#[from] PackageResolveError), + #[class(inherit)] #[error(transparent)] TypesNotFound(#[from] TypesNotFoundError), + #[class(inherit)] #[error(transparent)] FinalizeResolution(#[from] FinalizeResolutionError), } -#[derive(Debug, Boxed)] +#[derive(Debug, Boxed, JsError)] pub struct FinalizeResolutionError(pub Box); -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum FinalizeResolutionErrorKind { + #[class(inherit)] #[error(transparent)] InvalidModuleSpecifierError(#[from] InvalidModuleSpecifierError), + #[class(inherit)] #[error(transparent)] ModuleNotFound(#[from] ModuleNotFoundError), + #[class(inherit)] #[error(transparent)] UnsupportedDirImport(#[from] UnsupportedDirImportError), } @@ -470,7 +514,8 @@ impl NodeJsErrorCoded for FinalizeResolutionError { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] +#[class(generic)] #[error( "[{}] Cannot find {} '{}'{}", self.code(), @@ -490,7 +535,8 @@ impl NodeJsErrorCoded for ModuleNotFoundError { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] +#[class(generic)] #[error( "[{}] Directory import '{}' is not supported resolving ES modules{}", self.code(), @@ -508,7 +554,8 @@ impl NodeJsErrorCoded for UnsupportedDirImportError { } } -#[derive(Debug)] +#[derive(Debug, JsError)] +#[class(generic)] pub struct InvalidPackageTargetError { pub pkg_json_path: PathBuf, pub sub_path: String, @@ -564,7 +611,8 @@ impl NodeJsErrorCoded for InvalidPackageTargetError { } } -#[derive(Debug)] +#[derive(Debug, JsError)] +#[class(generic)] pub struct PackagePathNotExportedError { pub pkg_json_path: PathBuf, pub subpath: String, @@ -614,7 +662,8 @@ impl std::fmt::Display for PackagePathNotExportedError { } } -#[derive(Debug, Clone, Error)] +#[derive(Debug, Clone, Error, JsError)] +#[class(type)] #[error( "[{}] Only file and data URLs are supported by the default ESM loader.{} Received protocol '{}'", self.code(), @@ -631,20 +680,25 @@ impl NodeJsErrorCoded for UnsupportedEsmUrlSchemeError { } } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum ResolvePkgJsonBinExportError { + #[class(inherit)] #[error(transparent)] PkgJsonLoad(#[from] PackageJsonLoadError), + #[class(generic)] #[error("Failed resolving binary export. '{}' did not exist", pkg_json_path.display())] MissingPkgJson { pkg_json_path: PathBuf }, + #[class(generic)] #[error("Failed resolving binary export. 
{message}")] InvalidBinProperty { message: String }, } -#[derive(Debug, Error)] +#[derive(Debug, Error, JsError)] pub enum ResolveBinaryCommandsError { + #[class(inherit)] #[error(transparent)] PkgJsonLoad(#[from] PackageJsonLoadError), + #[class(generic)] #[error("'{}' did not have a name", pkg_json_path.display())] MissingPkgJsonName { pkg_json_path: PathBuf }, } @@ -659,7 +713,7 @@ mod test { assert_eq!( PackagePathNotExportedError { pkg_json_path: PathBuf::from("test_path").join("package.json"), - subpath: "./jsx-runtime".to_string(), + subpath: "./jsx-runtime".to_string(), maybe_referrer: None, resolution_kind: NodeResolutionKind::Types }.to_string(), @@ -668,7 +722,7 @@ mod test { assert_eq!( PackagePathNotExportedError { pkg_json_path: PathBuf::from("test_path").join("package.json"), - subpath: ".".to_string(), + subpath: ".".to_string(), maybe_referrer: None, resolution_kind: NodeResolutionKind::Types }.to_string(), diff --git a/resolvers/npm_cache/fs_util.rs b/resolvers/npm_cache/fs_util.rs index 625d83e24d01dc..77269ebe0b447d 100644 --- a/resolvers/npm_cache/fs_util.rs +++ b/resolvers/npm_cache/fs_util.rs @@ -2,10 +2,9 @@ use std::io::ErrorKind; use std::path::Path; +use std::path::PathBuf; use std::time::Duration; -use anyhow::Context; -use anyhow::Error as AnyError; use sys_traits::FsCreateDirAll; use sys_traits::FsDirEntry; use sys_traits::FsHardLink; @@ -13,6 +12,56 @@ use sys_traits::FsReadDir; use sys_traits::FsRemoveFile; use sys_traits::ThreadSleep; +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum HardLinkDirRecursiveError { + #[class(inherit)] + #[error(transparent)] + Io(#[from] std::io::Error), + #[class(inherit)] + #[error("Creating {path}")] + Creating { + path: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(inherit)] + #[error("Creating {path}")] + Reading { + path: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(inherit)] + #[error("Dir {from} to {to}")] + Dir { + from: PathBuf, + to: PathBuf, + #[source] + #[inherit] + source: Box, + }, + #[class(inherit)] + #[error("Removing file to hard link {from} to {to}")] + RemoveFileToHardLink { + from: PathBuf, + to: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(inherit)] + #[error("Hard linking {from} to {to}")] + HardLinking { + from: PathBuf, + to: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, +} + /// Hardlinks the files in one directory to another directory. /// /// Note: Does not handle symlinks. 
@@ -22,13 +71,19 @@ pub fn hard_link_dir_recursive< sys: &TSys, from: &Path, to: &Path, -) -> Result<(), AnyError> { - sys - .fs_create_dir_all(to) - .with_context(|| format!("Creating {}", to.display()))?; - let read_dir = sys - .fs_read_dir(from) - .with_context(|| format!("Reading {}", from.display()))?; +) -> Result<(), HardLinkDirRecursiveError> { + sys.fs_create_dir_all(to).map_err(|source| { + HardLinkDirRecursiveError::Creating { + path: to.to_path_buf(), + source, + } + })?; + let read_dir = sys.fs_read_dir(from).map_err(|source| { + HardLinkDirRecursiveError::Reading { + path: from.to_path_buf(), + source, + } + })?; for entry in read_dir { let entry = entry?; @@ -37,8 +92,12 @@ pub fn hard_link_dir_recursive< let new_to = to.join(entry.file_name()); if file_type.is_dir() { - hard_link_dir_recursive(sys, &new_from, &new_to).with_context(|| { - format!("Dir {} to {}", new_from.display(), new_to.display()) + hard_link_dir_recursive(sys, &new_from, &new_to).map_err(|source| { + HardLinkDirRecursiveError::Dir { + from: new_from.to_path_buf(), + to: new_to.to_path_buf(), + source: Box::new(source), + } })?; } else if file_type.is_file() { // note: chance for race conditions here between attempting to create, @@ -55,12 +114,10 @@ pub fn hard_link_dir_recursive< // faster to reduce contention. sys.thread_sleep(Duration::from_millis(10)); } else { - return Err(err).with_context(|| { - format!( - "Removing file to hard link {} to {}", - new_from.display(), - new_to.display() - ) + return Err(HardLinkDirRecursiveError::RemoveFileToHardLink { + from: new_from.to_path_buf(), + to: new_to.to_path_buf(), + source: err, }); } } @@ -74,22 +131,18 @@ pub fn hard_link_dir_recursive< if err.kind() == ErrorKind::AlreadyExists { sys.thread_sleep(Duration::from_millis(10)); } else { - return Err(err).with_context(|| { - format!( - "Hard linking {} to {}", - new_from.display(), - new_to.display() - ) + return Err(HardLinkDirRecursiveError::HardLinking { + from: new_from.to_path_buf(), + to: new_to.to_path_buf(), + source: err, }); } } } else { - return Err(err).with_context(|| { - format!( - "Hard linking {} to {}", - new_from.display(), - new_to.display() - ) + return Err(HardLinkDirRecursiveError::HardLinking { + from: new_from.to_path_buf(), + to: new_to.to_path_buf(), + source: err, }); } } diff --git a/resolvers/npm_cache/lib.rs b/resolvers/npm_cache/lib.rs index 012c277e226526..90c07de5d598c0 100644 --- a/resolvers/npm_cache/lib.rs +++ b/resolvers/npm_cache/lib.rs @@ -6,11 +6,10 @@ use std::path::Path; use std::path::PathBuf; use std::sync::Arc; -use anyhow::bail; -use anyhow::Context; use anyhow::Error as AnyError; use deno_cache_dir::file_fetcher::CacheSetting; use deno_cache_dir::npm::NpmCacheDir; +use deno_error::JsErrorBox; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; use deno_npm::NpmPackageCacheFolderId; @@ -45,9 +44,11 @@ pub use fs_util::hard_link_dir_recursive; pub use registry_info::get_package_url; pub use registry_info::RegistryInfoProvider; pub use remote::maybe_auth_header_for_npm_registry; +pub use tarball::EnsurePackageError; pub use tarball::TarballCache; -#[derive(Debug)] +#[derive(Debug, deno_error::JsError)] +#[class(generic)] pub struct DownloadError { pub status_code: Option, pub error: AnyError, @@ -203,7 +204,7 @@ impl< pub fn ensure_copy_package( &self, folder_id: &NpmPackageCacheFolderId, - ) -> Result<(), AnyError> { + ) -> Result<(), WithFolderSyncLockError> { let registry_url = self.npmrc.get_registry_url(&folder_id.nv.name); 
assert_ne!(folder_id.copy_index, 0); let package_folder = self.cache_dir.package_folder_for_id( @@ -237,6 +238,7 @@ impl< &original_package_folder, &package_folder, ) + .map_err(JsErrorBox::from_err) })?; Ok(()) } @@ -295,15 +297,15 @@ impl< pub fn load_package_info( &self, name: &str, - ) -> Result, AnyError> { + ) -> Result, serde_json::Error> { let file_cache_path = self.get_registry_package_info_file_cache_path(name); let file_text = match std::fs::read_to_string(file_cache_path) { Ok(file_text) => file_text, Err(err) if err.kind() == ErrorKind::NotFound => return Ok(None), - Err(err) => return Err(err.into()), + Err(err) => return Err(serde_json::Error::io(err)), }; - Ok(serde_json::from_str(&file_text)?) + serde_json::from_str(&file_text) } pub fn save_package_info( @@ -330,18 +332,52 @@ impl< const NPM_PACKAGE_SYNC_LOCK_FILENAME: &str = ".deno_sync_lock"; +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum WithFolderSyncLockError { + #[class(inherit)] + #[error("Error creating '{path}'")] + CreateDir { + path: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(inherit)] + #[error("Error creating package sync lock file at '{path}'. Maybe try manually deleting this folder.")] + CreateLockFile { + path: PathBuf, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(inherit)] + #[error(transparent)] + Action(#[from] JsErrorBox), + #[class(generic)] + #[error("Failed setting up package cache directory for {package}, then failed cleaning it up.\n\nOriginal error:\n\n{error}\n\nRemove error:\n\n{remove_error}\n\nPlease manually delete this folder or you will run into issues using this package in the future:\n\n{output_folder}")] + SetUpPackageCacheDir { + package: Box, + error: Box, + remove_error: std::io::Error, + output_folder: PathBuf, + }, +} + // todo(dsherret): use `sys` here instead of `std::fs`. fn with_folder_sync_lock( package: &PackageNv, output_folder: &Path, - action: impl FnOnce() -> Result<(), AnyError>, -) -> Result<(), AnyError> { + action: impl FnOnce() -> Result<(), JsErrorBox>, +) -> Result<(), WithFolderSyncLockError> { fn inner( output_folder: &Path, - action: impl FnOnce() -> Result<(), AnyError>, - ) -> Result<(), AnyError> { - std::fs::create_dir_all(output_folder).with_context(|| { - format!("Error creating '{}'.", output_folder.display()) + action: impl FnOnce() -> Result<(), JsErrorBox>, + ) -> Result<(), WithFolderSyncLockError> { + std::fs::create_dir_all(output_folder).map_err(|source| { + WithFolderSyncLockError::CreateDir { + path: output_folder.to_path_buf(), + source, + } })?; // This sync lock file is a way to ensure that partially created @@ -365,16 +401,10 @@ fn with_folder_sync_lock( let _ignore = std::fs::remove_file(&sync_lock_path); Ok(()) } - Err(err) => { - bail!( - concat!( - "Error creating package sync lock file at '{}'. 
", - "Maybe try manually deleting this folder.\n\n{:#}", - ), - output_folder.display(), - err - ); - } + Err(err) => Err(WithFolderSyncLockError::CreateLockFile { + path: output_folder.to_path_buf(), + source: err, + }), } } @@ -383,19 +413,12 @@ fn with_folder_sync_lock( Err(err) => { if let Err(remove_err) = std::fs::remove_dir_all(output_folder) { if remove_err.kind() != std::io::ErrorKind::NotFound { - bail!( - concat!( - "Failed setting up package cache directory for {}, then ", - "failed cleaning it up.\n\nOriginal error:\n\n{}\n\n", - "Remove error:\n\n{}\n\nPlease manually ", - "delete this folder or you will run into issues using this ", - "package in the future:\n\n{}" - ), - package, - err, - remove_err, - output_folder.display(), - ); + return Err(WithFolderSyncLockError::SetUpPackageCacheDir { + package: Box::new(package.clone()), + error: Box::new(err), + remove_error: remove_err, + output_folder: output_folder.to_path_buf(), + }); } } Err(err) diff --git a/resolvers/npm_cache/registry_info.rs b/resolvers/npm_cache/registry_info.rs index 0637d75c198e05..ece797abba5ecc 100644 --- a/resolvers/npm_cache/registry_info.rs +++ b/resolvers/npm_cache/registry_info.rs @@ -4,20 +4,19 @@ use std::collections::HashMap; use std::collections::HashSet; use std::sync::Arc; -use anyhow::anyhow; -use anyhow::bail; -use anyhow::Context; -use anyhow::Error as AnyError; use async_trait::async_trait; +use deno_core::futures::future::LocalBoxFuture; +use deno_core::futures::FutureExt; +use deno_core::parking_lot::Mutex; +use deno_core::serde_json; +use deno_core::url::Url; +use deno_error::JsErrorBox; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; use deno_npm::registry::NpmRegistryApi; use deno_npm::registry::NpmRegistryPackageInfoLoadError; use deno_unsync::sync::AtomicFlag; use deno_unsync::sync::MultiRuntimeAsyncValueCreator; -use futures::future::LocalBoxFuture; -use futures::FutureExt; -use parking_lot::Mutex; use sys_traits::FsCreateDirAll; use sys_traits::FsHardLink; use sys_traits::FsMetadata; @@ -27,42 +26,15 @@ use sys_traits::FsRemoveFile; use sys_traits::FsRename; use sys_traits::SystemRandom; use sys_traits::ThreadSleep; -use thiserror::Error; -use url::Url; use crate::remote::maybe_auth_header_for_npm_registry; use crate::NpmCache; use crate::NpmCacheHttpClient; use crate::NpmCacheSetting; -type LoadResult = Result>; +type LoadResult = Result>; type LoadFuture = LocalBoxFuture<'static, LoadResult>; -#[derive(Debug, Error)] -#[error(transparent)] -pub struct AnyhowJsError(pub AnyError); - -impl deno_error::JsErrorClass for AnyhowJsError { - fn get_class(&self) -> &'static str { - "generic" - } - - fn get_message(&self) -> std::borrow::Cow<'static, str> { - self.0.to_string().into() - } - - fn get_additional_properties( - &self, - ) -> Option< - Vec<( - std::borrow::Cow<'static, str>, - std::borrow::Cow<'static, str>, - )>, - > { - None - } -} - #[derive(Debug, Clone)] enum FutureResult { PackageNotExists, @@ -80,7 +52,7 @@ enum MemoryCacheItem { FsCached, /// An item is memory cached when it fails saving to the file system cache /// or the package does not exist. 
- MemoryCached(Result>, Arc>), + MemoryCached(Result>, Arc>), } #[derive(Debug, Default)] @@ -125,6 +97,39 @@ impl MemoryCache { } } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(generic)] +pub enum LoadFileCachedPackageInfoError { + #[error("Previously saved '{name}' from the npm cache, but now it fails to load: {err}")] + LoadPackageInfo { + err: serde_json::Error, + name: String, + }, + #[error("The package '{0}' previously saved its registry information to the file system cache, but that file no longer exists.")] + FileMissing(String), +} + +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(inherit)] +#[error("Failed loading {url} for package \"{name}\"")] +pub struct LoadPackageInfoError { + url: Url, + name: String, + #[inherit] + #[source] + inner: LoadPackageInfoInnerError, +} + +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum LoadPackageInfoInnerError { + #[class(inherit)] + #[error("{0}")] + LoadFileCachedPackageInfo(LoadFileCachedPackageInfoError), + #[class(inherit)] + #[error("{0}")] + Other(Arc), +} + // todo(#27198): refactor to store this only in the http cache /// Downloads packuments from the npm registry. @@ -224,7 +229,7 @@ impl< package_name: name.to_string(), }), Err(err) => Err(NpmRegistryPackageInfoLoadError::LoadError(Arc::new( - AnyhowJsError(err), + JsErrorBox::from_err(err), ))), } } @@ -232,20 +237,20 @@ impl< pub async fn maybe_package_info( self: &Arc, name: &str, - ) -> Result>, AnyError> { - self.load_package_info_inner(name).await.with_context(|| { - format!( - "Failed loading {} for package \"{}\"", - get_package_url(&self.npmrc, name), - name - ) + ) -> Result>, LoadPackageInfoError> { + self.load_package_info_inner(name).await.map_err(|err| { + LoadPackageInfoError { + url: get_package_url(&self.npmrc, name), + name: name.to_string(), + inner: err, + } }) } async fn load_package_info_inner( self: &Arc, name: &str, - ) -> Result>, AnyError> { + ) -> Result>, LoadPackageInfoInnerError> { let (cache_item, clear_id) = { let mut mem_cache = self.memory_cache.lock(); let cache_item = if let Some(cache_item) = mem_cache.get(name) { @@ -270,9 +275,10 @@ impl< .load_file_cached_package_info(name) .await .map(|info| Some(Arc::new(info))) + .map_err(LoadPackageInfoInnerError::LoadFileCachedPackageInfo) } MemoryCacheItem::MemoryCached(maybe_info) => { - maybe_info.clone().map_err(|e| anyhow!("{}", e)) + maybe_info.clone().map_err(LoadPackageInfoInnerError::Other) } MemoryCacheItem::Pending(value_creator) => { match value_creator.get().await { @@ -304,13 +310,13 @@ impl< Ok(None) } Err(err) => { - let return_err = anyhow!("{:#}", err); + let return_err = err.clone(); self.memory_cache.lock().try_insert( clear_id, name, MemoryCacheItem::MemoryCached(Err(err)), ); - Err(return_err) + Err(LoadPackageInfoInnerError::Other(return_err)) } } } @@ -320,7 +326,7 @@ impl< async fn load_file_cached_package_info( &self, name: &str, - ) -> Result { + ) -> Result { // this scenario failing should be exceptionally rare so let's // deal with improving it only when anyone runs into an issue let maybe_package_info = deno_unsync::spawn_blocking({ @@ -330,17 +336,15 @@ impl< }) .await .unwrap() - .with_context(|| { - format!( - "Previously saved '{}' from the npm cache, but now it fails to load.", - name - ) + .map_err(|err| LoadFileCachedPackageInfoError::LoadPackageInfo { + err, + name: name.to_string(), })?; match maybe_package_info { Some(package_info) => Ok(package_info), - None => { - bail!("The package '{}' previously saved 
its registry information to the file system cache, but that file no longer exists.", name) - } + None => Err(LoadFileCachedPackageInfoError::FileMissing( + name.to_string(), + )), } } @@ -352,7 +356,8 @@ impl< match maybe_auth_header_for_npm_registry(registry_config) { Ok(maybe_auth_header) => maybe_auth_header, Err(err) => { - return std::future::ready(Err(Arc::new(err))).boxed_local() + return std::future::ready(Err(Arc::new(JsErrorBox::from_err(err)))) + .boxed_local() } }; let name = name.to_string(); @@ -363,14 +368,14 @@ impl< || downloader.previously_loaded_packages.lock().contains(&name) { // attempt to load from the file cache - if let Some(info) = downloader.cache.load_package_info(&name)? { + if let Some(info) = downloader.cache.load_package_info(&name).map_err(JsErrorBox::from_err)? { let result = Arc::new(info); return Ok(FutureResult::SavedFsCache(result)); } } if *downloader.cache.cache_setting() == NpmCacheSetting::Only { - return Err(deno_core::error::custom_error( + return Err(JsErrorBox::new( "NotCached", format!( "npm package not found in cache: \"{name}\", --cached-only is specified." @@ -386,12 +391,12 @@ impl< package_url, maybe_auth_header, ) - .await?; + .await.map_err(JsErrorBox::from_err)?; match maybe_bytes { Some(bytes) => { let future_result = deno_unsync::spawn_blocking( - move || -> Result { - let package_info = serde_json::from_slice(&bytes)?; + move || -> Result { + let package_info = serde_json::from_slice(&bytes).map_err(JsErrorBox::from_err)?; match downloader.cache.save_package_info(&name, &package_info) { Ok(()) => { Ok(FutureResult::SavedFsCache(Arc::new(package_info))) @@ -407,7 +412,8 @@ impl< } }, ) - .await??; + .await + .map_err(JsErrorBox::from_err)??; Ok(future_result) } None => Ok(FutureResult::PackageNotExists), diff --git a/resolvers/npm_cache/remote.rs b/resolvers/npm_cache/remote.rs index 16eb0f6cd9683b..0e04d0550272b0 100644 --- a/resolvers/npm_cache/remote.rs +++ b/resolvers/npm_cache/remote.rs @@ -1,17 +1,27 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
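The registry_info.rs changes above and the remote.rs/tarball.rs changes that follow still keep a type-erased escape hatch for places where a concrete error enum is impractical (the shared download futures, the sync-lock action closure), but that escape hatch is now deno_error::JsErrorBox instead of anyhow::Error, so a JavaScript error class is always preserved. A hedged sketch of the three constructors used in this diff; BadInput is an illustrative type, not part of the change:

use deno_error::JsErrorBox;

// Illustrative typed error; anything deriving deno_error::JsError works here.
#[derive(Debug, thiserror::Error, deno_error::JsError)]
#[class(type)]
#[error("bad input")]
struct BadInput;

fn examples() -> Vec<JsErrorBox> {
  vec![
    // keeps the class and message of an existing typed error
    JsErrorBox::from_err(BadInput),
    // plain "generic" class with only a message
    JsErrorBox::generic("Tarball URL was empty."),
    // explicit class name, replacing deno_core::error::custom_error
    JsErrorBox::new("NotCached", "npm package not found in cache"),
  ]
}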
-use anyhow::bail; -use anyhow::Context; -use anyhow::Error as AnyError; use base64::prelude::BASE64_STANDARD; use base64::Engine; use deno_npm::npm_rc::RegistryConfig; use http::header; +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum AuthHeaderForNpmRegistryError { + #[class(type)] + #[error("Both the username and password must be provided for basic auth")] + Both, + #[class(type)] + #[error("The password in npmrc is an invalid base64 string: {0}")] + Base64(base64::DecodeError), +} + // TODO(bartlomieju): support more auth methods besides token and basic auth pub fn maybe_auth_header_for_npm_registry( registry_config: &RegistryConfig, -) -> Result, AnyError> { +) -> Result< + Option<(header::HeaderName, header::HeaderValue)>, + AuthHeaderForNpmRegistryError, +> { if let Some(token) = registry_config.auth_token.as_ref() { return Ok(Some(( header::AUTHORIZATION, @@ -33,7 +43,7 @@ pub fn maybe_auth_header_for_npm_registry( if (username.is_some() && password.is_none()) || (username.is_none() && password.is_some()) { - bail!("Both the username and password must be provided for basic auth") + return Err(AuthHeaderForNpmRegistryError::Both); } if username.is_some() && password.is_some() { @@ -42,7 +52,7 @@ pub fn maybe_auth_header_for_npm_registry( // https://github.com/npm/cli/blob/780afc50e3a345feb1871a28e33fa48235bc3bd5/workspaces/config/lib/index.js#L846-L851 let pw_base64 = BASE64_STANDARD .decode(password.unwrap()) - .with_context(|| "The password in npmrc is an invalid base64 string")?; + .map_err(AuthHeaderForNpmRegistryError::Base64)?; let bearer = BASE64_STANDARD.encode(format!( "{}:{}", username.unwrap(), diff --git a/resolvers/npm_cache/tarball.rs b/resolvers/npm_cache/tarball.rs index 49ca3bc7fddf88..d9575ba9cda922 100644 --- a/resolvers/npm_cache/tarball.rs +++ b/resolvers/npm_cache/tarball.rs @@ -3,18 +3,16 @@ use std::collections::HashMap; use std::sync::Arc; -use anyhow::anyhow; -use anyhow::bail; -use anyhow::Context; -use anyhow::Error as AnyError; +use deno_core::futures::future::LocalBoxFuture; +use deno_core::futures::FutureExt; +use deno_core::parking_lot::Mutex; +use deno_core::url::Url; +use deno_error::JsErrorBox; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageVersionDistInfo; use deno_semver::package::PackageNv; use deno_unsync::sync::MultiRuntimeAsyncValueCreator; -use futures::future::LocalBoxFuture; -use futures::FutureExt; use http::StatusCode; -use parking_lot::Mutex; use sys_traits::FsCreateDirAll; use sys_traits::FsHardLink; use sys_traits::FsMetadata; @@ -24,7 +22,6 @@ use sys_traits::FsRemoveFile; use sys_traits::FsRename; use sys_traits::SystemRandom; use sys_traits::ThreadSleep; -use url::Url; use crate::remote::maybe_auth_header_for_npm_registry; use crate::tarball_extract::verify_and_extract_tarball; @@ -33,7 +30,7 @@ use crate::NpmCache; use crate::NpmCacheHttpClient; use crate::NpmCacheSetting; -type LoadResult = Result<(), Arc>; +type LoadResult = Result<(), Arc>; type LoadFuture = LocalBoxFuture<'static, LoadResult>; #[derive(Debug, Clone)] @@ -41,7 +38,7 @@ enum MemoryCacheItem { /// The cache item hasn't finished yet. Pending(Arc>), /// The result errored. - Errored(Arc), + Errored(Arc), /// This package has already been cached. 
Cached, } @@ -73,6 +70,14 @@ pub struct TarballCache< memory_cache: Mutex>, } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(generic)] +#[error("Failed caching npm package '{package_nv}'")] +pub struct EnsurePackageError { + package_nv: Box, + #[source] + source: Arc, +} impl< THttpClient: NpmCacheHttpClient, TSys: FsCreateDirAll @@ -108,18 +113,21 @@ impl< self: &Arc, package_nv: &PackageNv, dist: &NpmPackageVersionDistInfo, - ) -> Result<(), AnyError> { + ) -> Result<(), EnsurePackageError> { self .ensure_package_inner(package_nv, dist) .await - .with_context(|| format!("Failed caching npm package '{}'.", package_nv)) + .map_err(|source| EnsurePackageError { + package_nv: Box::new(package_nv.clone()), + source, + }) } async fn ensure_package_inner( self: &Arc, package_nv: &PackageNv, dist: &NpmPackageVersionDistInfo, - ) -> Result<(), AnyError> { + ) -> Result<(), Arc> { let cache_item = { let mut mem_cache = self.memory_cache.lock(); if let Some(cache_item) = mem_cache.get(package_nv) { @@ -141,7 +149,7 @@ impl< match cache_item { MemoryCacheItem::Cached => Ok(()), - MemoryCacheItem::Errored(err) => Err(anyhow!("{:#}", err)), + MemoryCacheItem::Errored(err) => Err(err), MemoryCacheItem::Pending(creator) => { let result = creator.get().await; match result { @@ -151,10 +159,9 @@ impl< Ok(()) } Err(err) => { - let result_err = anyhow!("{:#}", err); *self.memory_cache.lock().get_mut(package_nv).unwrap() = - MemoryCacheItem::Errored(err); - Err(result_err) + MemoryCacheItem::Errored(err.clone()); + Err(err) } } } @@ -176,7 +183,7 @@ impl< if should_use_cache && package_folder_exists { return Ok(()); } else if tarball_cache.cache.cache_setting() == &NpmCacheSetting::Only { - return Err(deno_core::error::custom_error( + return Err(JsErrorBox::new( "NotCached", format!( "npm package not found in cache: \"{}\", --cached-only is specified.", @@ -187,12 +194,12 @@ impl< } if dist.tarball.is_empty() { - bail!("Tarball URL was empty."); + return Err(JsErrorBox::generic("Tarball URL was empty.")); } // IMPORTANT: npm registries may specify tarball URLs at different URLS than the // registry, so we MUST get the auth for the tarball URL and not the registry URL. - let tarball_uri = Url::parse(&dist.tarball)?; + let tarball_uri = Url::parse(&dist.tarball).map_err(JsErrorBox::from_err)?; let maybe_registry_config = tarball_cache.npmrc.tarball_config(&tarball_uri); let maybe_auth_header = maybe_registry_config.and_then(|c| maybe_auth_header_for_npm_registry(c).ok()?); @@ -207,7 +214,7 @@ impl< && maybe_registry_config.is_none() && tarball_cache.npmrc.get_registry_config(&package_nv.name).auth_token.is_some() { - bail!( + return Err(JsErrorBox::generic(format!( concat!( "No auth for tarball URI, but present for scoped registry.\n\n", "Tarball URI: {}\n", @@ -216,9 +223,9 @@ impl< ), dist.tarball, registry_url, - ) + ))); } - return Err(err.into()) + return Err(JsErrorBox::from_err(err)) }, }; match maybe_bytes { @@ -247,10 +254,10 @@ impl< extraction_mode, ) }) - .await? 
+ .await.map_err(JsErrorBox::from_err)?.map_err(JsErrorBox::from_err) } None => { - bail!("Could not find npm package tarball at: {}", dist.tarball); + Err(JsErrorBox::generic(format!("Could not find npm package tarball at: {}", dist.tarball))) } } } diff --git a/resolvers/npm_cache/tarball_extract.rs b/resolvers/npm_cache/tarball_extract.rs index cf408ac632f87c..e53e4544d22b1b 100644 --- a/resolvers/npm_cache/tarball_extract.rs +++ b/resolvers/npm_cache/tarball_extract.rs @@ -7,9 +7,6 @@ use std::io::ErrorKind; use std::path::Path; use std::path::PathBuf; -use anyhow::bail; -use anyhow::Context; -use anyhow::Error as AnyError; use base64::prelude::BASE64_STANDARD; use base64::Engine; use deno_npm::registry::NpmPackageVersionDistInfo; @@ -31,23 +28,37 @@ pub enum TarballExtractionMode { SiblingTempDir, } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum VerifyAndExtractTarballError { + #[class(inherit)] + #[error(transparent)] + TarballIntegrity(#[from] TarballIntegrityError), + #[class(inherit)] + #[error(transparent)] + ExtractTarball(#[from] ExtractTarballError), + #[class(inherit)] + #[error("Failed moving extracted tarball to final destination")] + MoveFailed(std::io::Error), +} + pub fn verify_and_extract_tarball( package_nv: &PackageNv, data: &[u8], dist_info: &NpmPackageVersionDistInfo, output_folder: &Path, extraction_mode: TarballExtractionMode, -) -> Result<(), AnyError> { +) -> Result<(), VerifyAndExtractTarballError> { verify_tarball_integrity(package_nv, data, &dist_info.integrity())?; match extraction_mode { - TarballExtractionMode::Overwrite => extract_tarball(data, output_folder), + TarballExtractionMode::Overwrite => { + extract_tarball(data, output_folder).map_err(Into::into) + } TarballExtractionMode::SiblingTempDir => { let temp_dir = get_atomic_dir_path(output_folder); extract_tarball(data, &temp_dir)?; rename_with_retries(&temp_dir, output_folder) - .map_err(AnyError::from) - .context("Failed moving extracted tarball to final destination.") + .map_err(VerifyAndExtractTarballError::MoveFailed) } } } @@ -89,11 +100,32 @@ fn rename_with_retries( } } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(generic)] +pub enum TarballIntegrityError { + #[error("Not implemented hash function for {package}: {hash_kind}")] + NotImplementedHashFunction { + package: Box, + hash_kind: String, + }, + #[error("Not implemented integrity kind for {package}: {integrity}")] + NotImplementedIntegrityKind { + package: Box, + integrity: String, + }, + #[error("Tarball checksum did not match what was provided by npm registry for {package}.\n\nExpected: {expected}\nActual: {actual}")] + MismatchedChecksum { + package: Box, + expected: String, + actual: String, + }, +} + fn verify_tarball_integrity( package: &PackageNv, data: &[u8], npm_integrity: &NpmPackageVersionDistInfoIntegrity, -) -> Result<(), AnyError> { +) -> Result<(), TarballIntegrityError> { use ring::digest::Context; let (tarball_checksum, expected_checksum) = match npm_integrity { NpmPackageVersionDistInfoIntegrity::Integrity { @@ -103,11 +135,12 @@ fn verify_tarball_integrity( let algo = match *algorithm { "sha512" => &ring::digest::SHA512, "sha1" => &ring::digest::SHA1_FOR_LEGACY_USE_ONLY, - hash_kind => bail!( - "Not implemented hash function for {}: {}", - package, - hash_kind - ), + hash_kind => { + return Err(TarballIntegrityError::NotImplementedHashFunction { + package: Box::new(package.clone()), + hash_kind: hash_kind.to_string(), + }); + } }; let mut hash_ctx = Context::new(algo); 
hash_ctx.update(data); @@ -123,26 +156,39 @@ fn verify_tarball_integrity( (tarball_checksum, hex) } NpmPackageVersionDistInfoIntegrity::UnknownIntegrity(integrity) => { - bail!( - "Not implemented integrity kind for {}: {}", - package, - integrity - ) + return Err(TarballIntegrityError::NotImplementedIntegrityKind { + package: Box::new(package.clone()), + integrity: integrity.to_string(), + }); } }; if tarball_checksum != *expected_checksum { - bail!( - "Tarball checksum did not match what was provided by npm registry for {}.\n\nExpected: {}\nActual: {}", - package, - expected_checksum, - tarball_checksum, - ) + return Err(TarballIntegrityError::MismatchedChecksum { + package: Box::new(package.clone()), + expected: expected_checksum.to_string(), + actual: tarball_checksum, + }); } Ok(()) } -fn extract_tarball(data: &[u8], output_folder: &Path) -> Result<(), AnyError> { +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum ExtractTarballError { + #[class(inherit)] + #[error(transparent)] + Io(#[from] std::io::Error), + #[class(generic)] + #[error( + "Extracted directory '{0}' of npm tarball was not in output directory." + )] + NotInOutputDirectory(PathBuf), +} + +fn extract_tarball( + data: &[u8], + output_folder: &Path, +) -> Result<(), ExtractTarballError> { fs::create_dir_all(output_folder)?; let output_folder = fs::canonicalize(output_folder)?; let tar = GzDecoder::new(data); @@ -174,10 +220,9 @@ fn extract_tarball(data: &[u8], output_folder: &Path) -> Result<(), AnyError> { fs::create_dir_all(dir_path)?; let canonicalized_dir = fs::canonicalize(dir_path)?; if !canonicalized_dir.starts_with(&output_folder) { - bail!( - "Extracted directory '{}' of npm tarball was not in output directory.", - canonicalized_dir.display() - ) + return Err(ExtractTarballError::NotInOutputDirectory( + canonicalized_dir.to_path_buf(), + )); } } diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 9826a7459db2a4..72db8888f82cd1 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -79,6 +79,7 @@ deno_console.workspace = true deno_core.workspace = true deno_cron.workspace = true deno_crypto.workspace = true +deno_error.workspace = true deno_fetch.workspace = true deno_ffi.workspace = true deno_fs = { workspace = true, features = ["sync_fs"] } diff --git a/runtime/errors.rs b/runtime/errors.rs deleted file mode 100644 index feb494296723b4..00000000000000 --- a/runtime/errors.rs +++ /dev/null @@ -1,1983 +0,0 @@ -// Copyright 2018-2025 the Deno authors. MIT license. - -//! There are many types of errors in Deno: -//! - AnyError: a generic wrapper that can encapsulate any type of error. -//! - JsError: a container for the error message and stack trace for exceptions -//! thrown in JavaScript code. We use this to pretty-print stack traces. -//! - Diagnostic: these are errors that originate in TypeScript's compiler. -//! They're similar to JsError, in that they have line numbers. But -//! Diagnostics are compile-time type errors, whereas JsErrors are runtime -//! exceptions. 
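The runtime/errors.rs module being deleted here (all 1,983 lines of it) existed to translate each crate's error types into JavaScript error class names with hand-written match arms. With the per-type #[class(...)] metadata added throughout this diff, that lookup lives on the error itself, so the central table is no longer needed. A minimal sketch of the replacement lookup; class_of is an illustrative helper, not an API introduced by this change:

use deno_error::JsErrorClass;

// Instead of a per-crate get_*_error_class(e) match, any error that derives
// deno_error::JsError exposes its class through the JsErrorClass trait.
fn class_of<E: JsErrorClass>(err: &E) -> &'static str {
  err.get_class()
}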
- -use std::env; -use std::error::Error; -use std::io; -use std::sync::Arc; - -use deno_broadcast_channel::BroadcastChannelError; -use deno_cache::CacheError; -use deno_canvas::CanvasError; -use deno_core::error::AnyError; -use deno_core::serde_json; -use deno_core::url; -use deno_core::ModuleResolutionError; -use deno_cron::CronError; -use deno_crypto::DecryptError; -use deno_crypto::EncryptError; -use deno_crypto::ExportKeyError; -use deno_crypto::GenerateKeyError; -use deno_crypto::ImportKeyError; -use deno_fetch::FetchError; -use deno_fetch::HttpClientCreateError; -use deno_ffi::CallError; -use deno_ffi::CallbackError; -use deno_ffi::DlfcnError; -use deno_ffi::IRError; -use deno_ffi::ReprError; -use deno_ffi::StaticError; -use deno_fs::FsOpsError; -use deno_fs::FsOpsErrorKind; -use deno_http::HttpError; -use deno_http::HttpNextError; -use deno_http::WebSocketUpgradeError; -use deno_io::fs::FsError; -use deno_kv::KvCheckError; -use deno_kv::KvError; -use deno_kv::KvErrorKind; -use deno_kv::KvMutationError; -use deno_napi::NApiError; -use deno_net::ops::NetError; -use deno_net::QuicError; -use deno_permissions::ChildPermissionError; -use deno_permissions::NetDescriptorFromUrlParseError; -use deno_permissions::PathResolveError; -use deno_permissions::PermissionCheckError; -use deno_permissions::RunDescriptorParseError; -use deno_permissions::SysDescriptorParseError; -use deno_tls::TlsError; -use deno_web::BlobError; -use deno_web::CompressionError; -use deno_web::MessagePortError; -use deno_web::StreamResourceError; -use deno_web::WebError; -use deno_websocket::HandshakeError; -use deno_websocket::WebsocketError; -use deno_webstorage::WebStorageError; -use rustyline::error::ReadlineError; - -use crate::ops::fs_events::FsEventsError; -use crate::ops::http::HttpStartError; -use crate::ops::os::OsError; -use crate::ops::permissions::PermissionError; -use crate::ops::process::CheckRunPermissionError; -use crate::ops::process::ProcessError; -use crate::ops::signal::SignalError; -use crate::ops::tty::TtyError; -use crate::ops::web_worker::SyncFetchError; -use crate::ops::worker_host::CreateWorkerError; - -fn get_run_descriptor_parse_error(e: &RunDescriptorParseError) -> &'static str { - match e { - RunDescriptorParseError::Which(_) => "Error", - RunDescriptorParseError::PathResolve(e) => get_path_resolve_error(e), - RunDescriptorParseError::EmptyRunQuery => "Error", - } -} - -fn get_sys_descriptor_parse_error(e: &SysDescriptorParseError) -> &'static str { - match e { - SysDescriptorParseError::InvalidKind(_) => "TypeError", - SysDescriptorParseError::Empty => "Error", - } -} - -fn get_path_resolve_error(e: &PathResolveError) -> &'static str { - match e { - PathResolveError::CwdResolve(e) => get_io_error_class(e), - PathResolveError::EmptyPath => "Error", - } -} - -fn get_permission_error_class(e: &PermissionError) -> &'static str { - match e { - PermissionError::InvalidPermissionName(_) => "ReferenceError", - PermissionError::PathResolve(e) => get_path_resolve_error(e), - PermissionError::NetDescriptorParse(_) => "URIError", - PermissionError::SysDescriptorParse(e) => get_sys_descriptor_parse_error(e), - PermissionError::RunDescriptorParse(e) => get_run_descriptor_parse_error(e), - } -} - -fn get_permission_check_error_class(e: &PermissionCheckError) -> &'static str { - match e { - PermissionCheckError::PermissionDenied(_) => "NotCapable", - PermissionCheckError::InvalidFilePath(_) => "URIError", - PermissionCheckError::NetDescriptorForUrlParse(e) => match e { - 
NetDescriptorFromUrlParseError::MissingHost(_) => "TypeError", - NetDescriptorFromUrlParseError::Host(_) => "URIError", - }, - PermissionCheckError::SysDescriptorParse(e) => { - get_sys_descriptor_parse_error(e) - } - PermissionCheckError::PathResolve(e) => get_path_resolve_error(e), - PermissionCheckError::HostParse(_) => "URIError", - } -} - -fn get_dlopen_error_class(error: &dlopen2::Error) -> &'static str { - use dlopen2::Error::*; - match error { - NullCharacter(_) => "InvalidData", - OpeningLibraryError(ref e) => get_io_error_class(e), - SymbolGettingError(ref e) => get_io_error_class(e), - AddrNotMatchingDll(ref e) => get_io_error_class(e), - NullSymbol => "NotFound", - } -} - -fn get_env_var_error_class(error: &env::VarError) -> &'static str { - use env::VarError::*; - match error { - NotPresent => "NotFound", - NotUnicode(..) => "InvalidData", - } -} - -fn get_io_error_class(error: &io::Error) -> &'static str { - use io::ErrorKind::*; - match error.kind() { - NotFound => "NotFound", - PermissionDenied => "PermissionDenied", - ConnectionRefused => "ConnectionRefused", - ConnectionReset => "ConnectionReset", - ConnectionAborted => "ConnectionAborted", - NotConnected => "NotConnected", - AddrInUse => "AddrInUse", - AddrNotAvailable => "AddrNotAvailable", - BrokenPipe => "BrokenPipe", - AlreadyExists => "AlreadyExists", - InvalidInput => "TypeError", - InvalidData => "InvalidData", - TimedOut => "TimedOut", - Interrupted => "Interrupted", - WriteZero => "WriteZero", - UnexpectedEof => "UnexpectedEof", - Other => "Error", - WouldBlock => "WouldBlock", - // Non-exhaustive enum - might add new variants - // in the future - kind => { - let kind_str = kind.to_string(); - match kind_str.as_str() { - "FilesystemLoop" => "FilesystemLoop", - "IsADirectory" => "IsADirectory", - "NetworkUnreachable" => "NetworkUnreachable", - "NotADirectory" => "NotADirectory", - _ => "Error", - } - } - } -} - -fn get_module_resolution_error_class( - _: &ModuleResolutionError, -) -> &'static str { - "URIError" -} - -fn get_notify_error_class(error: ¬ify::Error) -> &'static str { - use notify::ErrorKind::*; - match error.kind { - Generic(_) => "Error", - Io(ref e) => get_io_error_class(e), - PathNotFound => "NotFound", - WatchNotFound => "NotFound", - InvalidConfig(_) => "InvalidData", - MaxFilesWatch => "Error", - } -} - -fn get_regex_error_class(error: ®ex::Error) -> &'static str { - use regex::Error::*; - match error { - Syntax(_) => "SyntaxError", - CompiledTooBig(_) => "RangeError", - _ => "Error", - } -} - -fn get_serde_json_error_class( - error: &serde_json::error::Error, -) -> &'static str { - use deno_core::serde_json::error::*; - match error.classify() { - Category::Io => error - .source() - .and_then(|e| e.downcast_ref::()) - .map(get_io_error_class) - .unwrap(), - Category::Syntax => "SyntaxError", - Category::Data => "InvalidData", - Category::Eof => "UnexpectedEof", - } -} - -fn get_url_parse_error_class(_error: &url::ParseError) -> &'static str { - "URIError" -} - -fn get_hyper_error_class(_error: &hyper::Error) -> &'static str { - "Http" -} - -fn get_hyper_util_error_class( - _error: &hyper_util::client::legacy::Error, -) -> &'static str { - "Http" -} - -fn get_hyper_v014_error_class(_error: &hyper_v014::Error) -> &'static str { - "Http" -} - -#[cfg(unix)] -pub fn get_nix_error_class(error: &nix::Error) -> &'static str { - match error { - nix::Error::ECHILD => "NotFound", - nix::Error::EINVAL => "TypeError", - nix::Error::ENOENT => "NotFound", - nix::Error::ENOTTY => "BadResource", - 
nix::Error::EPERM => "PermissionDenied", - nix::Error::ESRCH => "NotFound", - nix::Error::ELOOP => "FilesystemLoop", - nix::Error::ENOTDIR => "NotADirectory", - nix::Error::ENETUNREACH => "NetworkUnreachable", - nix::Error::EISDIR => "IsADirectory", - nix::Error::UnknownErrno => "Error", - &nix::Error::ENOTSUP => unreachable!(), - _ => "Error", - } -} - -fn get_webgpu_error_class(e: &deno_webgpu::InitError) -> &'static str { - match e { - deno_webgpu::InitError::Resource(e) => { - get_error_class_name(e).unwrap_or("Error") - } - deno_webgpu::InitError::InvalidAdapter(_) => "Error", - deno_webgpu::InitError::RequestDevice(_) => "DOMExceptionOperationError", - deno_webgpu::InitError::InvalidDevice(_) => "Error", - } -} - -fn get_webgpu_buffer_error_class( - e: &deno_webgpu::buffer::BufferError, -) -> &'static str { - match e { - deno_webgpu::buffer::BufferError::Resource(e) => { - get_error_class_name(e).unwrap_or("Error") - } - deno_webgpu::buffer::BufferError::InvalidUsage => "TypeError", - deno_webgpu::buffer::BufferError::Access(_) => "DOMExceptionOperationError", - } -} - -fn get_webgpu_bundle_error_class( - e: &deno_webgpu::bundle::BundleError, -) -> &'static str { - match e { - deno_webgpu::bundle::BundleError::Resource(e) => { - get_error_class_name(e).unwrap_or("Error") - } - deno_webgpu::bundle::BundleError::InvalidSize => "TypeError", - } -} - -fn get_webgpu_byow_error_class( - e: &deno_webgpu::byow::ByowError, -) -> &'static str { - match e { - deno_webgpu::byow::ByowError::WebGPUNotInitiated => "TypeError", - deno_webgpu::byow::ByowError::InvalidParameters => "TypeError", - deno_webgpu::byow::ByowError::CreateSurface(_) => "Error", - deno_webgpu::byow::ByowError::InvalidSystem => "TypeError", - #[cfg(any( - target_os = "windows", - target_os = "linux", - target_os = "freebsd", - target_os = "openbsd" - ))] - deno_webgpu::byow::ByowError::NullWindow => "TypeError", - #[cfg(any( - target_os = "linux", - target_os = "freebsd", - target_os = "openbsd" - ))] - deno_webgpu::byow::ByowError::NullDisplay => "TypeError", - #[cfg(target_os = "macos")] - deno_webgpu::byow::ByowError::NSViewDisplay => "TypeError", - } -} - -fn get_webgpu_render_pass_error_class( - e: &deno_webgpu::render_pass::RenderPassError, -) -> &'static str { - match e { - deno_webgpu::render_pass::RenderPassError::Resource(e) => { - get_error_class_name(e).unwrap_or("Error") - } - deno_webgpu::render_pass::RenderPassError::InvalidSize => "TypeError", - } -} - -fn get_webgpu_surface_error_class( - e: &deno_webgpu::surface::SurfaceError, -) -> &'static str { - match e { - deno_webgpu::surface::SurfaceError::Resource(e) => { - get_error_class_name(e).unwrap_or("Error") - } - deno_webgpu::surface::SurfaceError::Surface(_) => "Error", - deno_webgpu::surface::SurfaceError::InvalidStatus => "Error", - } -} - -fn get_crypto_decrypt_error_class(e: &DecryptError) -> &'static str { - match e { - DecryptError::General(e) => get_crypto_shared_error_class(e), - DecryptError::Pkcs1(_) => "Error", - DecryptError::Failed => "DOMExceptionOperationError", - DecryptError::InvalidLength => "TypeError", - DecryptError::InvalidCounterLength => "TypeError", - DecryptError::InvalidTagLength => "TypeError", - DecryptError::InvalidKeyOrIv => "DOMExceptionOperationError", - DecryptError::TooMuchData => "DOMExceptionOperationError", - DecryptError::InvalidIvLength => "TypeError", - DecryptError::Rsa(_) => "DOMExceptionOperationError", - } -} - -fn get_crypto_encrypt_error_class(e: &EncryptError) -> &'static str { - match e { - 
EncryptError::General(e) => get_crypto_shared_error_class(e), - EncryptError::InvalidKeyOrIv => "DOMExceptionOperationError", - EncryptError::Failed => "DOMExceptionOperationError", - EncryptError::InvalidLength => "TypeError", - EncryptError::InvalidIvLength => "TypeError", - EncryptError::InvalidCounterLength => "TypeError", - EncryptError::TooMuchData => "DOMExceptionOperationError", - } -} - -fn get_crypto_shared_error_class(e: &deno_crypto::SharedError) -> &'static str { - match e { - deno_crypto::SharedError::ExpectedValidPrivateKey => "TypeError", - deno_crypto::SharedError::ExpectedValidPublicKey => "TypeError", - deno_crypto::SharedError::ExpectedValidPrivateECKey => "TypeError", - deno_crypto::SharedError::ExpectedValidPublicECKey => "TypeError", - deno_crypto::SharedError::ExpectedPrivateKey => "TypeError", - deno_crypto::SharedError::ExpectedPublicKey => "TypeError", - deno_crypto::SharedError::ExpectedSecretKey => "TypeError", - deno_crypto::SharedError::FailedDecodePrivateKey => { - "DOMExceptionOperationError" - } - deno_crypto::SharedError::FailedDecodePublicKey => { - "DOMExceptionOperationError" - } - deno_crypto::SharedError::UnsupportedFormat => { - "DOMExceptionNotSupportedError" - } - } -} - -fn get_crypto_ed25519_error_class( - e: &deno_crypto::Ed25519Error, -) -> &'static str { - match e { - deno_crypto::Ed25519Error::FailedExport => "DOMExceptionOperationError", - deno_crypto::Ed25519Error::Der(_) => "Error", - deno_crypto::Ed25519Error::KeyRejected(_) => "Error", - } -} - -fn get_crypto_export_key_error_class(e: &ExportKeyError) -> &'static str { - match e { - ExportKeyError::General(e) => get_crypto_shared_error_class(e), - ExportKeyError::Der(_) => "Error", - ExportKeyError::UnsupportedNamedCurve => "DOMExceptionNotSupportedError", - } -} - -fn get_crypto_generate_key_error_class(e: &GenerateKeyError) -> &'static str { - match e { - GenerateKeyError::General(e) => get_crypto_shared_error_class(e), - GenerateKeyError::BadPublicExponent => "DOMExceptionOperationError", - GenerateKeyError::InvalidHMACKeyLength => "DOMExceptionOperationError", - GenerateKeyError::FailedRSAKeySerialization => "DOMExceptionOperationError", - GenerateKeyError::InvalidAESKeyLength => "DOMExceptionOperationError", - GenerateKeyError::FailedRSAKeyGeneration => "DOMExceptionOperationError", - GenerateKeyError::FailedECKeyGeneration => "DOMExceptionOperationError", - GenerateKeyError::FailedKeyGeneration => "DOMExceptionOperationError", - } -} - -fn get_crypto_import_key_error_class(e: &ImportKeyError) -> &'static str { - match e { - ImportKeyError::General(e) => get_crypto_shared_error_class(e), - ImportKeyError::InvalidModulus => "DOMExceptionDataError", - ImportKeyError::InvalidPublicExponent => "DOMExceptionDataError", - ImportKeyError::InvalidPrivateExponent => "DOMExceptionDataError", - ImportKeyError::InvalidFirstPrimeFactor => "DOMExceptionDataError", - ImportKeyError::InvalidSecondPrimeFactor => "DOMExceptionDataError", - ImportKeyError::InvalidFirstCRTExponent => "DOMExceptionDataError", - ImportKeyError::InvalidSecondCRTExponent => "DOMExceptionDataError", - ImportKeyError::InvalidCRTCoefficient => "DOMExceptionDataError", - ImportKeyError::InvalidB64Coordinate => "DOMExceptionDataError", - ImportKeyError::InvalidRSAPublicKey => "DOMExceptionDataError", - ImportKeyError::InvalidRSAPrivateKey => "DOMExceptionDataError", - ImportKeyError::UnsupportedAlgorithm => "DOMExceptionDataError", - ImportKeyError::PublicKeyTooLong => "DOMExceptionDataError", - 
ImportKeyError::PrivateKeyTooLong => "DOMExceptionDataError", - ImportKeyError::InvalidP256ECPoint => "DOMExceptionDataError", - ImportKeyError::InvalidP384ECPoint => "DOMExceptionDataError", - ImportKeyError::InvalidP521ECPoint => "DOMExceptionDataError", - ImportKeyError::UnsupportedNamedCurve => "DOMExceptionDataError", - ImportKeyError::CurveMismatch => "DOMExceptionDataError", - ImportKeyError::InvalidKeyData => "DOMExceptionDataError", - ImportKeyError::InvalidJWKPrivateKey => "DOMExceptionDataError", - ImportKeyError::EllipticCurve(_) => "DOMExceptionDataError", - ImportKeyError::ExpectedValidPkcs8Data => "DOMExceptionDataError", - ImportKeyError::MalformedParameters => "DOMExceptionDataError", - ImportKeyError::Spki(_) => "DOMExceptionDataError", - ImportKeyError::InvalidP256ECSPKIData => "DOMExceptionDataError", - ImportKeyError::InvalidP384ECSPKIData => "DOMExceptionDataError", - ImportKeyError::InvalidP521ECSPKIData => "DOMExceptionDataError", - ImportKeyError::Der(_) => "DOMExceptionDataError", - } -} - -fn get_crypto_x448_error_class(e: &deno_crypto::X448Error) -> &'static str { - match e { - deno_crypto::X448Error::FailedExport => "DOMExceptionOperationError", - deno_crypto::X448Error::Der(_) => "Error", - } -} - -fn get_crypto_x25519_error_class(e: &deno_crypto::X25519Error) -> &'static str { - match e { - deno_crypto::X25519Error::FailedExport => "DOMExceptionOperationError", - deno_crypto::X25519Error::Der(_) => "Error", - } -} - -fn get_crypto_error_class(e: &deno_crypto::Error) -> &'static str { - match e { - deno_crypto::Error::Der(_) => "Error", - deno_crypto::Error::JoinError(_) => "Error", - deno_crypto::Error::MissingArgumentHash => "TypeError", - deno_crypto::Error::MissingArgumentSaltLength => "TypeError", - deno_crypto::Error::Other(e) => get_error_class_name(e).unwrap_or("Error"), - deno_crypto::Error::UnsupportedAlgorithm => "TypeError", - deno_crypto::Error::KeyRejected(_) => "Error", - deno_crypto::Error::RSA(_) => "Error", - deno_crypto::Error::Pkcs1(_) => "Error", - deno_crypto::Error::Unspecified(_) => "Error", - deno_crypto::Error::InvalidKeyFormat => "TypeError", - deno_crypto::Error::MissingArgumentPublicKey => "TypeError", - deno_crypto::Error::P256Ecdsa(_) => "Error", - deno_crypto::Error::DecodePrivateKey => "TypeError", - deno_crypto::Error::MissingArgumentNamedCurve => "TypeError", - deno_crypto::Error::MissingArgumentInfo => "TypeError", - deno_crypto::Error::HKDFLengthTooLarge => "DOMExceptionOperationError", - deno_crypto::Error::General(e) => get_crypto_shared_error_class(e), - deno_crypto::Error::Base64Decode(_) => "Error", - deno_crypto::Error::DataInvalidSize => "TypeError", - deno_crypto::Error::InvalidKeyLength => "TypeError", - deno_crypto::Error::EncryptionError => "DOMExceptionOperationError", - deno_crypto::Error::DecryptionError => "DOMExceptionOperationError", - deno_crypto::Error::ArrayBufferViewLengthExceeded(_) => { - "DOMExceptionQuotaExceededError" - } - } -} - -fn get_napi_error_class(e: &NApiError) -> &'static str { - match e { - NApiError::InvalidPath - | NApiError::LibLoading(_) - | NApiError::ModuleNotFound(_) => "TypeError", - NApiError::Permission(e) => get_permission_check_error_class(e), - } -} - -fn get_web_error_class(e: &WebError) -> &'static str { - match e { - WebError::Base64Decode => "DOMExceptionInvalidCharacterError", - WebError::InvalidEncodingLabel(_) => "RangeError", - WebError::BufferTooLong => "TypeError", - WebError::ValueTooLarge => "RangeError", - WebError::BufferTooSmall => "RangeError", - 
WebError::DataInvalid => "TypeError", - WebError::DataError(_) => "Error", - } -} - -fn get_web_compression_error_class(e: &CompressionError) -> &'static str { - match e { - CompressionError::UnsupportedFormat => "TypeError", - CompressionError::ResourceClosed => "TypeError", - CompressionError::IoTypeError(_) => "TypeError", - CompressionError::Io(e) => get_io_error_class(e), - } -} - -fn get_web_message_port_error_class(e: &MessagePortError) -> &'static str { - match e { - MessagePortError::InvalidTransfer => "TypeError", - MessagePortError::NotReady => "TypeError", - MessagePortError::TransferSelf => "TypeError", - MessagePortError::Canceled(e) => { - let io_err: io::Error = e.to_owned().into(); - get_io_error_class(&io_err) - } - MessagePortError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - } -} - -fn get_web_stream_resource_error_class( - e: &StreamResourceError, -) -> &'static str { - match e { - StreamResourceError::Canceled(e) => { - let io_err: io::Error = e.to_owned().into(); - get_io_error_class(&io_err) - } - StreamResourceError::Js(_) => "TypeError", - } -} - -fn get_web_blob_error_class(e: &BlobError) -> &'static str { - match e { - BlobError::BlobPartNotFound => "TypeError", - BlobError::SizeLargerThanBlobPart => "TypeError", - BlobError::BlobURLsNotSupported => "TypeError", - BlobError::Url(_) => "Error", - } -} - -fn get_ffi_repr_error_class(e: &ReprError) -> &'static str { - match e { - ReprError::InvalidOffset => "TypeError", - ReprError::InvalidArrayBuffer => "TypeError", - ReprError::DestinationLengthTooShort => "RangeError", - ReprError::InvalidCString => "TypeError", - ReprError::CStringTooLong => "TypeError", - ReprError::InvalidBool => "TypeError", - ReprError::InvalidU8 => "TypeError", - ReprError::InvalidI8 => "TypeError", - ReprError::InvalidU16 => "TypeError", - ReprError::InvalidI16 => "TypeError", - ReprError::InvalidU32 => "TypeError", - ReprError::InvalidI32 => "TypeError", - ReprError::InvalidU64 => "TypeError", - ReprError::InvalidI64 => "TypeError", - ReprError::InvalidF32 => "TypeError", - ReprError::InvalidF64 => "TypeError", - ReprError::InvalidPointer => "TypeError", - ReprError::Permission(e) => get_permission_check_error_class(e), - } -} - -fn get_ffi_dlfcn_error_class(e: &DlfcnError) -> &'static str { - match e { - DlfcnError::RegisterSymbol { .. 
} => "Error", - DlfcnError::Dlopen(_) => "Error", - DlfcnError::Permission(e) => get_permission_check_error_class(e), - DlfcnError::Other(e) => get_error_class_name(e).unwrap_or("Error"), - } -} - -fn get_ffi_static_error_class(e: &StaticError) -> &'static str { - match e { - StaticError::Dlfcn(e) => get_ffi_dlfcn_error_class(e), - StaticError::InvalidTypeVoid => "TypeError", - StaticError::InvalidTypeStruct => "TypeError", - StaticError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - } -} - -fn get_ffi_callback_error_class(e: &CallbackError) -> &'static str { - match e { - CallbackError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - CallbackError::Other(e) => get_error_class_name(e).unwrap_or("Error"), - CallbackError::Permission(e) => get_permission_check_error_class(e), - } -} - -fn get_ffi_call_error_class(e: &CallError) -> &'static str { - match e { - CallError::IR(_) => "TypeError", - CallError::NonblockingCallFailure(_) => "Error", - CallError::InvalidSymbol(_) => "TypeError", - CallError::Permission(e) => get_permission_check_error_class(e), - CallError::Callback(e) => get_ffi_callback_error_class(e), - CallError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - } -} - -fn get_webstorage_class_name(e: &WebStorageError) -> &'static str { - match e { - WebStorageError::ContextNotSupported => "DOMExceptionNotSupportedError", - WebStorageError::Sqlite(_) => "Error", - WebStorageError::Io(e) => get_io_error_class(e), - WebStorageError::StorageExceeded => "DOMExceptionQuotaExceededError", - } -} - -fn get_tls_error_class(e: &TlsError) -> &'static str { - match e { - TlsError::Rustls(_) => "Error", - TlsError::UnableAddPemFileToCert(e) => get_io_error_class(e), - TlsError::CertInvalid - | TlsError::CertsNotFound - | TlsError::KeysNotFound - | TlsError::KeyDecode => "InvalidData", - } -} - -pub fn get_cron_error_class(e: &CronError) -> &'static str { - match e { - CronError::Resource(e) => { - deno_core::error::get_custom_error_class(e).unwrap_or("Error") - } - CronError::NameExceeded(_) => "TypeError", - CronError::NameInvalid => "TypeError", - CronError::AlreadyExists => "TypeError", - CronError::TooManyCrons => "TypeError", - CronError::InvalidCron => "TypeError", - CronError::InvalidBackoff => "TypeError", - CronError::AcquireError(_) => "Error", - CronError::Other(e) => get_error_class_name(e).unwrap_or("Error"), - } -} - -fn get_canvas_error(e: &CanvasError) -> &'static str { - match e { - CanvasError::UnsupportedColorType(_) => "TypeError", - CanvasError::Image(_) => "Error", - } -} - -pub fn get_cache_error(error: &CacheError) -> &'static str { - match error { - CacheError::Sqlite(_) => "Error", - CacheError::JoinError(_) => "Error", - CacheError::Resource(err) => { - deno_core::error::get_custom_error_class(err).unwrap_or("Error") - } - CacheError::Other(e) => get_error_class_name(e).unwrap_or("Error"), - CacheError::Io(err) => get_io_error_class(err), - } -} - -fn get_broadcast_channel_error(error: &BroadcastChannelError) -> &'static str { - match error { - BroadcastChannelError::Resource(err) => { - deno_core::error::get_custom_error_class(err).unwrap() - } - BroadcastChannelError::MPSCSendError(_) => "Error", - BroadcastChannelError::BroadcastSendError(_) => "Error", - BroadcastChannelError::Other(err) => { - get_error_class_name(err).unwrap_or("Error") - } - } -} - -fn get_fetch_error(error: &FetchError) -> &'static str { - match error { - FetchError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - FetchError::Permission(e) 
=> get_permission_check_error_class(e), - FetchError::NetworkError => "TypeError", - FetchError::FsNotGet(_) => "TypeError", - FetchError::PathToUrl(_) => "TypeError", - FetchError::InvalidUrl(_) => "TypeError", - FetchError::InvalidHeaderName(_) => "TypeError", - FetchError::InvalidHeaderValue(_) => "TypeError", - FetchError::DataUrl(_) => "TypeError", - FetchError::Base64(_) => "TypeError", - FetchError::BlobNotFound => "TypeError", - FetchError::SchemeNotSupported(_) => "TypeError", - FetchError::RequestCanceled => "TypeError", - FetchError::Http(_) => "Error", - FetchError::ClientCreate(e) => get_http_client_create_error(e), - FetchError::Url(e) => get_url_parse_error_class(e), - FetchError::Method(_) => "TypeError", - FetchError::ClientSend(_) => "TypeError", - FetchError::RequestBuilderHook(_) => "TypeError", - FetchError::Io(e) => get_io_error_class(e), - } -} - -fn get_http_client_create_error(error: &HttpClientCreateError) -> &'static str { - match error { - HttpClientCreateError::Tls(_) => "TypeError", - HttpClientCreateError::InvalidUserAgent(_) => "TypeError", - HttpClientCreateError::InvalidProxyUrl => "TypeError", - HttpClientCreateError::HttpVersionSelectionInvalid => "TypeError", - HttpClientCreateError::RootCertStore(_) => "TypeError", - } -} - -fn get_websocket_error(error: &WebsocketError) -> &'static str { - match error { - WebsocketError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - WebsocketError::Permission(e) => get_permission_check_error_class(e), - WebsocketError::Url(e) => get_url_parse_error_class(e), - WebsocketError::Io(e) => get_io_error_class(e), - WebsocketError::WebSocket(_) => "TypeError", - WebsocketError::ConnectionFailed(_) => "DOMExceptionNetworkError", - WebsocketError::Uri(_) => "Error", - WebsocketError::Canceled(e) => { - let io_err: io::Error = e.to_owned().into(); - get_io_error_class(&io_err) - } - } -} - -fn get_websocket_handshake_error(error: &HandshakeError) -> &'static str { - match error { - HandshakeError::RootStoreError(e) => { - get_error_class_name(e).unwrap_or("Error") - } - HandshakeError::Tls(e) => get_tls_error_class(e), - HandshakeError::MissingPath => "TypeError", - HandshakeError::Http(_) => "Error", - HandshakeError::InvalidHostname(_) => "TypeError", - HandshakeError::Io(e) => get_io_error_class(e), - HandshakeError::Rustls(_) => "Error", - HandshakeError::H2(_) => "Error", - HandshakeError::NoH2Alpn => "Error", - HandshakeError::InvalidStatusCode(_) => "Error", - HandshakeError::WebSocket(_) => "TypeError", - HandshakeError::HeaderName(_) => "TypeError", - HandshakeError::HeaderValue(_) => "TypeError", - } -} - -fn get_fs_ops_error(error: &FsOpsError) -> &'static str { - use FsOpsErrorKind::*; - match error.as_kind() { - Io(e) => get_io_error_class(e), - OperationError(e) => get_fs_error(&e.err), - Permission(e) => get_permission_check_error_class(e), - Resource(e) | Other(e) => get_error_class_name(e).unwrap_or("Error"), - InvalidUtf8(_) => "InvalidData", - StripPrefix(_) => "Error", - Canceled(e) => { - let io_err: io::Error = e.to_owned().into(); - get_io_error_class(&io_err) - } - InvalidSeekMode(_) => "TypeError", - InvalidControlCharacter(_) => "Error", - InvalidCharacter(_) => "Error", - #[cfg(windows)] - InvalidTrailingCharacter => "Error", - NotCapableAccess { .. 
} => "NotCapable", - NotCapable(_) => "NotCapable", - } -} - -fn get_kv_error(error: &KvError) -> &'static str { - use KvErrorKind::*; - match error.as_kind() { - DatabaseHandler(e) | Resource(e) | Kv(e) => { - get_error_class_name(e).unwrap_or("Error") - } - TooManyRanges(_) => "TypeError", - TooManyEntries(_) => "TypeError", - TooManyChecks(_) => "TypeError", - TooManyMutations(_) => "TypeError", - TooManyKeys(_) => "TypeError", - InvalidLimit => "TypeError", - InvalidBoundaryKey => "TypeError", - KeyTooLargeToRead(_) => "TypeError", - KeyTooLargeToWrite(_) => "TypeError", - TotalMutationTooLarge(_) => "TypeError", - TotalKeyTooLarge(_) => "TypeError", - Io(e) => get_io_error_class(e), - QueueMessageNotFound => "TypeError", - StartKeyNotInKeyspace => "TypeError", - EndKeyNotInKeyspace => "TypeError", - StartKeyGreaterThanEndKey => "TypeError", - InvalidCheck(e) => match e { - KvCheckError::InvalidVersionstamp => "TypeError", - KvCheckError::Io(e) => get_io_error_class(e), - }, - InvalidMutation(e) => match e { - KvMutationError::BigInt(_) => "Error", - KvMutationError::Io(e) => get_io_error_class(e), - KvMutationError::InvalidMutationWithValue(_) => "TypeError", - KvMutationError::InvalidMutationWithoutValue(_) => "TypeError", - }, - InvalidEnqueue(e) => get_io_error_class(e), - EmptyKey => "TypeError", - ValueTooLarge(_) => "TypeError", - EnqueuePayloadTooLarge(_) => "TypeError", - InvalidCursor => "TypeError", - CursorOutOfBounds => "TypeError", - InvalidRange => "TypeError", - } -} - -fn get_net_error(error: &NetError) -> &'static str { - match error { - NetError::ListenerClosed => "BadResource", - NetError::ListenerBusy => "Busy", - NetError::SocketClosed => "BadResource", - NetError::SocketClosedNotConnected => "NotConnected", - NetError::SocketBusy => "Busy", - NetError::Io(e) => get_io_error_class(e), - NetError::AcceptTaskOngoing => "Busy", - NetError::RootCertStore(e) | NetError::Resource(e) => { - get_error_class_name(e).unwrap_or("Error") - } - NetError::Permission(e) => get_permission_check_error_class(e), - NetError::NoResolvedAddress => "Error", - NetError::AddrParse(_) => "Error", - NetError::Map(e) => get_net_map_error(e), - NetError::Canceled(e) => { - let io_err: io::Error = e.to_owned().into(); - get_io_error_class(&io_err) - } - NetError::DnsNotFound(_) => "NotFound", - NetError::DnsNotConnected(_) => "NotConnected", - NetError::DnsTimedOut(_) => "TimedOut", - NetError::Dns(_) => "Error", - NetError::UnsupportedRecordType => "NotSupported", - NetError::InvalidUtf8(_) => "InvalidData", - NetError::UnexpectedKeyType => "Error", - NetError::InvalidHostname(_) => "TypeError", - NetError::TcpStreamBusy => "Busy", - NetError::Rustls(_) => "Error", - NetError::Tls(e) => get_tls_error_class(e), - NetError::ListenTlsRequiresKey => "InvalidData", - NetError::Reunite(_) => "Error", - } -} - -fn get_net_map_error(error: &deno_net::io::MapError) -> &'static str { - match error { - deno_net::io::MapError::Io(e) => get_io_error_class(e), - deno_net::io::MapError::NoResources => "Error", - } -} - -fn get_child_permission_error(e: &ChildPermissionError) -> &'static str { - match e { - ChildPermissionError::Escalation => "NotCapable", - ChildPermissionError::PathResolve(e) => get_path_resolve_error(e), - ChildPermissionError::NetDescriptorParse(_) => "URIError", - ChildPermissionError::EnvDescriptorParse(_) => "Error", - ChildPermissionError::SysDescriptorParse(e) => { - get_sys_descriptor_parse_error(e) - } - ChildPermissionError::RunDescriptorParse(e) => { - 
get_run_descriptor_parse_error(e) - } - } -} - -fn get_create_worker_error(error: &CreateWorkerError) -> &'static str { - match error { - CreateWorkerError::ClassicWorkers => "DOMExceptionNotSupportedError", - CreateWorkerError::Permission(e) => get_child_permission_error(e), - CreateWorkerError::ModuleResolution(e) => { - get_module_resolution_error_class(e) - } - CreateWorkerError::Io(e) => get_io_error_class(e), - CreateWorkerError::MessagePort(e) => get_web_message_port_error_class(e), - } -} - -fn get_tty_error(error: &TtyError) -> &'static str { - match error { - TtyError::Resource(e) | TtyError::Other(e) => { - get_error_class_name(e).unwrap_or("Error") - } - TtyError::Io(e) => get_io_error_class(e), - #[cfg(unix)] - TtyError::Nix(e) => get_nix_error_class(e), - } -} - -fn get_readline_error(error: &ReadlineError) -> &'static str { - match error { - ReadlineError::Io(e) => get_io_error_class(e), - ReadlineError::Eof => "Error", - ReadlineError::Interrupted => "Error", - #[cfg(unix)] - ReadlineError::Errno(e) => get_nix_error_class(e), - ReadlineError::WindowResized => "Error", - #[cfg(windows)] - ReadlineError::Decode(_) => "Error", - #[cfg(windows)] - ReadlineError::SystemError(_) => "Error", - _ => "Error", - } -} - -fn get_signal_error(error: &SignalError) -> &'static str { - match error { - SignalError::InvalidSignalStr(_) => "TypeError", - SignalError::InvalidSignalInt(_) => "TypeError", - SignalError::SignalNotAllowed(_) => "TypeError", - SignalError::Io(e) => get_io_error_class(e), - } -} - -fn get_fs_events_error(error: &FsEventsError) -> &'static str { - match error { - FsEventsError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - FsEventsError::Permission(e) => get_permission_check_error_class(e), - FsEventsError::Notify(e) => get_notify_error_class(e), - FsEventsError::Canceled(e) => { - let io_err: io::Error = e.to_owned().into(); - get_io_error_class(&io_err) - } - } -} - -fn get_http_start_error(error: &HttpStartError) -> &'static str { - match error { - HttpStartError::TcpStreamInUse => "Busy", - HttpStartError::TlsStreamInUse => "Busy", - HttpStartError::UnixSocketInUse => "Busy", - HttpStartError::ReuniteTcp(_) => "Error", - #[cfg(unix)] - HttpStartError::ReuniteUnix(_) => "Error", - HttpStartError::Io(e) => get_io_error_class(e), - HttpStartError::Other(e) => get_error_class_name(e).unwrap_or("Error"), - } -} - -fn get_process_error(error: &ProcessError) -> &'static str { - match error { - ProcessError::SpawnFailed { error, .. 
} => get_process_error(error), - ProcessError::FailedResolvingCwd(e) | ProcessError::Io(e) => { - get_io_error_class(e) - } - ProcessError::Permission(e) => get_permission_check_error_class(e), - ProcessError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - ProcessError::BorrowMut(_) => "Error", - ProcessError::Which(_) => "Error", - ProcessError::ChildProcessAlreadyTerminated => "TypeError", - ProcessError::Signal(e) => get_signal_error(e), - ProcessError::MissingCmd => "Error", - ProcessError::InvalidPid => "TypeError", - #[cfg(unix)] - ProcessError::Nix(e) => get_nix_error_class(e), - ProcessError::RunPermission(e) => match e { - CheckRunPermissionError::Permission(e) => { - get_permission_check_error_class(e) - } - CheckRunPermissionError::Other(e) => { - get_error_class_name(e).unwrap_or("Error") - } - }, - } -} - -fn get_http_error(error: &HttpError) -> &'static str { - match error { - HttpError::Canceled(e) => { - let io_err: io::Error = e.to_owned().into(); - get_io_error_class(&io_err) - } - HttpError::HyperV014(e) => get_hyper_v014_error_class(e), - HttpError::InvalidHeaderName(_) => "Error", - HttpError::InvalidHeaderValue(_) => "Error", - HttpError::Http(_) => "Error", - HttpError::ResponseHeadersAlreadySent => "Http", - HttpError::ConnectionClosedWhileSendingResponse => "Http", - HttpError::AlreadyInUse => "Http", - HttpError::Io(e) => get_io_error_class(e), - HttpError::NoResponseHeaders => "Http", - HttpError::ResponseAlreadyCompleted => "Http", - HttpError::UpgradeBodyUsed => "Http", - HttpError::Resource(e) | HttpError::Other(e) => { - get_error_class_name(e).unwrap_or("Error") - } - } -} - -fn get_http_next_error(error: &HttpNextError) -> &'static str { - match error { - HttpNextError::Io(e) => get_io_error_class(e), - HttpNextError::WebSocketUpgrade(e) => get_websocket_upgrade_error(e), - HttpNextError::Hyper(e) => get_hyper_error_class(e), - HttpNextError::JoinError(_) => "Error", - HttpNextError::Canceled(e) => { - let io_err: io::Error = e.to_owned().into(); - get_io_error_class(&io_err) - } - HttpNextError::UpgradeUnavailable(_) => "Error", - HttpNextError::HttpPropertyExtractor(e) | HttpNextError::Resource(e) => { - get_error_class_name(e).unwrap_or("Error") - } - } -} - -fn get_websocket_upgrade_error(error: &WebSocketUpgradeError) -> &'static str { - match error { - WebSocketUpgradeError::InvalidHeaders => "Http", - WebSocketUpgradeError::HttpParse(_) => "Error", - WebSocketUpgradeError::Http(_) => "Error", - WebSocketUpgradeError::Utf8(_) => "Error", - WebSocketUpgradeError::InvalidHeaderName(_) => "Error", - WebSocketUpgradeError::InvalidHeaderValue(_) => "Error", - WebSocketUpgradeError::InvalidHttpStatusLine => "Http", - WebSocketUpgradeError::UpgradeBufferAlreadyCompleted => "Http", - } -} - -fn get_fs_error(e: &FsError) -> &'static str { - match &e { - FsError::Io(e) => get_io_error_class(e), - FsError::FileBusy => "Busy", - FsError::NotSupported => "NotSupported", - FsError::NotCapable(_) => "NotCapable", - } -} - -mod node { - pub use deno_node::ops::blocklist::BlocklistError; - pub use deno_node::ops::crypto::cipher::CipherContextError; - pub use deno_node::ops::crypto::cipher::CipherError; - pub use deno_node::ops::crypto::cipher::DecipherContextError; - pub use deno_node::ops::crypto::cipher::DecipherError; - pub use deno_node::ops::crypto::digest::HashError; - pub use deno_node::ops::crypto::keys::AsymmetricPrivateKeyDerError; - pub use deno_node::ops::crypto::keys::AsymmetricPrivateKeyError; - pub use 
deno_node::ops::crypto::keys::AsymmetricPublicKeyDerError; - pub use deno_node::ops::crypto::keys::AsymmetricPublicKeyError; - pub use deno_node::ops::crypto::keys::AsymmetricPublicKeyJwkError; - pub use deno_node::ops::crypto::keys::EcJwkError; - pub use deno_node::ops::crypto::keys::EdRawError; - pub use deno_node::ops::crypto::keys::ExportPrivateKeyPemError; - pub use deno_node::ops::crypto::keys::ExportPublicKeyPemError; - pub use deno_node::ops::crypto::keys::GenerateRsaPssError; - pub use deno_node::ops::crypto::keys::RsaJwkError; - pub use deno_node::ops::crypto::keys::RsaPssParamsParseError; - pub use deno_node::ops::crypto::keys::X509PublicKeyError; - pub use deno_node::ops::crypto::sign::KeyObjectHandlePrehashedSignAndVerifyError; - pub use deno_node::ops::crypto::x509::X509Error; - pub use deno_node::ops::crypto::DiffieHellmanError; - pub use deno_node::ops::crypto::EcdhEncodePubKey; - pub use deno_node::ops::crypto::HkdfError; - pub use deno_node::ops::crypto::Pbkdf2Error; - pub use deno_node::ops::crypto::PrivateEncryptDecryptError; - pub use deno_node::ops::crypto::ScryptAsyncError; - pub use deno_node::ops::crypto::SignEd25519Error; - pub use deno_node::ops::crypto::VerifyEd25519Error; - pub use deno_node::ops::fs::FsError; - pub use deno_node::ops::http::ConnError; - pub use deno_node::ops::http2::Http2Error; - pub use deno_node::ops::idna::IdnaError; - pub use deno_node::ops::ipc::IpcError; - pub use deno_node::ops::ipc::IpcJsonStreamError; - use deno_node::ops::os::priority::PriorityError; - pub use deno_node::ops::os::OsError; - pub use deno_node::ops::require::RequireError; - use deno_node::ops::require::RequireErrorKind; - pub use deno_node::ops::worker_threads::WorkerThreadsFilenameError; - pub use deno_node::ops::zlib::brotli::BrotliError; - pub use deno_node::ops::zlib::mode::ModeError; - pub use deno_node::ops::zlib::ZlibError; - - use super::get_error_class_name; - use super::get_io_error_class; - use super::get_permission_check_error_class; - use super::get_serde_json_error_class; - use super::get_url_parse_error_class; - - pub fn get_blocklist_error(error: &BlocklistError) -> &'static str { - match error { - BlocklistError::AddrParse(_) => "Error", - BlocklistError::IpNetwork(_) => "Error", - BlocklistError::InvalidAddress => "Error", - BlocklistError::IpVersionMismatch => "Error", - } - } - - pub fn get_fs_error(error: &FsError) -> &'static str { - match error { - FsError::Permission(e) => get_permission_check_error_class(e), - FsError::Io(e) => get_io_error_class(e), - #[cfg(windows)] - FsError::PathHasNoRoot => "Error", - #[cfg(not(any(unix, windows)))] - FsError::UnsupportedPlatform => "Error", - FsError::Fs(e) => super::get_fs_error(e), - } - } - - pub fn get_idna_error(error: &IdnaError) -> &'static str { - match error { - IdnaError::InvalidInput => "RangeError", - IdnaError::InputTooLong => "Error", - IdnaError::IllegalInput => "RangeError", - } - } - - pub fn get_ipc_json_stream_error(error: &IpcJsonStreamError) -> &'static str { - match error { - IpcJsonStreamError::Io(e) => get_io_error_class(e), - IpcJsonStreamError::SimdJson(_) => "Error", - } - } - - pub fn get_ipc_error(error: &IpcError) -> &'static str { - match error { - IpcError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - IpcError::IpcJsonStream(e) => get_ipc_json_stream_error(e), - IpcError::Canceled(e) => { - let io_err: std::io::Error = e.to_owned().into(); - get_io_error_class(&io_err) - } - IpcError::SerdeJson(e) => get_serde_json_error_class(e), - } - } - - pub fn 
get_worker_threads_filename_error( - error: &WorkerThreadsFilenameError, - ) -> &'static str { - match error { - WorkerThreadsFilenameError::Permission(e) => { - get_error_class_name(e).unwrap_or("Error") - } - WorkerThreadsFilenameError::UrlParse(e) => get_url_parse_error_class(e), - WorkerThreadsFilenameError::InvalidRelativeUrl => "Error", - WorkerThreadsFilenameError::UrlFromPathString => "Error", - WorkerThreadsFilenameError::UrlToPathString => "Error", - WorkerThreadsFilenameError::UrlToPath => "Error", - WorkerThreadsFilenameError::FileNotFound(_) => "Error", - WorkerThreadsFilenameError::Fs(e) => super::get_io_error_class(e), - } - } - - pub fn get_require_error(error: &RequireError) -> &'static str { - use RequireErrorKind::*; - match error.as_kind() { - UrlParse(e) => get_url_parse_error_class(e), - Permission(e) => get_error_class_name(e).unwrap_or("Error"), - PackageExportsResolve(_) - | PackageJsonLoad(_) - | ClosestPkgJson(_) - | FilePathConversion(_) - | UrlConversion(_) - | ReadModule(_) - | PackageImportsResolve(_) => "Error", - Fs(e) | UnableToGetCwd(e) => super::get_io_error_class(e), - } - } - - pub fn get_http2_error(error: &Http2Error) -> &'static str { - match error { - Http2Error::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - Http2Error::UrlParse(e) => get_url_parse_error_class(e), - Http2Error::H2(_) => "Error", - } - } - - pub fn get_os_error(error: &OsError) -> &'static str { - match error { - OsError::Priority(e) => match e { - PriorityError::Io(e) => get_io_error_class(e), - #[cfg(windows)] - PriorityError::InvalidPriority => "TypeError", - }, - OsError::Permission(e) => get_permission_check_error_class(e), - OsError::FailedToGetCpuInfo => "TypeError", - OsError::FailedToGetUserInfo(e) => get_io_error_class(e), - } - } - - pub fn get_brotli_error(error: &BrotliError) -> &'static str { - match error { - BrotliError::InvalidEncoderMode => "TypeError", - BrotliError::CompressFailed => "TypeError", - BrotliError::DecompressFailed => "TypeError", - BrotliError::Join(_) => "Error", - BrotliError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - BrotliError::Io(e) => get_io_error_class(e), - } - } - - pub fn get_mode_error(_: &ModeError) -> &'static str { - "Error" - } - - pub fn get_zlib_error(e: &ZlibError) -> &'static str { - match e { - ZlibError::NotInitialized => "TypeError", - ZlibError::Mode(e) => get_mode_error(e), - ZlibError::Other(e) => get_error_class_name(e).unwrap_or("Error"), - } - } - - pub fn get_crypto_cipher_context_error( - e: &CipherContextError, - ) -> &'static str { - match e { - CipherContextError::ContextInUse => "TypeError", - CipherContextError::Cipher(e) => get_crypto_cipher_error(e), - CipherContextError::Resource(e) => { - get_error_class_name(e).unwrap_or("Error") - } - } - } - - pub fn get_crypto_cipher_error(e: &CipherError) -> &'static str { - match e { - CipherError::InvalidIvLength => "TypeError", - CipherError::InvalidKeyLength => "RangeError", - CipherError::InvalidInitializationVector => "TypeError", - CipherError::CannotPadInputData => "TypeError", - CipherError::UnknownCipher(_) => "TypeError", - } - } - - pub fn get_crypto_decipher_context_error( - e: &DecipherContextError, - ) -> &'static str { - match e { - DecipherContextError::ContextInUse => "TypeError", - DecipherContextError::Decipher(e) => get_crypto_decipher_error(e), - DecipherContextError::Resource(e) => { - get_error_class_name(e).unwrap_or("Error") - } - } - } - - pub fn get_crypto_decipher_error(e: &DecipherError) -> &'static str { - 
match e { - DecipherError::InvalidIvLength => "TypeError", - DecipherError::InvalidKeyLength => "RangeError", - DecipherError::InvalidInitializationVector => "TypeError", - DecipherError::CannotUnpadInputData => "TypeError", - DecipherError::DataAuthenticationFailed => "TypeError", - DecipherError::SetAutoPaddingFalseAes128GcmUnsupported => "TypeError", - DecipherError::SetAutoPaddingFalseAes256GcmUnsupported => "TypeError", - DecipherError::UnknownCipher(_) => "TypeError", - } - } - - pub fn get_x509_error(_: &X509Error) -> &'static str { - "Error" - } - - pub fn get_crypto_key_object_handle_prehashed_sign_and_verify_error( - e: &KeyObjectHandlePrehashedSignAndVerifyError, - ) -> &'static str { - match e { - KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignatureEncoding => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPrivate => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaSignature(_) => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsa => "Error", - KeyObjectHandlePrehashedSignAndVerifyError::DigestNotAllowedForRsaPssSignature(_) => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithRsaPss => "Error", - KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigestWithDsa => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::RsaPssHashAlgorithmUnsupported => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::PrivateKeyDisallowsUsage { .. } => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::FailedToSignDigest => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForSigning => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedSigning => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForSigning => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::KeyIsNotPublicOrPrivate => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::InvalidDsaSignature => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::X25519KeyCannotBeUsedForVerification => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::Ed25519KeyCannotBeUsedForPrehashedVerification => "TypeError", - KeyObjectHandlePrehashedSignAndVerifyError::DhKeyCannotBeUsedForVerification => "TypeError", - } - } - - pub fn get_crypto_hash_error(_: &HashError) -> &'static str { - "Error" - } - - pub fn get_asymmetric_public_key_jwk_error( - e: &AsymmetricPublicKeyJwkError, - ) -> &'static str { - match e { - AsymmetricPublicKeyJwkError::UnsupportedJwkEcCurveP224 => "TypeError", - AsymmetricPublicKeyJwkError::JwkExportNotImplementedForKeyType => { - "TypeError" - } - AsymmetricPublicKeyJwkError::KeyIsNotAsymmetricPublicKey => "TypeError", - } - } - - pub fn get_generate_rsa_pss_error(_: &GenerateRsaPssError) -> &'static str { - "TypeError" - } - - pub fn get_asymmetric_private_key_der_error( - e: &AsymmetricPrivateKeyDerError, - ) -> &'static str { - match e { - AsymmetricPrivateKeyDerError::KeyIsNotAsymmetricPrivateKey => "TypeError", - AsymmetricPrivateKeyDerError::InvalidRsaPrivateKey => "TypeError", - AsymmetricPrivateKeyDerError::ExportingNonRsaPrivateKeyAsPkcs1Unsupported => "TypeError", - AsymmetricPrivateKeyDerError::InvalidEcPrivateKey => "TypeError", - AsymmetricPrivateKeyDerError::ExportingNonEcPrivateKeyAsSec1Unsupported => "TypeError", - AsymmetricPrivateKeyDerError::ExportingNonRsaPssPrivateKeyAsPkcs8Unsupported => "Error", - 
AsymmetricPrivateKeyDerError::InvalidDsaPrivateKey => "TypeError", - AsymmetricPrivateKeyDerError::InvalidX25519PrivateKey => "TypeError", - AsymmetricPrivateKeyDerError::InvalidEd25519PrivateKey => "TypeError", - AsymmetricPrivateKeyDerError::InvalidDhPrivateKey => "TypeError", - AsymmetricPrivateKeyDerError::UnsupportedKeyType(_) => "TypeError", - } - } - - pub fn get_asymmetric_public_key_der_error( - _: &AsymmetricPublicKeyDerError, - ) -> &'static str { - "TypeError" - } - - pub fn get_export_public_key_pem_error( - e: &ExportPublicKeyPemError, - ) -> &'static str { - match e { - ExportPublicKeyPemError::AsymmetricPublicKeyDer(e) => { - get_asymmetric_public_key_der_error(e) - } - ExportPublicKeyPemError::VeryLargeData => "TypeError", - ExportPublicKeyPemError::Der(_) => "Error", - } - } - - pub fn get_export_private_key_pem_error( - e: &ExportPrivateKeyPemError, - ) -> &'static str { - match e { - ExportPrivateKeyPemError::AsymmetricPublicKeyDer(e) => { - get_asymmetric_private_key_der_error(e) - } - ExportPrivateKeyPemError::VeryLargeData => "TypeError", - ExportPrivateKeyPemError::Der(_) => "Error", - } - } - - pub fn get_x509_public_key_error(e: &X509PublicKeyError) -> &'static str { - match e { - X509PublicKeyError::X509(_) => "Error", - X509PublicKeyError::Rsa(_) => "Error", - X509PublicKeyError::Asn1(_) => "Error", - X509PublicKeyError::Ec(_) => "Error", - X509PublicKeyError::UnsupportedEcNamedCurve => "TypeError", - X509PublicKeyError::MissingEcParameters => "TypeError", - X509PublicKeyError::MalformedDssPublicKey => "TypeError", - X509PublicKeyError::UnsupportedX509KeyType => "TypeError", - } - } - - pub fn get_rsa_jwk_error(e: &RsaJwkError) -> &'static str { - match e { - RsaJwkError::Base64(_) => "Error", - RsaJwkError::Rsa(_) => "Error", - RsaJwkError::MissingRsaPrivateComponent => "TypeError", - } - } - - pub fn get_ec_jwk_error(e: &EcJwkError) -> &'static str { - match e { - EcJwkError::Ec(_) => "Error", - EcJwkError::UnsupportedCurve(_) => "TypeError", - } - } - - pub fn get_ed_raw_error(e: &EdRawError) -> &'static str { - match e { - EdRawError::Ed25519Signature(_) => "Error", - EdRawError::InvalidEd25519Key => "TypeError", - EdRawError::UnsupportedCurve => "TypeError", - } - } - - pub fn get_pbkdf2_error(e: &Pbkdf2Error) -> &'static str { - match e { - Pbkdf2Error::UnsupportedDigest(_) => "TypeError", - Pbkdf2Error::Join(_) => "Error", - } - } - - pub fn get_scrypt_async_error(e: &ScryptAsyncError) -> &'static str { - match e { - ScryptAsyncError::Join(_) => "Error", - ScryptAsyncError::Other(e) => get_error_class_name(e).unwrap_or("Error"), - } - } - - pub fn get_hkdf_error_error(e: &HkdfError) -> &'static str { - match e { - HkdfError::ExpectedSecretKey => "TypeError", - HkdfError::HkdfExpandFailed => "TypeError", - HkdfError::UnsupportedDigest(_) => "TypeError", - HkdfError::Join(_) => "Error", - } - } - - pub fn get_rsa_pss_params_parse_error( - _: &RsaPssParamsParseError, - ) -> &'static str { - "TypeError" - } - - pub fn get_asymmetric_private_key_error( - e: &AsymmetricPrivateKeyError, - ) -> &'static str { - match e { - AsymmetricPrivateKeyError::InvalidPemPrivateKeyInvalidUtf8(_) => "TypeError", - AsymmetricPrivateKeyError::InvalidEncryptedPemPrivateKey => "TypeError", - AsymmetricPrivateKeyError::InvalidPemPrivateKey => "TypeError", - AsymmetricPrivateKeyError::EncryptedPrivateKeyRequiresPassphraseToDecrypt => "TypeError", - AsymmetricPrivateKeyError::InvalidPkcs1PrivateKey => "TypeError", - AsymmetricPrivateKeyError::InvalidSec1PrivateKey => "TypeError", - 
AsymmetricPrivateKeyError::UnsupportedPemLabel(_) => "TypeError", - AsymmetricPrivateKeyError::RsaPssParamsParse(e) => get_rsa_pss_params_parse_error(e), - AsymmetricPrivateKeyError::InvalidEncryptedPkcs8PrivateKey => "TypeError", - AsymmetricPrivateKeyError::InvalidPkcs8PrivateKey => "TypeError", - AsymmetricPrivateKeyError::Pkcs1PrivateKeyDoesNotSupportEncryptionWithPassphrase => "TypeError", - AsymmetricPrivateKeyError::Sec1PrivateKeyDoesNotSupportEncryptionWithPassphrase => "TypeError", - AsymmetricPrivateKeyError::UnsupportedEcNamedCurve => "TypeError", - AsymmetricPrivateKeyError::InvalidPrivateKey => "TypeError", - AsymmetricPrivateKeyError::InvalidDsaPrivateKey => "TypeError", - AsymmetricPrivateKeyError::MalformedOrMissingNamedCurveInEcParameters => "TypeError", - AsymmetricPrivateKeyError::UnsupportedKeyType(_) => "TypeError", - AsymmetricPrivateKeyError::UnsupportedKeyFormat(_) => "TypeError", - AsymmetricPrivateKeyError::InvalidX25519PrivateKey => "TypeError", - AsymmetricPrivateKeyError::X25519PrivateKeyIsWrongLength => "TypeError", - AsymmetricPrivateKeyError::InvalidEd25519PrivateKey => "TypeError", - AsymmetricPrivateKeyError::MissingDhParameters => "TypeError", - AsymmetricPrivateKeyError::UnsupportedPrivateKeyOid => "TypeError", - } - } - - pub fn get_asymmetric_public_key_error( - e: &AsymmetricPublicKeyError, - ) -> &'static str { - match e { - AsymmetricPublicKeyError::InvalidPemPrivateKeyInvalidUtf8(_) => { - "TypeError" - } - AsymmetricPublicKeyError::InvalidPemPublicKey => "TypeError", - AsymmetricPublicKeyError::InvalidPkcs1PublicKey => "TypeError", - AsymmetricPublicKeyError::AsymmetricPrivateKey(e) => { - get_asymmetric_private_key_error(e) - } - AsymmetricPublicKeyError::InvalidX509Certificate => "TypeError", - AsymmetricPublicKeyError::X509(_) => "Error", - AsymmetricPublicKeyError::X509PublicKey(e) => { - get_x509_public_key_error(e) - } - AsymmetricPublicKeyError::UnsupportedPemLabel(_) => "TypeError", - AsymmetricPublicKeyError::InvalidSpkiPublicKey => "TypeError", - AsymmetricPublicKeyError::UnsupportedKeyType(_) => "TypeError", - AsymmetricPublicKeyError::UnsupportedKeyFormat(_) => "TypeError", - AsymmetricPublicKeyError::Spki(_) => "Error", - AsymmetricPublicKeyError::Pkcs1(_) => "Error", - AsymmetricPublicKeyError::RsaPssParamsParse(_) => "TypeError", - AsymmetricPublicKeyError::MalformedDssPublicKey => "TypeError", - AsymmetricPublicKeyError::MalformedOrMissingNamedCurveInEcParameters => { - "TypeError" - } - AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInEcSpki => { - "TypeError" - } - AsymmetricPublicKeyError::Ec(_) => "Error", - AsymmetricPublicKeyError::UnsupportedEcNamedCurve => "TypeError", - AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInX25519Spki => { - "TypeError" - } - AsymmetricPublicKeyError::X25519PublicKeyIsTooShort => "TypeError", - AsymmetricPublicKeyError::InvalidEd25519PublicKey => "TypeError", - AsymmetricPublicKeyError::MissingDhParameters => "TypeError", - AsymmetricPublicKeyError::MalformedDhParameters => "TypeError", - AsymmetricPublicKeyError::MalformedOrMissingPublicKeyInDhSpki => { - "TypeError" - } - AsymmetricPublicKeyError::UnsupportedPrivateKeyOid => "TypeError", - } - } - - pub fn get_private_encrypt_decrypt_error( - e: &PrivateEncryptDecryptError, - ) -> &'static str { - match e { - PrivateEncryptDecryptError::Pkcs8(_) => "Error", - PrivateEncryptDecryptError::Spki(_) => "Error", - PrivateEncryptDecryptError::Utf8(_) => "Error", - PrivateEncryptDecryptError::Rsa(_) => "Error", - 
PrivateEncryptDecryptError::UnknownPadding => "TypeError", - } - } - - pub fn get_ecdh_encode_pub_key_error(e: &EcdhEncodePubKey) -> &'static str { - match e { - EcdhEncodePubKey::InvalidPublicKey => "TypeError", - EcdhEncodePubKey::UnsupportedCurve => "TypeError", - EcdhEncodePubKey::Sec1(_) => "Error", - } - } - - pub fn get_diffie_hellman_error(_: &DiffieHellmanError) -> &'static str { - "TypeError" - } - - pub fn get_sign_ed25519_error(_: &SignEd25519Error) -> &'static str { - "TypeError" - } - - pub fn get_verify_ed25519_error(_: &VerifyEd25519Error) -> &'static str { - "TypeError" - } - - pub fn get_conn_error(e: &ConnError) -> &'static str { - match e { - ConnError::Resource(e) => get_error_class_name(e).unwrap_or("Error"), - ConnError::Permission(e) => get_permission_check_error_class(e), - ConnError::InvalidUrl(_) => "TypeError", - ConnError::InvalidHeaderName(_) => "TypeError", - ConnError::InvalidHeaderValue(_) => "TypeError", - ConnError::Url(e) => get_url_parse_error_class(e), - ConnError::Method(_) => "TypeError", - ConnError::Io(e) => get_io_error_class(e), - ConnError::Hyper(e) => super::get_hyper_error_class(e), - ConnError::TlsStreamBusy => "Busy", - ConnError::TcpStreamBusy => "Busy", - ConnError::ReuniteTcp(_) => "Error", - ConnError::Canceled(_) => "Error", - } - } -} - -fn get_os_error(error: &OsError) -> &'static str { - match error { - OsError::Permission(e) => get_permission_check_error_class(e), - OsError::InvalidUtf8(_) => "InvalidData", - OsError::EnvEmptyKey => "TypeError", - OsError::EnvInvalidKey(_) => "TypeError", - OsError::EnvInvalidValue(_) => "TypeError", - OsError::Io(e) => get_io_error_class(e), - OsError::Var(e) => get_env_var_error_class(e), - } -} - -fn get_sync_fetch_error(error: &SyncFetchError) -> &'static str { - match error { - SyncFetchError::BlobUrlsNotSupportedInContext => "TypeError", - SyncFetchError::Io(e) => get_io_error_class(e), - SyncFetchError::InvalidScriptUrl => "TypeError", - SyncFetchError::InvalidStatusCode(_) => "TypeError", - SyncFetchError::ClassicScriptSchemeUnsupportedInWorkers(_) => "TypeError", - SyncFetchError::InvalidUri(_) => "Error", - SyncFetchError::InvalidMimeType(_) => "DOMExceptionNetworkError", - SyncFetchError::MissingMimeType => "DOMExceptionNetworkError", - SyncFetchError::Fetch(e) => get_fetch_error(e), - SyncFetchError::Join(_) => "Error", - SyncFetchError::Other(e) => get_error_class_name(e).unwrap_or("Error"), - } -} - -fn get_quic_error_class(error: &QuicError) -> &'static str { - match error { - QuicError::CannotListen => "Error", - QuicError::MissingTlsKey => "TypeError", - QuicError::InvalidDuration => "TypeError", - QuicError::UnableToResolve => "Error", - QuicError::StdIo(e) => get_io_error_class(e), - QuicError::PermissionCheck(e) => get_permission_check_error_class(e), - QuicError::VarIntBoundsExceeded(_) => "RangeError", - QuicError::Rustls(_) => "Error", - QuicError::Tls(e) => get_tls_error_class(e), - QuicError::ConnectionError(_) => "Error", - QuicError::ConnectError(_) => "Error", - QuicError::SendDatagramError(_) => "Error", - QuicError::ClosedStream(_) => "BadResource", - QuicError::BadResource(_) => "BadResource", - QuicError::MaxStreams(_) => "RangeError", - QuicError::Core(e) => get_error_class_name(e).unwrap_or("Error"), - } -} - -pub fn get_error_class_name(e: &AnyError) -> Option<&'static str> { - deno_core::error::get_custom_error_class(e) - .or_else(|| { - e.downcast_ref::() - .map(get_child_permission_error) - }) - .or_else(|| { - e.downcast_ref::() - 
.map(get_permission_check_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_permission_error_class) - }) - .or_else(|| e.downcast_ref::().map(get_fs_error)) - .or_else(|| { - e.downcast_ref::() - .map(node::get_blocklist_error) - }) - .or_else(|| e.downcast_ref::().map(node::get_fs_error)) - .or_else(|| { - e.downcast_ref::() - .map(node::get_idna_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_ipc_json_stream_error) - }) - .or_else(|| e.downcast_ref::().map(node::get_ipc_error)) - .or_else(|| { - e.downcast_ref::() - .map(node::get_worker_threads_filename_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_require_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_http2_error) - }) - .or_else(|| e.downcast_ref::().map(node::get_os_error)) - .or_else(|| { - e.downcast_ref::() - .map(node::get_brotli_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_mode_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_zlib_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_crypto_cipher_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_crypto_cipher_context_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_crypto_decipher_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_crypto_decipher_context_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_x509_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_crypto_key_object_handle_prehashed_sign_and_verify_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_crypto_hash_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_asymmetric_public_key_jwk_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_generate_rsa_pss_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_asymmetric_private_key_der_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_asymmetric_public_key_der_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_export_public_key_pem_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_export_private_key_pem_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_rsa_jwk_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_ec_jwk_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_ed_raw_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_pbkdf2_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_scrypt_async_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_hkdf_error_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_rsa_pss_params_parse_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_asymmetric_private_key_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_asymmetric_public_key_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_private_encrypt_decrypt_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_ecdh_encode_pub_key_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_diffie_hellman_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_sign_ed25519_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_verify_ed25519_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(node::get_conn_error) - }) - .or_else(|| e.downcast_ref::().map(get_napi_error_class)) - .or_else(|| e.downcast_ref::().map(get_web_error_class)) - .or_else(|| { - e.downcast_ref::() - 
.map(get_create_worker_error) - }) - .or_else(|| e.downcast_ref::().map(get_tty_error)) - .or_else(|| e.downcast_ref::().map(get_readline_error)) - .or_else(|| e.downcast_ref::().map(get_signal_error)) - .or_else(|| e.downcast_ref::().map(get_fs_events_error)) - .or_else(|| e.downcast_ref::().map(get_http_start_error)) - .or_else(|| e.downcast_ref::().map(get_process_error)) - .or_else(|| e.downcast_ref::().map(get_os_error)) - .or_else(|| e.downcast_ref::().map(get_sync_fetch_error)) - .or_else(|| { - e.downcast_ref::() - .map(get_web_compression_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_web_message_port_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_web_stream_resource_error_class) - }) - .or_else(|| e.downcast_ref::().map(get_web_blob_error_class)) - .or_else(|| e.downcast_ref::().map(|_| "TypeError")) - .or_else(|| e.downcast_ref::().map(get_ffi_repr_error_class)) - .or_else(|| e.downcast_ref::().map(get_http_error)) - .or_else(|| e.downcast_ref::().map(get_http_next_error)) - .or_else(|| { - e.downcast_ref::() - .map(get_websocket_upgrade_error) - }) - .or_else(|| e.downcast_ref::().map(get_fs_ops_error)) - .or_else(|| { - e.downcast_ref::() - .map(get_ffi_dlfcn_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_ffi_static_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_ffi_callback_error_class) - }) - .or_else(|| e.downcast_ref::().map(get_ffi_call_error_class)) - .or_else(|| e.downcast_ref::().map(get_tls_error_class)) - .or_else(|| e.downcast_ref::().map(get_cron_error_class)) - .or_else(|| e.downcast_ref::().map(get_canvas_error)) - .or_else(|| e.downcast_ref::().map(get_cache_error)) - .or_else(|| e.downcast_ref::().map(get_websocket_error)) - .or_else(|| { - e.downcast_ref::() - .map(get_websocket_handshake_error) - }) - .or_else(|| e.downcast_ref::().map(get_kv_error)) - .or_else(|| e.downcast_ref::().map(get_fetch_error)) - .or_else(|| { - e.downcast_ref::() - .map(get_http_client_create_error) - }) - .or_else(|| e.downcast_ref::().map(get_net_error)) - .or_else(|| { - e.downcast_ref::() - .map(get_net_map_error) - }) - .or_else(|| e.downcast_ref::().map(get_quic_error_class)) - .or_else(|| { - e.downcast_ref::() - .map(get_broadcast_channel_error) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_webgpu_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_webgpu_buffer_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_webgpu_bundle_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_webgpu_byow_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_webgpu_render_pass_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_webgpu_surface_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_crypto_decrypt_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_crypto_encrypt_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_crypto_shared_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_crypto_ed25519_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_crypto_export_key_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_crypto_generate_key_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_crypto_import_key_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_crypto_x448_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_crypto_x25519_error_class) - }) - .or_else(|| { - e.downcast_ref::() - 
.map(get_crypto_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_webstorage_class_name) - }) - .or_else(|| { - e.downcast_ref::() - .map(|_| "TypeError") - }) - .or_else(|| { - e.downcast_ref::() - .map(get_dlopen_error_class) - }) - .or_else(|| e.downcast_ref::().map(get_hyper_error_class)) - .or_else(|| { - e.downcast_ref::() - .map(get_hyper_util_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_hyper_v014_error_class) - }) - .or_else(|| { - e.downcast_ref::>() - .map(|e| get_hyper_v014_error_class(e)) - }) - .or_else(|| { - e.downcast_ref::().map(|e| { - let io_err: io::Error = e.to_owned().into(); - get_io_error_class(&io_err) - }) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_env_var_error_class) - }) - .or_else(|| e.downcast_ref::().map(get_io_error_class)) - .or_else(|| { - e.downcast_ref::() - .map(get_module_resolution_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_notify_error_class) - }) - .or_else(|| e.downcast_ref::().map(get_regex_error_class)) - .or_else(|| { - e.downcast_ref::() - .map(get_serde_json_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(get_url_parse_error_class) - }) - .or_else(|| { - e.downcast_ref::() - .map(|_| "TypeError") - }) - .or_else(|| { - #[cfg(unix)] - let maybe_get_nix_error_class = - || e.downcast_ref::().map(get_nix_error_class); - #[cfg(not(unix))] - let maybe_get_nix_error_class = || Option::<&'static str>::None; - (maybe_get_nix_error_class)() - }) -} diff --git a/runtime/lib.rs b/runtime/lib.rs index 3e48ec89c0a59a..9afe91cd253a17 100644 --- a/runtime/lib.rs +++ b/runtime/lib.rs @@ -27,7 +27,6 @@ pub use deno_websocket; pub use deno_webstorage; pub mod code_cache; -pub mod errors; pub mod fmt_errors; pub mod fs_util; pub mod inspector_server; diff --git a/runtime/ops/fs_events.rs b/runtime/ops/fs_events.rs index e8fb9f2ceffaa4..5336c232c97eab 100644 --- a/runtime/ops/fs_events.rs +++ b/runtime/ops/fs_events.rs @@ -17,6 +17,8 @@ use deno_core::OpState; use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; +use deno_error::builtin_classes::GENERIC_ERROR; +use deno_error::JsErrorClass; use deno_permissions::PermissionsContainer; use notify::event::Event as NotifyEvent; use notify::event::ModifyKind; @@ -116,14 +118,29 @@ fn is_file_removed(event_path: &PathBuf) -> bool { } } -#[derive(Debug, thiserror::Error)] +deno_error::js_error_wrapper!(NotifyError, JsNotifyError, |err| { + match &err.kind { + notify::ErrorKind::Generic(_) => GENERIC_ERROR.into(), + notify::ErrorKind::Io(e) => e.get_class(), + notify::ErrorKind::PathNotFound => "NotFound".into(), + notify::ErrorKind::WatchNotFound => "NotFound".into(), + notify::ErrorKind::InvalidConfig(_) => "InvalidData".into(), + notify::ErrorKind::MaxFilesWatch => GENERIC_ERROR.into(), + } +}); + +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum FsEventsError { + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(#[from] deno_core::error::ResourceError), + #[class(inherit)] #[error(transparent)] Permission(#[from] deno_permissions::PermissionCheckError), + #[class(inherit)] #[error(transparent)] - Notify(#[from] NotifyError), + Notify(JsNotifyError), + #[class(inherit)] #[error(transparent)] Canceled(#[from] deno_core::Canceled), } @@ -143,7 +160,9 @@ fn start_watcher( let sender_clone = senders.clone(); let watcher: RecommendedWatcher = Watcher::new( move |res: Result| { - let res2 = res.map(FsEvent::from).map_err(FsEventsError::Notify); + let res2 = res + 
.map(FsEvent::from) + .map_err(|e| FsEventsError::Notify(JsNotifyError(e))); for (paths, sender) in sender_clone.lock().iter() { // Ignore result, if send failed it means that watcher was already closed, // but not all messages have been flushed. @@ -169,7 +188,8 @@ fn start_watcher( } }, Default::default(), - )?; + ) + .map_err(|e| FsEventsError::Notify(JsNotifyError(e)))?; state.put::<WatcherState>(WatcherState { watcher, senders }); @@ -198,7 +218,10 @@ fn op_fs_events_open( .check_read(path, "Deno.watchFs()")?; let watcher = state.borrow_mut::<WatcherState>(); - watcher.watcher.watch(&path, recursive_mode)?; + watcher + .watcher + .watch(&path, recursive_mode) + .map_err(|e| FsEventsError::Notify(JsNotifyError(e)))?; } let resource = FsEventsResource { receiver: AsyncRefCell::new(receiver), @@ -214,17 +237,13 @@ async fn op_fs_events_poll( state: Rc<RefCell<OpState>>, #[smi] rid: ResourceId, ) -> Result<Option<FsEvent>, FsEventsError> { - let resource = state - .borrow() - .resource_table - .get::<FsEventsResource>(rid) - .map_err(FsEventsError::Resource)?; + let resource = state.borrow().resource_table.get::<FsEventsResource>(rid)?; let mut receiver = RcRef::map(&resource, |r| &r.receiver).borrow_mut().await; let cancel = RcRef::map(resource, |r| &r.cancel); let maybe_result = receiver.recv().or_cancel(cancel).await?; match maybe_result { Some(Ok(value)) => Ok(Some(value)), - Some(Err(err)) => Err(FsEventsError::Notify(err)), + Some(Err(err)) => Err(FsEventsError::Notify(JsNotifyError(err))), None => Ok(None), } } diff --git a/runtime/ops/http.rs b/runtime/ops/http.rs index c9dc16cafe56c3..931b407779dc77 100644 --- a/runtime/ops/http.rs +++ b/runtime/ops/http.rs @@ -2,6 +2,7 @@ use std::rc::Rc; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::OpState; use deno_core::ResourceId; @@ -13,23 +14,34 @@ pub const UNSTABLE_FEATURE_NAME: &str = "http"; deno_core::extension!(deno_http_runtime, ops = [op_http_start],); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum HttpStartError { + #[class("Busy")] #[error("TCP stream is currently in use")] TcpStreamInUse, + #[class("Busy")] #[error("TLS stream is currently in use")] TlsStreamInUse, + #[class("Busy")] #[error("Unix socket is currently in use")] UnixSocketInUse, + #[class(generic)] #[error(transparent)] ReuniteTcp(#[from] tokio::net::tcp::ReuniteError), #[cfg(unix)] + #[class(generic)] #[error(transparent)] ReuniteUnix(#[from] tokio::net::unix::ReuniteError), + #[class(inherit)] #[error("{0}")] - Io(#[from] std::io::Error), + Io( + #[from] + #[inherit] + std::io::Error, + ), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Resource(#[inherit] ResourceError), } #[op2(fast)] @@ -89,5 +101,5 @@ fn op_http_start( )); } - Err(HttpStartError::Other(deno_core::error::bad_resource_id())) + Err(HttpStartError::Resource(ResourceError::BadResourceId)) } diff --git a/runtime/ops/os/mod.rs b/runtime/ops/os/mod.rs index a17b467cb70b43..ad3a292c3aa789 100644 --- a/runtime/ops/os/mod.rs +++ b/runtime/ops/os/mod.rs @@ -8,6 +8,7 @@ use deno_core::v8; use deno_core::OpState; use deno_node::NODE_ENV_VAR_ALLOWLIST; use deno_path_util::normalize_path; +use deno_permissions::PermissionCheckError; use deno_permissions::PermissionsContainer; use serde::Serialize; @@ -71,20 +72,27 @@ deno_core::extension!( }, ); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum OsError { + #[class(inherit)] #[error(transparent)] - Permission(#[from] deno_permissions::PermissionCheckError), + Permission(#[from]
PermissionCheckError), + #[class("InvalidData")] #[error("File name or path {0:?} is not valid UTF-8")] InvalidUtf8(std::ffi::OsString), + #[class(type)] #[error("Key is an empty string.")] EnvEmptyKey, + #[class(type)] #[error("Key contains invalid characters: {0:?}")] EnvInvalidKey(String), + #[class(type)] #[error("Value contains invalid characters: {0:?}")] EnvInvalidValue(String), + #[class(inherit)] #[error(transparent)] Var(#[from] env::VarError), + #[class(inherit)] #[error("{0}")] Io(#[from] std::io::Error), } @@ -129,7 +137,7 @@ fn op_set_env( #[serde] fn op_env( state: &mut OpState, -) -> Result<HashMap<String, String>, deno_core::error::AnyError> { +) -> Result<HashMap<String, String>, PermissionCheckError> { state.borrow_mut::<PermissionsContainer>().check_env_all()?; Ok(env::vars().collect()) } @@ -195,7 +203,7 @@ fn op_exit(state: &mut OpState) { #[serde] fn op_loadavg( state: &mut OpState, -) -> Result<(f64, f64, f64), deno_core::error::AnyError> { +) -> Result<(f64, f64, f64), PermissionCheckError> { state .borrow_mut::<PermissionsContainer>() .check_sys("loadavg", "Deno.loadavg()")?; @@ -204,9 +212,7 @@ fn op_loadavg( #[op2(stack_trace, stack_trace)] #[string] -fn op_hostname( - state: &mut OpState, -) -> Result<String, deno_core::error::AnyError> { +fn op_hostname(state: &mut OpState) -> Result<String, PermissionCheckError> { state .borrow_mut::<PermissionsContainer>() .check_sys("hostname", "Deno.hostname()")?; @@ -215,9 +221,7 @@ fn op_hostname( #[op2(stack_trace)] #[string] -fn op_os_release( - state: &mut OpState, -) -> Result<String, deno_core::error::AnyError> { +fn op_os_release(state: &mut OpState) -> Result<String, PermissionCheckError> { state .borrow_mut::<PermissionsContainer>() .check_sys("osRelease", "Deno.osRelease()")?; @@ -280,7 +284,7 @@ impl From for NetworkInterface { #[serde] fn op_system_memory_info( state: &mut OpState, -) -> Result, deno_core::error::AnyError> { +) -> Result, PermissionCheckError> { state .borrow_mut::<PermissionsContainer>() .check_sys("systemMemoryInfo", "Deno.systemMemoryInfo()")?; @@ -290,9 +294,7 @@ fn op_system_memory_info( #[cfg(not(windows))] #[op2(stack_trace)] #[smi] -fn op_gid( - state: &mut OpState, -) -> Result<Option<u32>, deno_core::error::AnyError> { +fn op_gid(state: &mut OpState) -> Result<Option<u32>, PermissionCheckError> { state .borrow_mut::<PermissionsContainer>() .check_sys("gid", "Deno.gid()")?; @@ -306,9 +308,7 @@ fn op_gid( #[cfg(windows)] #[op2(stack_trace)] #[smi] -fn op_gid( - state: &mut OpState, -) -> Result<Option<u32>, deno_core::error::AnyError> { +fn op_gid(state: &mut OpState) -> Result<Option<u32>, PermissionCheckError> { state .borrow_mut::<PermissionsContainer>() .check_sys("gid", "Deno.gid()")?; @@ -318,9 +318,7 @@ fn op_gid( #[cfg(not(windows))] #[op2(stack_trace)] #[smi] -fn op_uid( - state: &mut OpState, -) -> Result<Option<u32>, deno_core::error::AnyError> { +fn op_uid(state: &mut OpState) -> Result<Option<u32>, PermissionCheckError> { state .borrow_mut::<PermissionsContainer>() .check_sys("uid", "Deno.uid()")?; @@ -334,9 +332,7 @@ fn op_uid( #[cfg(windows)] #[op2(stack_trace)] #[smi] -fn op_uid( - state: &mut OpState, -) -> Result<Option<u32>, deno_core::error::AnyError> { +fn op_uid(state: &mut OpState) -> Result<Option<u32>, PermissionCheckError> { state .borrow_mut::<PermissionsContainer>() .check_sys("uid", "Deno.uid()")?; @@ -517,7 +513,7 @@ fn rss() -> usize { } } -fn os_uptime(state: &mut OpState) -> Result<u64, deno_core::error::AnyError> { +fn os_uptime(state: &mut OpState) -> Result<u64, PermissionCheckError> { state .borrow_mut::<PermissionsContainer>() .check_sys("osUptime", "Deno.osUptime()")?; @@ -526,8 +522,6 @@ fn os_uptime(state: &mut OpState) -> Result { #[op2(fast, stack_trace)] #[number] -fn op_os_uptime( - state: &mut OpState, -) -> Result<u64, deno_core::error::AnyError> { +fn op_os_uptime(state: &mut OpState) -> Result<u64, PermissionCheckError> { os_uptime(state) } diff --git a/runtime/ops/permissions.rs b/runtime/ops/permissions.rs index 216637787ced1b..0ad14d433bdaaa 100644 --- a/runtime/ops/permissions.rs +++ b/runtime/ops/permissions.rs @@ -45,16 +45,21 @@ impl
From for PermissionStatus { } } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum PermissionError { + #[class(reference)] #[error("No such permission name: {0}")] InvalidPermissionName(String), + #[class(inherit)] #[error("{0}")] PathResolve(#[from] ::deno_permissions::PathResolveError), + #[class(uri)] #[error("{0}")] NetDescriptorParse(#[from] ::deno_permissions::NetDescriptorParseError), + #[class(inherit)] #[error("{0}")] SysDescriptorParse(#[from] ::deno_permissions::SysDescriptorParseError), + #[class(inherit)] #[error("{0}")] RunDescriptorParse(#[from] ::deno_permissions::RunDescriptorParseError), } diff --git a/runtime/ops/process.rs b/runtime/ops/process.rs index cda0c73111b59d..4c737f11261c6e 100644 --- a/runtime/ops/process.rs +++ b/runtime/ops/process.rs @@ -25,6 +25,7 @@ use deno_core::RcRef; use deno_core::Resource; use deno_core::ResourceId; use deno_core::ToJsBuffer; +use deno_error::JsErrorBox; use deno_io::fs::FileResource; use deno_io::ChildStderrResource; use deno_io::ChildStdinResource; @@ -107,8 +108,9 @@ impl StdioOrRid { match &self { StdioOrRid::Stdio(val) => Ok(val.as_stdio()), StdioOrRid::Rid(rid) => { - FileResource::with_file(state, *rid, |file| Ok(file.as_stdio()?)) - .map_err(ProcessError::Resource) + Ok(FileResource::with_file(state, *rid, |file| { + file.as_stdio().map_err(deno_error::JsErrorBox::from_err) + })?) } } } @@ -190,37 +192,73 @@ pub struct SpawnArgs { needs_npm_process_state: bool, } -#[derive(Debug, thiserror::Error)] +#[cfg(unix)] +deno_error::js_error_wrapper!(nix::Error, JsNixError, |err| { + match err { + nix::Error::ECHILD => "NotFound", + nix::Error::EINVAL => "TypeError", + nix::Error::ENOENT => "NotFound", + nix::Error::ENOTTY => "BadResource", + nix::Error::EPERM => "PermissionDenied", + nix::Error::ESRCH => "NotFound", + nix::Error::ELOOP => "FilesystemLoop", + nix::Error::ENOTDIR => "NotADirectory", + nix::Error::ENETUNREACH => "NetworkUnreachable", + nix::Error::EISDIR => "IsADirectory", + nix::Error::UnknownErrno => "Error", + &nix::Error::ENOTSUP => unreachable!(), + _ => "Error", + } +}); + +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum ProcessError { + #[class(inherit)] #[error("Failed to spawn '{command}': {error}")] SpawnFailed { command: String, #[source] + #[inherit] error: Box, }, + #[class(inherit)] #[error("{0}")] Io(#[from] std::io::Error), #[cfg(unix)] + #[class(inherit)] #[error(transparent)] - Nix(nix::Error), + Nix(JsNixError), + #[class(inherit)] #[error("failed resolving cwd: {0}")] FailedResolvingCwd(#[source] std::io::Error), + #[class(inherit)] #[error(transparent)] Permission(#[from] deno_permissions::PermissionCheckError), + #[class(inherit)] #[error(transparent)] RunPermission(#[from] CheckRunPermissionError), + #[class(inherit)] #[error(transparent)] - Resource(deno_core::error::AnyError), + Resource(deno_core::error::ResourceError), + #[class(generic)] #[error(transparent)] BorrowMut(std::cell::BorrowMutError), + #[class(generic)] #[error(transparent)] Which(which::Error), + #[class(type)] #[error("Child process has already terminated.")] ChildProcessAlreadyTerminated, + #[class(type)] #[error("Invalid pid")] InvalidPid, + #[class(inherit)] #[error(transparent)] Signal(#[from] SignalError), + #[class(inherit)] + #[error(transparent)] + Other(#[from] JsErrorBox), + #[class(type)] #[error("Missing cmd")] MissingCmd, // only for Deno.run } @@ -733,12 +771,14 @@ fn resolve_path(path: &str, cwd: &Path) -> PathBuf { 
deno_path_util::normalize_path(cwd.join(path)) } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CheckRunPermissionError { + #[class(inherit)] #[error(transparent)] Permission(#[from] deno_permissions::PermissionCheckError), + #[class(inherit)] #[error("{0}")] - Other(deno_core::error::AnyError), + Other(JsErrorBox), } fn check_run_permission( @@ -755,7 +795,7 @@ fn check_run_permission( // we don't allow users to launch subprocesses with any LD_ or DYLD_* // env vars set because this allows executing code (ex. LD_PRELOAD) return Err(CheckRunPermissionError::Other( - deno_core::error::custom_error( + JsErrorBox::new( "NotCapable", format!( "Requires --allow-run permissions to spawn subprocess with {0} environment variable{1}. Alternatively, spawn with {2} environment variable{1} unset.", @@ -1079,8 +1119,10 @@ mod deprecated { use nix::sys::signal::kill as unix_kill; use nix::sys::signal::Signal; use nix::unistd::Pid; - let sig = Signal::try_from(signo).map_err(ProcessError::Nix)?; - unix_kill(Pid::from_raw(pid), Some(sig)).map_err(ProcessError::Nix) + let sig = + Signal::try_from(signo).map_err(|e| ProcessError::Nix(JsNixError(e)))?; + unix_kill(Pid::from_raw(pid), Some(sig)) + .map_err(|e| ProcessError::Nix(JsNixError(e))) } #[cfg(not(unix))] diff --git a/runtime/ops/signal.rs b/runtime/ops/signal.rs index dfb52463cd82fc..beefa1cd78de8b 100644 --- a/runtime/ops/signal.rs +++ b/runtime/ops/signal.rs @@ -9,6 +9,7 @@ use std::sync::atomic::AtomicBool; #[cfg(unix)] use std::sync::Arc; +use deno_core::error::ResourceError; use deno_core::op2; use deno_core::AsyncRefCell; use deno_core::CancelFuture; @@ -43,14 +44,18 @@ deno_core::extension!( } ); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum SignalError { + #[class(type)] #[error(transparent)] InvalidSignalStr(#[from] crate::signal::InvalidSignalStrError), + #[class(type)] #[error(transparent)] InvalidSignalInt(#[from] crate::signal::InvalidSignalIntError), + #[class(type)] #[error("Binding to signal '{0}' is not allowed")] SignalNotAllowed(String), + #[class(inherit)] #[error("{0}")] Io(#[from] std::io::Error), } @@ -223,7 +228,7 @@ fn op_signal_bind( async fn op_signal_poll( state: Rc>, #[smi] rid: ResourceId, -) -> Result { +) -> Result { let resource = state .borrow_mut() .resource_table @@ -242,7 +247,7 @@ async fn op_signal_poll( pub fn op_signal_unbind( state: &mut OpState, #[smi] rid: ResourceId, -) -> Result<(), deno_core::error::AnyError> { +) -> Result<(), ResourceError> { let resource = state.resource_table.take::(rid)?; #[cfg(unix)] diff --git a/runtime/ops/tty.rs b/runtime/ops/tty.rs index 4843acccf3b329..d9912839b870a2 100644 --- a/runtime/ops/tty.rs +++ b/runtime/ops/tty.rs @@ -14,6 +14,9 @@ use deno_core::parking_lot::Mutex; use deno_core::OpState; #[cfg(unix)] use deno_core::ResourceId; +use deno_error::builtin_classes::GENERIC_ERROR; +use deno_error::JsErrorBox; +use deno_error::JsErrorClass; #[cfg(windows)] use deno_io::WinTtyState; #[cfg(unix)] @@ -52,6 +55,9 @@ use winapi::shared::minwindef::DWORD; #[cfg(windows)] use winapi::um::wincon; +#[cfg(unix)] +use crate::ops::process::JsNixError; + deno_core::extension!( deno_tty, ops = [op_set_raw, op_console_size, op_read_line_prompt], @@ -61,17 +67,29 @@ deno_core::extension!( }, ); -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum TtyError { + #[class(inherit)] #[error(transparent)] - 
Resource(deno_core::error::AnyError), + Resource( + #[from] + #[inherit] + deno_core::error::ResourceError, + ), + #[class(inherit)] #[error("{0}")] - Io(#[from] std::io::Error), + Io( + #[from] + #[inherit] + Error, + ), #[cfg(unix)] + #[class(inherit)] #[error(transparent)] - Nix(nix::Error), + Nix(#[inherit] JsNixError), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Other(#[inherit] JsErrorBox), } // ref: @@ -101,10 +119,7 @@ fn op_set_raw( is_raw: bool, cbreak: bool, ) -> Result<(), TtyError> { - let handle_or_fd = state - .resource_table - .get_fd(rid) - .map_err(TtyError::Resource)?; + let handle_or_fd = state.resource_table.get_fd(rid)?; // From https://github.com/kkawakam/rustyline/blob/master/src/tty/windows.rs // and https://github.com/kkawakam/rustyline/blob/master/src/tty/unix.rs @@ -113,13 +128,14 @@ fn op_set_raw( // Copyright (c) 2019 Timon. MIT license. #[cfg(windows)] { + use deno_error::JsErrorBox; use winapi::shared::minwindef::FALSE; use winapi::um::consoleapi; let handle = handle_or_fd; if cbreak { - return Err(TtyError::Other(deno_core::error::not_supported())); + return Err(TtyError::Other(JsErrorBox::not_supported())); } let mut original_mode: DWORD = 0; @@ -264,8 +280,8 @@ fn op_set_raw( Some(mode) => mode, None => { // Save original mode. - let original_mode = - termios::tcgetattr(raw_fd).map_err(TtyError::Nix)?; + let original_mode = termios::tcgetattr(raw_fd) + .map_err(|e| TtyError::Nix(JsNixError(e)))?; tty_mode_store.set(rid, original_mode.clone()); original_mode } @@ -288,12 +304,12 @@ fn op_set_raw( raw.control_chars[termios::SpecialCharacterIndices::VMIN as usize] = 1; raw.control_chars[termios::SpecialCharacterIndices::VTIME as usize] = 0; termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &raw) - .map_err(TtyError::Nix)?; + .map_err(|e| TtyError::Nix(JsNixError(e)))?; } else { // Try restore saved mode. 
if let Some(mode) = tty_mode_store.take(rid) { termios::tcsetattr(raw_fd, termios::SetArg::TCSADRAIN, &mode) - .map_err(TtyError::Nix)?; + .map_err(|e| TtyError::Nix(JsNixError(e)))?; } } @@ -311,10 +327,7 @@ fn op_console_size( result: &mut [u32], rid: u32, ) -> Result<(), TtyError> { - let fd = state - .resource_table - .get_fd(rid) - .map_err(TtyError::Resource)?; + let fd = state.resource_table.get_fd(rid)?; let size = console_size_from_fd(fd)?; result[0] = size.cols; result[1] = size.rows; @@ -432,12 +445,28 @@ mod tests { } } +deno_error::js_error_wrapper!(ReadlineError, JsReadlineError, |err| { + match err { + ReadlineError::Io(e) => e.get_class(), + ReadlineError::Eof => GENERIC_ERROR.into(), + ReadlineError::Interrupted => GENERIC_ERROR.into(), + #[cfg(unix)] + ReadlineError::Errno(e) => JsNixError(*e).get_class(), + ReadlineError::WindowResized => GENERIC_ERROR.into(), + #[cfg(windows)] + ReadlineError::Decode(_) => GENERIC_ERROR.into(), + #[cfg(windows)] + ReadlineError::SystemError(_) => GENERIC_ERROR.into(), + _ => GENERIC_ERROR.into(), + } +}); + #[op2] #[string] pub fn op_read_line_prompt( #[string] prompt_text: &str, #[string] default_value: &str, -) -> Result, ReadlineError> { +) -> Result, JsReadlineError> { let mut editor = Editor::<(), rustyline::history::DefaultHistory>::new() .expect("Failed to create editor."); @@ -457,6 +486,6 @@ pub fn op_read_line_prompt( Ok(None) } Err(ReadlineError::Eof) => Ok(None), - Err(err) => Err(err), + Err(err) => Err(JsReadlineError(err)), } } diff --git a/runtime/ops/web_worker/sync_fetch.rs b/runtime/ops/web_worker/sync_fetch.rs index c9b622d31e9c14..4c5da428b24149 100644 --- a/runtime/ops/web_worker/sync_fetch.rs +++ b/runtime/ops/web_worker/sync_fetch.rs @@ -26,30 +26,53 @@ fn mime_type_essence(mime_type: &str) -> String { essence.trim().to_ascii_lowercase() } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum SyncFetchError { + #[class(type)] #[error("Blob URLs are not supported in this context.")] BlobUrlsNotSupportedInContext, + #[class(inherit)] #[error("{0}")] - Io(#[from] std::io::Error), + Io( + #[from] + #[inherit] + std::io::Error, + ), + #[class(type)] #[error("Invalid script URL")] InvalidScriptUrl, + #[class(type)] #[error("http status error: {0}")] InvalidStatusCode(http::StatusCode), + #[class(type)] #[error("Classic scripts with scheme {0}: are not supported in workers")] ClassicScriptSchemeUnsupportedInWorkers(String), + #[class(generic)] #[error("{0}")] InvalidUri(#[from] http::uri::InvalidUri), + #[class("DOMExceptionNetworkError")] #[error("Invalid MIME type {0:?}.")] InvalidMimeType(String), + #[class("DOMExceptionNetworkError")] #[error("Missing MIME type.")] MissingMimeType, + #[class(inherit)] #[error(transparent)] - Fetch(#[from] FetchError), + Fetch( + #[from] + #[inherit] + FetchError, + ), + #[class(inherit)] #[error(transparent)] - Join(#[from] tokio::task::JoinError), + Join( + #[from] + #[inherit] + tokio::task::JoinError, + ), + #[class(inherit)] #[error(transparent)] - Other(deno_core::error::AnyError), + Other(#[inherit] deno_error::JsErrorBox), } #[derive(Serialize, Deserialize)] diff --git a/runtime/ops/worker_host.rs b/runtime/ops/worker_host.rs index 45285943eb5659..c77b3af6940b92 100644 --- a/runtime/ops/worker_host.rs +++ b/runtime/ops/worker_host.rs @@ -120,16 +120,21 @@ pub struct CreateWorkerArgs { close_on_idle: bool, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum CreateWorkerError { + 
#[class("DOMExceptionNotSupportedError")] #[error("Classic workers are not supported.")] ClassicWorkers, + #[class(inherit)] #[error(transparent)] Permission(deno_permissions::ChildPermissionError), + #[class(inherit)] #[error(transparent)] ModuleResolution(#[from] deno_core::ModuleResolutionError), + #[class(inherit)] #[error(transparent)] MessagePort(#[from] MessagePortError), + #[class(inherit)] #[error("{0}")] Io(#[from] std::io::Error), } diff --git a/runtime/permissions/Cargo.toml b/runtime/permissions/Cargo.toml index b9259941aaf7fe..be397fe6d3f197 100644 --- a/runtime/permissions/Cargo.toml +++ b/runtime/permissions/Cargo.toml @@ -16,6 +16,7 @@ path = "lib.rs" [dependencies] capacity_builder.workspace = true deno_core.workspace = true +deno_error.workspace = true deno_path_util.workspace = true deno_terminal.workspace = true fqdn = "0.3.4" diff --git a/runtime/permissions/lib.rs b/runtime/permissions/lib.rs index 8ab4058e79ed2c..3a357d2d44a8ef 100644 --- a/runtime/permissions/lib.rs +++ b/runtime/permissions/lib.rs @@ -819,7 +819,8 @@ pub enum Host { Ip(IpAddr), } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(uri)] pub enum HostParseError { #[error("invalid IPv6 address: '{0}'")] InvalidIpv6(String), @@ -954,10 +955,12 @@ pub enum NetDescriptorParseError { Host(#[from] HostParseError), } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum NetDescriptorFromUrlParseError { + #[class(type)] #[error("Missing host in url: '{0}'")] MissingHost(Url), + #[class(inherit)] #[error("{0}")] Host(#[from] HostParseError), } @@ -1324,10 +1327,12 @@ pub enum RunQueryDescriptor { Name(String), } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum PathResolveError { + #[class(inherit)] #[error("failed resolving cwd: {0}")] CwdResolve(#[source] std::io::Error), + #[class(generic)] #[error("Empty path is not allowed")] EmptyPath, } @@ -1484,12 +1489,15 @@ pub enum AllowRunDescriptorParseResult { Descriptor(AllowRunDescriptor), } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum RunDescriptorParseError { + #[class(generic)] #[error("{0}")] Which(#[from] which::Error), + #[class(inherit)] #[error("{0}")] PathResolve(#[from] PathResolveError), + #[class(generic)] #[error("Empty run query is not allowed")] EmptyRunQuery, } @@ -1573,10 +1581,12 @@ fn denies_run_name(name: &str, cmd_path: &Path) -> bool { suffix.is_empty() || suffix.starts_with('.') } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum SysDescriptorParseError { + #[class(type)] #[error("unknown system info kind \"{0}\"")] - InvalidKind(String), // TypeError + InvalidKind(String), + #[class(generic)] #[error("Empty sys not allowed")] Empty, // Error } @@ -2301,34 +2311,46 @@ pub enum CheckSpecifierKind { Dynamic, } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum ChildPermissionError { + #[class("NotCapable")] #[error("Can't escalate parent thread permissions")] Escalation, + #[class(inherit)] #[error("{0}")] PathResolve(#[from] PathResolveError), + #[class(uri)] #[error("{0}")] NetDescriptorParse(#[from] NetDescriptorParseError), + #[class(generic)] #[error("{0}")] EnvDescriptorParse(#[from] EnvDescriptorParseError), + #[class(inherit)] #[error("{0}")] SysDescriptorParse(#[from] SysDescriptorParseError), + #[class(inherit)] 
#[error("{0}")] RunDescriptorParse(#[from] RunDescriptorParseError), } -#[derive(Debug, thiserror::Error)] +#[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum PermissionCheckError { + #[class("NotCapable")] #[error(transparent)] PermissionDenied(#[from] PermissionDeniedError), + #[class(uri)] #[error("Invalid file path.\n Specifier: {0}")] InvalidFilePath(Url), + #[class(inherit)] #[error(transparent)] NetDescriptorForUrlParse(#[from] NetDescriptorFromUrlParseError), + #[class(inherit)] #[error(transparent)] SysDescriptorParse(#[from] SysDescriptorParseError), + #[class(inherit)] #[error(transparent)] PathResolve(#[from] PathResolveError), + #[class(uri)] #[error(transparent)] HostParse(#[from] HostParseError), } diff --git a/runtime/shared.rs b/runtime/shared.rs index 1a9f2e66be1814..f8588dd72ca89a 100644 --- a/runtime/shared.rs +++ b/runtime/shared.rs @@ -6,12 +6,12 @@ use std::path::Path; use deno_ast::MediaType; use deno_ast::ParseParams; use deno_ast::SourceMapOption; -use deno_core::error::AnyError; use deno_core::extension; use deno_core::Extension; use deno_core::ModuleCodeString; use deno_core::ModuleName; use deno_core::SourceMapData; +use deno_error::JsErrorBox; extension!(runtime, deps = [ @@ -64,10 +64,21 @@ extension!(runtime, } ); +deno_error::js_error_wrapper!( + deno_ast::ParseDiagnostic, + JsParseDiagnostic, + "Error" +); +deno_error::js_error_wrapper!( + deno_ast::TranspileError, + JsTranspileError, + "Error" +); + pub fn maybe_transpile_source( name: ModuleName, source: ModuleCodeString, -) -> Result<(ModuleCodeString, Option), AnyError> { +) -> Result<(ModuleCodeString, Option), JsErrorBox> { // Always transpile `node:` built-in modules, since they might be TypeScript. let media_type = if name.starts_with("node:") { MediaType::TypeScript @@ -92,7 +103,8 @@ pub fn maybe_transpile_source( capture_tokens: false, scope_analysis: false, maybe_syntax: None, - })?; + }) + .map_err(|e| JsErrorBox::from_err(JsParseDiagnostic(e)))?; let transpiled_source = parsed .transpile( &deno_ast::TranspileOptions { @@ -108,7 +120,8 @@ pub fn maybe_transpile_source( }, ..Default::default() }, - )? + ) + .map_err(|e| JsErrorBox::from_err(JsTranspileError(e)))? .into_source(); let maybe_source_map: Option = transpiled_source diff --git a/runtime/web_worker.rs b/runtime/web_worker.rs index 270fc1ab9f77b7..64393bb64cbec5 100644 --- a/runtime/web_worker.rs +++ b/runtime/web_worker.rs @@ -13,8 +13,7 @@ use std::task::Poll; use deno_broadcast_channel::InMemoryBroadcastChannel; use deno_cache::CreateCache; use deno_cache::SqliteBackedCache; -use deno_core::error::AnyError; -use deno_core::error::JsError; +use deno_core::error::CoreError; use deno_core::futures::channel::mpsc; use deno_core::futures::future::poll_fn; use deno_core::futures::stream::StreamExt; @@ -29,7 +28,6 @@ use deno_core::CompiledWasmModuleStore; use deno_core::DetachedBuffer; use deno_core::Extension; use deno_core::FeatureChecker; -use deno_core::GetErrorClassFn; use deno_core::JsRuntime; use deno_core::ModuleCodeString; use deno_core::ModuleId; @@ -105,8 +103,7 @@ pub enum WebWorkerType { /// Events that are sent to host from child /// worker. 
pub enum WorkerControlEvent { - Error(AnyError), - TerminalError(AnyError), + TerminalError(CoreError), Close, } @@ -119,15 +116,13 @@ impl Serialize for WorkerControlEvent { { let type_id = match &self { WorkerControlEvent::TerminalError(_) => 1_i32, - WorkerControlEvent::Error(_) => 2_i32, WorkerControlEvent::Close => 3_i32, }; match self { - WorkerControlEvent::TerminalError(error) - | WorkerControlEvent::Error(error) => { - let value = match error.downcast_ref::() { - Some(js_error) => { + WorkerControlEvent::TerminalError(error) => { + let value = match error { + CoreError::Js(js_error) => { let frame = js_error.frames.iter().find(|f| match &f.file_name { Some(s) => !s.trim_start_matches('[').starts_with("ext:"), None => false, @@ -139,7 +134,7 @@ impl Serialize for WorkerControlEvent { "columnNumber": frame.map(|f| f.column_number.as_ref()), }) } - None => json!({ + _ => json!({ "message": error.to_string(), }), }; @@ -368,7 +363,6 @@ pub struct WebWorkerOptions { pub create_web_worker_cb: Arc, pub format_js_error_fn: Option>, pub worker_type: WebWorkerType, - pub get_error_class_fn: Option, pub cache_storage_dir: Option, pub stdio: Stdio, pub strace_ops: Option>, @@ -569,7 +563,6 @@ impl WebWorker { module_loader: Some(services.module_loader), startup_snapshot: options.startup_snapshot, create_params: options.create_params, - get_error_class_fn: options.get_error_class_fn, shared_array_buffer_store: services.shared_array_buffer_store, compiled_wasm_module_store: services.compiled_wasm_module_store, extensions, @@ -737,7 +730,7 @@ impl WebWorker { &mut self, name: &'static str, source_code: ModuleCodeString, - ) -> Result<(), AnyError> { + ) -> Result<(), CoreError> { self.js_runtime.execute_script(name, source_code)?; Ok(()) } @@ -746,7 +739,7 @@ impl WebWorker { pub async fn preload_main_module( &mut self, module_specifier: &ModuleSpecifier, - ) -> Result { + ) -> Result { self.js_runtime.load_main_es_module(module_specifier).await } @@ -754,7 +747,7 @@ impl WebWorker { pub async fn preload_side_module( &mut self, module_specifier: &ModuleSpecifier, - ) -> Result { + ) -> Result { self.js_runtime.load_side_es_module(module_specifier).await } @@ -765,7 +758,7 @@ impl WebWorker { pub async fn execute_side_module( &mut self, module_specifier: &ModuleSpecifier, - ) -> Result<(), AnyError> { + ) -> Result<(), CoreError> { let id = self.preload_side_module(module_specifier).await?; let mut receiver = self.js_runtime.mod_evaluate(id); tokio::select! 
{ @@ -789,7 +782,7 @@ impl WebWorker { pub async fn execute_main_module( &mut self, id: ModuleId, - ) -> Result<(), AnyError> { + ) -> Result<(), CoreError> { let mut receiver = self.js_runtime.mod_evaluate(id); let poll_options = PollEventLoopOptions::default(); @@ -815,7 +808,7 @@ impl WebWorker { &mut self, cx: &mut Context, poll_options: PollEventLoopOptions, - ) -> Poll> { + ) -> Poll> { // If awakened because we are terminating, just return Ok if self.internal_handle.terminate_if_needed() { return Poll::Ready(Ok(())); @@ -859,7 +852,7 @@ impl WebWorker { pub async fn run_event_loop( &mut self, poll_options: PollEventLoopOptions, - ) -> Result<(), AnyError> { + ) -> Result<(), CoreError> { poll_fn(|cx| self.poll_event_loop(cx, poll_options)).await } @@ -893,14 +886,14 @@ impl WebWorker { } fn print_worker_error( - error: &AnyError, + error: &CoreError, name: &str, format_js_error_fn: Option<&FormatJsErrorFn>, ) { let error_str = match format_js_error_fn { - Some(format_js_error_fn) => match error.downcast_ref::() { - Some(js_error) => format_js_error_fn(js_error), - None => error.to_string(), + Some(format_js_error_fn) => match error { + CoreError::Js(js_error) => format_js_error_fn(js_error), + _ => error.to_string(), }, None => error.to_string(), }; @@ -919,7 +912,7 @@ pub fn run_web_worker( specifier: ModuleSpecifier, mut maybe_source_code: Option, format_js_error_fn: Option>, -) -> Result<(), AnyError> { +) -> Result<(), CoreError> { let name = worker.name.to_string(); // TODO(bartlomieju): run following block using "select!" diff --git a/runtime/worker.rs b/runtime/worker.rs index de29b66291fb8e..a649c83d478f2a 100644 --- a/runtime/worker.rs +++ b/runtime/worker.rs @@ -12,14 +12,13 @@ use std::time::Instant; use deno_broadcast_channel::InMemoryBroadcastChannel; use deno_cache::CreateCache; use deno_cache::SqliteBackedCache; -use deno_core::error::AnyError; +use deno_core::error::CoreError; use deno_core::error::JsError; use deno_core::merge_op_metrics; use deno_core::v8; use deno_core::CompiledWasmModuleStore; use deno_core::Extension; use deno_core::FeatureChecker; -use deno_core::GetErrorClassFn; use deno_core::InspectorSessionKind; use deno_core::InspectorSessionOptions; use deno_core::JsRuntime; @@ -59,10 +58,10 @@ use crate::BootstrapOptions; pub type FormatJsErrorFn = dyn Fn(&JsError) -> String + Sync + Send; pub fn import_meta_resolve_callback( - loader: &dyn deno_core::ModuleLoader, + loader: &dyn ModuleLoader, specifier: String, referrer: String, -) -> Result { +) -> Result { loader.resolve( &specifier, &referrer, @@ -202,9 +201,6 @@ pub struct WorkerOptions { /// If Some, print a low-level trace output for ops matching the given patterns. pub strace_ops: Option>, - /// Allows to map error type to a string "class" used to represent - /// error in JavaScript. 
- pub get_error_class_fn: Option, pub cache_storage_dir: Option, pub origin_storage_dir: Option, pub stdio: Stdio, @@ -225,7 +221,6 @@ impl Default for WorkerOptions { strace_ops: Default::default(), maybe_inspector_server: Default::default(), format_js_error_fn: Default::default(), - get_error_class_fn: Default::default(), origin_storage_dir: Default::default(), cache_storage_dir: Default::default(), extensions: Default::default(), @@ -489,7 +484,6 @@ impl MainWorker { startup_snapshot: options.startup_snapshot, create_params: options.create_params, skip_op_registration: options.skip_op_registration, - get_error_class_fn: options.get_error_class_fn, shared_array_buffer_store: services.shared_array_buffer_store.clone(), compiled_wasm_module_store: services.compiled_wasm_module_store.clone(), extensions, @@ -708,7 +702,7 @@ impl MainWorker { &mut self, script_name: &'static str, source_code: ModuleCodeString, - ) -> Result, AnyError> { + ) -> Result, CoreError> { self.js_runtime.execute_script(script_name, source_code) } @@ -716,7 +710,7 @@ impl MainWorker { pub async fn preload_main_module( &mut self, module_specifier: &ModuleSpecifier, - ) -> Result { + ) -> Result { self.js_runtime.load_main_es_module(module_specifier).await } @@ -724,7 +718,7 @@ impl MainWorker { pub async fn preload_side_module( &mut self, module_specifier: &ModuleSpecifier, - ) -> Result { + ) -> Result { self.js_runtime.load_side_es_module(module_specifier).await } @@ -732,7 +726,7 @@ impl MainWorker { pub async fn evaluate_module( &mut self, id: ModuleId, - ) -> Result<(), AnyError> { + ) -> Result<(), CoreError> { self.wait_for_inspector_session(); let mut receiver = self.js_runtime.mod_evaluate(id); tokio::select! { @@ -757,7 +751,7 @@ impl MainWorker { pub async fn run_up_to_duration( &mut self, duration: Duration, - ) -> Result<(), AnyError> { + ) -> Result<(), CoreError> { match tokio::time::timeout( duration, self @@ -776,7 +770,7 @@ impl MainWorker { pub async fn execute_side_module( &mut self, module_specifier: &ModuleSpecifier, - ) -> Result<(), AnyError> { + ) -> Result<(), CoreError> { let id = self.preload_side_module(module_specifier).await?; self.evaluate_module(id).await } @@ -787,7 +781,7 @@ impl MainWorker { pub async fn execute_main_module( &mut self, module_specifier: &ModuleSpecifier, - ) -> Result<(), AnyError> { + ) -> Result<(), CoreError> { let id = self.preload_main_module(module_specifier).await?; self.evaluate_module(id).await } @@ -818,10 +812,10 @@ impl MainWorker { pub async fn run_event_loop( &mut self, wait_for_inspector: bool, - ) -> Result<(), AnyError> { + ) -> Result<(), CoreError> { self .js_runtime - .run_event_loop(deno_core::PollEventLoopOptions { + .run_event_loop(PollEventLoopOptions { wait_for_inspector, ..Default::default() }) @@ -837,7 +831,7 @@ impl MainWorker { /// Dispatches "load" event to the JavaScript runtime. /// /// Does not poll event loop, and thus not await any of the "load" event handlers. 
- pub fn dispatch_load_event(&mut self) -> Result<(), AnyError> { + pub fn dispatch_load_event(&mut self) -> Result<(), JsError> { let scope = &mut self.js_runtime.handle_scope(); let tc_scope = &mut v8::TryCatch::new(scope); let dispatch_load_event_fn = @@ -846,7 +840,7 @@ impl MainWorker { dispatch_load_event_fn.call(tc_scope, undefined.into(), &[]); if let Some(exception) = tc_scope.exception() { let error = JsError::from_v8_exception(tc_scope, exception); - return Err(error.into()); + return Err(error); } Ok(()) } @@ -854,7 +848,7 @@ impl MainWorker { /// Dispatches "unload" event to the JavaScript runtime. /// /// Does not poll event loop, and thus not await any of the "unload" event handlers. - pub fn dispatch_unload_event(&mut self) -> Result<(), AnyError> { + pub fn dispatch_unload_event(&mut self) -> Result<(), JsError> { let scope = &mut self.js_runtime.handle_scope(); let tc_scope = &mut v8::TryCatch::new(scope); let dispatch_unload_event_fn = @@ -863,13 +857,13 @@ impl MainWorker { dispatch_unload_event_fn.call(tc_scope, undefined.into(), &[]); if let Some(exception) = tc_scope.exception() { let error = JsError::from_v8_exception(tc_scope, exception); - return Err(error.into()); + return Err(error); } Ok(()) } /// Dispatches process.emit("exit") event for node compat. - pub fn dispatch_process_exit_event(&mut self) -> Result<(), AnyError> { + pub fn dispatch_process_exit_event(&mut self) -> Result<(), JsError> { let scope = &mut self.js_runtime.handle_scope(); let tc_scope = &mut v8::TryCatch::new(scope); let dispatch_process_exit_event_fn = @@ -878,7 +872,7 @@ impl MainWorker { dispatch_process_exit_event_fn.call(tc_scope, undefined.into(), &[]); if let Some(exception) = tc_scope.exception() { let error = JsError::from_v8_exception(tc_scope, exception); - return Err(error.into()); + return Err(error); } Ok(()) } @@ -886,7 +880,7 @@ impl MainWorker { /// Dispatches "beforeunload" event to the JavaScript runtime. Returns a boolean /// indicating if the event was prevented and thus event loop should continue /// running. - pub fn dispatch_beforeunload_event(&mut self) -> Result { + pub fn dispatch_beforeunload_event(&mut self) -> Result { let scope = &mut self.js_runtime.handle_scope(); let tc_scope = &mut v8::TryCatch::new(scope); let dispatch_beforeunload_event_fn = @@ -896,16 +890,14 @@ impl MainWorker { dispatch_beforeunload_event_fn.call(tc_scope, undefined.into(), &[]); if let Some(exception) = tc_scope.exception() { let error = JsError::from_v8_exception(tc_scope, exception); - return Err(error.into()); + return Err(error); } let ret_val = ret_val.unwrap(); Ok(ret_val.is_false()) } /// Dispatches process.emit("beforeExit") event for node compat. 
- pub fn dispatch_process_beforeexit_event( - &mut self, - ) -> Result { + pub fn dispatch_process_beforeexit_event(&mut self) -> Result { let scope = &mut self.js_runtime.handle_scope(); let tc_scope = &mut v8::TryCatch::new(scope); let dispatch_process_beforeexit_event_fn = v8::Local::new( @@ -920,7 +912,7 @@ impl MainWorker { ); if let Some(exception) = tc_scope.exception() { let error = JsError::from_v8_exception(tc_scope, exception); - return Err(error.into()); + return Err(error); } let ret_val = ret_val.unwrap(); Ok(ret_val.is_true()) diff --git a/tests/specs/npm/npmrc_tarball_other_server/fail/main.out b/tests/specs/npm/npmrc_tarball_other_server/fail/main.out index 2c68dba54e28e7..d49bc148ea4d50 100644 --- a/tests/specs/npm/npmrc_tarball_other_server/fail/main.out +++ b/tests/specs/npm/npmrc_tarball_other_server/fail/main.out @@ -1,6 +1,6 @@ Download http://localhost:4261/@denotest%2ftarballs-privateserver2 Download http://localhost:4262/@denotest/tarballs-privateserver2/1.0.0.tgz -error: Failed caching npm package '@denotest/tarballs-privateserver2@1.0.0'. +error: Failed caching npm package '@denotest/tarballs-privateserver2@1.0.0' Caused by: No auth for tarball URI, but present for scoped registry. diff --git a/tests/specs/npm/npmrc_tarball_other_server/success/main.out b/tests/specs/npm/npmrc_tarball_other_server/success/main.out index 5322a1a17deb25..239f1d525bedf9 100644 --- a/tests/specs/npm/npmrc_tarball_other_server/success/main.out +++ b/tests/specs/npm/npmrc_tarball_other_server/success/main.out @@ -4,7 +4,7 @@ Download http://localhost:4262/@denotest/tarballs-privateserver2/1.0.0.tgz [# to serve proper checksums for a package at another registry. That's fine] [# though because this shows us that we're making it to this step instead of] [# failing sooner on an auth issue.] -error: Failed caching npm package '@denotest/tarballs-privateserver2@1.0.0'. +error: Failed caching npm package '@denotest/tarballs-privateserver2@1.0.0' Caused by: Tarball checksum did not match [WILDCARD]
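Editor's note (illustrative, not part of the patch): the changes above replace `deno_core::error::AnyError` and the removed `get_error_class_fn` hook with error enums that carry their own JavaScript error class via `deno_error`. Below is a minimal sketch of that pattern, assuming only the `deno_error` and `thiserror` crates already used throughout this diff; the `DlOpenError` enum and the `open_library` function are hypothetical names invented for illustration.

    // Hypothetical example; mirrors the #[class(...)] / JsErrorBox usage added above.
    use deno_error::JsErrorBox;

    #[derive(Debug, thiserror::Error, deno_error::JsError)]
    pub enum DlOpenError {
      // Surfaces in JS as a TypeError (the `type` class).
      #[class(type)]
      #[error("Library path is empty")]
      EmptyPath,
      // Reuses the class of the wrapped error (here std::io::Error).
      #[class(inherit)]
      #[error("{0}")]
      Io(#[from] std::io::Error),
      // Already-classified, boxed errors pass through unchanged.
      #[class(inherit)]
      #[error(transparent)]
      Other(#[from] JsErrorBox),
    }

    fn open_library(path: &str) -> Result<(), DlOpenError> {
      if path.is_empty() {
        return Err(DlOpenError::EmptyPath);
      }
      if path.starts_with("/proc") {
        // Ad-hoc error with an explicit class name, like the "NotCapable"
        // JsErrorBox::new(...) call in check_run_permission above.
        return Err(DlOpenError::Other(JsErrorBox::new(
          "NotCapable",
          format!("opening {path} is not allowed"),
        )));
      }
      std::fs::metadata(path)?; // io::Error converts via #[from], keeping its own class
      Ok(())
    }

Foreign error types that cannot derive `JsError` directly are adapted with the `deno_error::js_error_wrapper!` macro instead, as the patch does for `nix::Error` (`JsNixError`) and `rustyline::error::ReadlineError` (`JsReadlineError`), so a JS class can still be reported when those errors cross into JavaScript.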