diff --git a/Cargo.lock b/Cargo.lock index e17bb71ffb..239ae08c09 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -109,12 +109,6 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" -[[package]] -name = "assert_matches" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" - [[package]] name = "async-attributes" version = "1.1.2" @@ -1480,6 +1474,7 @@ version = "0.30.0" dependencies = [ "bstr", "document-features", + "gix-error", "gix-fs", "gix-glob", "gix-path", @@ -1489,7 +1484,6 @@ dependencies = [ "kstring", "serde", "smallvec", - "thiserror 2.0.18", "unicode-bom", ] @@ -1497,8 +1491,8 @@ dependencies = [ name = "gix-bitmap" version = "0.2.15" dependencies = [ + "gix-error", "gix-testtools", - "thiserror 2.0.18", ] [[package]] @@ -1589,6 +1583,7 @@ dependencies = [ "bytesize", "cap", "gix-config", + "gix-error", "gix-path", "gix-ref", "gix-sec", @@ -1618,6 +1613,7 @@ dependencies = [ "gix-command", "gix-config-value", "gix-date", + "gix-error", "gix-path", "gix-prompt", "gix-quote", @@ -1677,6 +1673,7 @@ name = "gix-diff-tests" version = "0.0.0" dependencies = [ "gix-diff", + "gix-error", "gix-filter", "gix-fs", "gix-hash", @@ -1698,6 +1695,7 @@ version = "0.20.0" dependencies = [ "bstr", "gix-discover", + "gix-error", "gix-fs", "gix-ignore", "gix-index", @@ -1719,6 +1717,7 @@ dependencies = [ "bstr", "defer", "dunce", + "gix-error", "gix-fs", "gix-path", "gix-ref", @@ -1751,6 +1750,7 @@ dependencies = [ "crc32fast", "crossbeam-channel", "document-features", + "gix-error", "gix-path", "gix-trace", "gix-utils", @@ -1758,7 +1758,6 @@ dependencies = [ "once_cell", "parking_lot", "prodash", - "thiserror 2.0.18", "walkdir", "zlib-rs 0.6.0", ] @@ -1796,13 +1795,13 @@ dependencies = [ "bstr", "crossbeam-channel", "fastrand", + "gix-error", "gix-features", 
"gix-path", "gix-utils", "is_ci", "serde", "tempfile", - "thiserror 2.0.18", ] [[package]] @@ -1835,12 +1834,12 @@ version = "0.22.0" dependencies = [ "document-features", "faster-hex", + "gix-error", "gix-features", "gix-testtools", "serde", "sha1-checked", "sha2", - "thiserror 2.0.18", ] [[package]] @@ -1877,6 +1876,7 @@ dependencies = [ "filetime", "fnv", "gix-bitmap", + "gix-error", "gix-features", "gix-fs", "gix-hash", @@ -1902,6 +1902,7 @@ version = "0.0.0" dependencies = [ "bstr", "filetime", + "gix-error", "gix-features", "gix-hash", "gix-index", @@ -1918,10 +1919,10 @@ version = "0.0.0" name = "gix-lock" version = "21.0.0" dependencies = [ + "gix-error", "gix-tempfile", "gix-utils", "tempfile", - "thiserror 2.0.18", ] [[package]] @@ -1942,9 +1943,9 @@ dependencies = [ "document-features", "gix-actor", "gix-date", + "gix-error", "gix-testtools", "serde", - "thiserror 2.0.18", ] [[package]] @@ -1955,6 +1956,7 @@ dependencies = [ "document-features", "gix-command", "gix-diff", + "gix-error", "gix-filter", "gix-fs", "gix-hash", @@ -2007,6 +2009,7 @@ dependencies = [ "document-features", "gix-actor", "gix-date", + "gix-error", "gix-features", "gix-hash", "gix-hashtable", @@ -2030,6 +2033,7 @@ version = "0.75.0" dependencies = [ "arc-swap", "document-features", + "gix-error", "gix-features", "gix-fs", "gix-hash", @@ -2052,6 +2056,7 @@ dependencies = [ "filetime", "gix-actor", "gix-date", + "gix-error", "gix-features", "gix-hash", "gix-object", @@ -2092,6 +2097,7 @@ name = "gix-pack-tests" version = "0.0.0" dependencies = [ "bstr", + "gix-error", "gix-features", "gix-hash", "gix-object", @@ -2113,6 +2119,7 @@ dependencies = [ "faster-hex", "futures-io", "futures-lite", + "gix-error", "gix-hash", "gix-odb", "gix-pack", @@ -2120,7 +2127,6 @@ dependencies = [ "maybe-async", "pin-project-lite", "serde", - "thiserror 2.0.18", ] [[package]] @@ -2128,11 +2134,11 @@ name = "gix-path" version = "0.11.0" dependencies = [ "bstr", + "gix-error", "gix-testtools", "gix-trace", 
"gix-validate", "serial_test", - "thiserror 2.0.18", "windows 0.62.2", "winreg 0.55.0", ] @@ -2178,6 +2184,7 @@ dependencies = [ "futures-lite", "gix-credentials", "gix-date", + "gix-error", "gix-features", "gix-hash", "gix-lock", @@ -2202,8 +2209,8 @@ name = "gix-quote" version = "0.6.1" dependencies = [ "bstr", + "gix-error", "gix-utils", - "thiserror 2.0.18", ] [[package]] @@ -2217,6 +2224,7 @@ dependencies = [ "document-features", "gix-actor", "gix-date", + "gix-error", "gix-features", "gix-fs", "gix-hash", @@ -2342,6 +2350,7 @@ dependencies = [ "filetime", "gix-diff", "gix-dir", + "gix-error", "gix-features", "gix-filter", "gix-fs", @@ -2417,6 +2426,7 @@ dependencies = [ "fastrand", "fs_extra", "gix-discover", + "gix-error", "gix-fs", "gix-lock", "gix-tempfile", @@ -2458,6 +2468,7 @@ dependencies = [ "futures-lite", "gix-command", "gix-credentials", + "gix-error", "gix-features", "gix-hash", "gix-pack", @@ -2509,14 +2520,13 @@ version = "0.0.0" name = "gix-url" version = "0.35.0" dependencies = [ - "assert_matches", "bstr", "document-features", + "gix-error", "gix-path", "gix-testtools", "percent-encoding", "serde", - "thiserror 2.0.18", ] [[package]] diff --git a/gitoxide-core/src/net.rs b/gitoxide-core/src/net.rs index 533b8ec875..305c8f8cff 100644 --- a/gitoxide-core/src/net.rs +++ b/gitoxide-core/src/net.rs @@ -49,6 +49,7 @@ pub async fn connect( where Url: TryInto, gix::url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { Ok(gix::protocol::SendFlushOnDrop::new( io_mode::connect::connect(url, options).await?, diff --git a/gitoxide-core/src/pack/create.rs b/gitoxide-core/src/pack/create.rs index 7e9ebcb285..4446d045c3 100644 --- a/gitoxide-core/src/pack/create.rs +++ b/gitoxide-core/src/pack/create.rs @@ -148,7 +148,10 @@ where .map(|hex_id| { hex_id .map_err(|err| Box::new(err) as Box<_>) - .and_then(|hex_id| ObjectId::from_hex(hex_id.as_bytes()).map_err(Into::into)) + .and_then(|hex_id| { + 
ObjectId::from_hex(hex_id.as_bytes()) + .map_err(|e| Box::new(e.into_error()) as Box<_>) + }) }) .inspect(move |_| progress.inc()), ), diff --git a/gix-archive/tests/archive.rs b/gix-archive/tests/archive.rs index 6a33314d0c..c9cd867a57 100644 --- a/gix-archive/tests/archive.rs +++ b/gix-archive/tests/archive.rs @@ -289,7 +289,7 @@ mod from_tree { let head = { let hex = std::fs::read(dir.join("head.hex"))?; - gix_hash::ObjectId::from_hex(hex.trim())? + gix_hash::ObjectId::from_hex(hex.trim()).map_err(gix_error::Exn::into_error)? }; let odb = gix_odb::at(dir.join(".git").join("objects"))?; diff --git a/gix-attributes/Cargo.toml b/gix-attributes/Cargo.toml index 6a6eb9ddd9..9c3b9db855 100644 --- a/gix-attributes/Cargo.toml +++ b/gix-attributes/Cargo.toml @@ -28,7 +28,7 @@ bstr = { version = "1.12.0", default-features = false, features = ["std", "unico smallvec = "1.15.1" kstring = "2.0.0" unicode-bom = { version = "2.0.3" } -thiserror = "2.0.18" +gix-error = { version = "^0.0.0", path = "../gix-error" } serde = { version = "1.0.114", optional = true, default-features = false, features = ["derive"] } document-features = { version = "0.2.1", optional = true } diff --git a/gix-attributes/src/name.rs b/gix-attributes/src/name.rs index 6ca70248a1..beacbca094 100644 --- a/gix-attributes/src/name.rs +++ b/gix-attributes/src/name.rs @@ -1,4 +1,5 @@ -use bstr::{BStr, BString, ByteSlice}; +use bstr::{BStr, ByteSlice}; +use gix_error::OptionExt; use kstring::KStringRef; use crate::{Name, NameRef}; @@ -36,7 +37,7 @@ impl<'a> TryFrom<&'a BStr> for NameRef<'a> { attr_valid(attr) .then(|| NameRef(KStringRef::from_ref(attr.to_str().expect("no illformed utf8")))) - .ok_or_else(|| Error { attribute: attr.into() }) + .ok_or_raise(|| gix_error::message!("Attribute has non-ascii characters or starts with '-': {attr}")) } } @@ -59,9 +60,4 @@ impl AsRef for Name { } /// The error returned by [`parse::Iter`][crate::parse::Iter]. 
-#[derive(Debug, thiserror::Error)] -#[error("Attribute has non-ascii characters or starts with '-': {attribute}")] -pub struct Error { - /// The attribute that failed to parse. - pub attribute: BString, -} +pub type Error = gix_error::Exn; diff --git a/gix-attributes/src/parse.rs b/gix-attributes/src/parse.rs index 6bbf9bf23c..00762c240d 100644 --- a/gix-attributes/src/parse.rs +++ b/gix-attributes/src/parse.rs @@ -1,6 +1,7 @@ use std::borrow::Cow; use bstr::{BStr, ByteSlice}; +use gix_error::{message, ErrorExt, OptionExt, ResultExt}; use kstring::KStringRef; use crate::{name, AssignmentRef, Name, NameRef, StateRef}; @@ -15,23 +16,8 @@ pub enum Kind { Macro(Name), } -mod error { - use bstr::BString; - /// The error returned by [`parse::Lines`][crate::parse::Lines]. - #[derive(thiserror::Error, Debug)] - #[allow(missing_docs)] - pub enum Error { - #[error(r"Line {line_number} has a negative pattern, for literal characters use \!: {line}")] - PatternNegation { line_number: usize, line: BString }, - #[error("Attribute in line {line_number} has non-ascii characters or starts with '-': {attribute}")] - AttributeName { line_number: usize, attribute: BString }, - #[error("Macro in line {line_number} has non-ascii characters or starts with '-': {macro_name}")] - MacroName { line_number: usize, macro_name: BString }, - #[error("Could not unquote attributes line")] - Unquote(#[from] gix_quote::ansi_c::undo::Error), - } -} -pub use error::Error; +/// The error returned by [`parse::Lines`][crate::parse::Lines]. +pub type Error = gix_error::Exn; /// An iterator over attribute assignments, parsed line by line. 
pub struct Lines<'a> { @@ -77,7 +63,7 @@ fn check_attr(attr: &BStr) -> Result, name::Error> { attr_valid(attr) .then(|| NameRef(KStringRef::from_ref(attr.to_str().expect("no illformed utf8")))) - .ok_or_else(|| name::Error { attribute: attr.into() }) + .ok_or_raise(|| gix_error::message!("Attribute has non-ascii characters or starts with '-': {attr}")) } impl<'a> Iterator for Iter<'a> { @@ -129,10 +115,11 @@ fn parse_line(line: &BStr, line_number: usize) -> Option, } let (line, attrs): (Cow<'_, _>, _) = if line.starts_with(b"\"") { - let (unquoted, consumed) = match gix_quote::ansi_c::undo(line) { - Ok(res) => res, - Err(err) => return Some(Err(err.into())), - }; + let (unquoted, consumed) = + match gix_quote::ansi_c::undo(line).or_raise(|| message("Could not unquote attributes line")) { + Ok(res) => res, + Err(err) => return Some(Err(err)), + }; (unquoted, &line[consumed..]) } else { line.find_byteset(BLANKS) @@ -142,18 +129,16 @@ fn parse_line(line: &BStr, line_number: usize) -> Option, let kind_res = match line.strip_prefix(b"[attr]") { Some(macro_name) => check_attr(macro_name.into()) - .map_err(|err| Error::MacroName { - line_number, - macro_name: err.attribute, - }) + .or_raise(|| gix_error::message!("Macro in line {line_number} has non-ascii characters or starts with '-'")) .map(|name| Kind::Macro(name.to_owned())), None => { let pattern = gix_glob::Pattern::from_bytes(line.as_ref())?; if pattern.mode.contains(gix_glob::pattern::Mode::NEGATIVE) { - Err(Error::PatternNegation { - line: line.into_owned(), - line_number, - }) + Err(gix_error::message!( + r"Line {line_number} has a negative pattern, for literal characters use \!: {}", + line + ) + .raise()) } else { Ok(Kind::Pattern(pattern)) } diff --git a/gix-attributes/tests/parse/mod.rs b/gix-attributes/tests/parse/mod.rs index b7d929360d..307a551824 100644 --- a/gix-attributes/tests/parse/mod.rs +++ b/gix-attributes/tests/parse/mod.rs @@ -94,17 +94,13 @@ fn 
exclamation_marks_must_be_escaped_or_error_unlike_gitignore() { line(r"\!hello"), (pattern(r"!hello", Mode::NO_SUB_DIR, None), vec![], 1) ); - assert!(matches!( - try_line(r"!hello"), - Err(parse::Error::PatternNegation { line_number: 1, .. }) - )); + let err = try_line(r"!hello").unwrap_err(); + assert!(err.to_string().contains("negative pattern"), "{err}"); assert!(lenient_lines(r#"!hello"#).is_empty()); + let err = try_line(r#""!hello""#).unwrap_err(); assert!( - matches!( - try_line(r#""!hello""#), - Err(parse::Error::PatternNegation { line_number: 1, .. }), - ), - "even in quotes they trigger…" + err.to_string().contains("negative pattern"), + "even in quotes they trigger…: {err}" ); assert!(lenient_lines(r#""!hello""#).is_empty()); assert_eq!( @@ -116,7 +112,8 @@ fn exclamation_marks_must_be_escaped_or_error_unlike_gitignore() { #[test] fn invalid_escapes_in_quotes_are_an_error() { - assert!(matches!(try_line(r#""\!hello""#), Err(parse::Error::Unquote(_)))); + let err = try_line(r#""\!hello""#).unwrap_err(); + assert!(err.to_string().contains("unquote"), "{err}"); assert!(lenient_lines(r#""\!hello""#).is_empty()); } @@ -170,47 +167,33 @@ fn macros_can_be_empty() { #[test] fn custom_macros_must_be_valid_attribute_names() { - assert!(matches!( - try_line(r"[attr]-prefixdash"), - Err(parse::Error::MacroName { line_number: 1, .. }) - )); + let err = try_line(r"[attr]-prefixdash").unwrap_err(); + assert!(err.to_string().contains("Macro"), "{err}"); assert!(lenient_lines(r"[attr]-prefixdash").is_empty()); - assert!(matches!( - try_line(r"[attr]!exclamation"), - Err(parse::Error::MacroName { line_number: 1, .. }) - )); - assert!(matches!( - try_line(r"[attr]assignment=value"), - Err(parse::Error::MacroName { line_number: 1, .. }) - )); - assert!(matches!( - try_line(r"[attr]你好"), - Err(parse::Error::MacroName { line_number: 1, .. 
}) - )); + let err = try_line(r"[attr]!exclamation").unwrap_err(); + assert!(err.to_string().contains("Macro"), "{err}"); + let err = try_line(r"[attr]assignment=value").unwrap_err(); + assert!(err.to_string().contains("Macro"), "{err}"); + let err = try_line(r"[attr]你好").unwrap_err(); + assert!(err.to_string().contains("Macro"), "{err}"); assert!(lenient_lines(r"[attr]你好").is_empty()); } #[test] fn attribute_names_must_not_begin_with_dash_and_must_be_ascii_only() { - assert!(matches!( - try_line(r"p !-a"), - Err(parse::Error::AttributeName { line_number: 1, .. }) - )); + let err = try_line(r"p !-a").unwrap_err(); + assert!(err.to_string().contains("Attribute"), "{err}"); assert!(lenient_lines(r"p !-a").is_empty()); + let err = try_line(r#"p !!a"#).unwrap_err(); assert!( - matches!( - try_line(r#"p !!a"#), - Err(parse::Error::AttributeName { line_number: 1, .. }) - ), - "exclamation marks aren't allowed either" + err.to_string().contains("Attribute"), + "exclamation marks aren't allowed either: {err}" ); assert!(lenient_lines(r#"p !!a"#).is_empty()); + let err = try_line(r#"p 你好"#).unwrap_err(); assert!( - matches!( - try_line(r#"p 你好"#), - Err(parse::Error::AttributeName { line_number: 1, .. 
}) - ), - "nor is utf-8 encoded characters - gitoxide could consider to relax this when established" + err.to_string().contains("Attribute"), + "nor is utf-8 encoded characters - gitoxide could consider to relax this when established: {err}" ); assert!(lenient_lines(r#"p 你好"#).is_empty()); } @@ -386,13 +369,11 @@ fn try_lines(input: &str) -> Result>, parse::Error> { fn expand( input: Result<(parse::Kind, parse::Iter<'_>, usize), parse::Error>, ) -> Result, parse::Error> { + use gix_error::ResultExt; let (pattern, attrs, line_no) = input?; let attrs = attrs .map(|r| r.map(|attr| (attr.name.as_str().into(), attr.state))) .collect::, _>>() - .map_err(|e| parse::Error::AttributeName { - attribute: e.attribute, - line_number: line_no, - })?; + .or_raise(|| gix_error::message!("Attribute in line {line_no} has non-ascii characters or starts with '-'"))?; Ok((pattern, attrs, line_no)) } diff --git a/gix-bitmap/Cargo.toml b/gix-bitmap/Cargo.toml index 97293fe6c3..89ce0b811f 100644 --- a/gix-bitmap/Cargo.toml +++ b/gix-bitmap/Cargo.toml @@ -16,7 +16,7 @@ doctest = false test = true [dependencies] -thiserror = "2.0.18" +gix-error = { version = "^0.0.0", path = "../gix-error" } [dev-dependencies] gix-testtools = { path = "../tests/tools" } diff --git a/gix-bitmap/src/ewah.rs b/gix-bitmap/src/ewah.rs index c9599be313..eb1dd94acd 100644 --- a/gix-bitmap/src/ewah.rs +++ b/gix-bitmap/src/ewah.rs @@ -1,25 +1,16 @@ /// pub mod decode { /// The error returned by [`decode()`][super::decode()]. - #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error("{}", message)] - Corrupt { message: &'static str }, - } + pub type Error = gix_error::Exn; } /// Decode `data` as EWAH bitmap. 
pub fn decode(data: &[u8]) -> Result<(Vec, &[u8]), decode::Error> { - use self::decode::Error; use crate::decode; + use gix_error::{message, OptionExt}; - let (num_bits, data) = decode::u32(data).ok_or(Error::Corrupt { - message: "eof reading amount of bits", - })?; - let (len, data) = decode::u32(data).ok_or(Error::Corrupt { - message: "eof reading chunk length", - })?; + let (num_bits, data) = decode::u32(data).ok_or_raise(|| message("eof reading amount of bits"))?; + let (len, data) = decode::u32(data).ok_or_raise(|| message("eof reading chunk length"))?; let len = len as usize; // NOTE: git does this by copying all bytes first, and then it will change the endianness in a separate loop. @@ -27,9 +18,7 @@ pub fn decode(data: &[u8]) -> Result<(Vec, &[u8]), decode::Error> { // one day somebody will find out that it's worth it to use unsafe here. let (mut bits, data) = data .split_at_checked(len * std::mem::size_of::()) - .ok_or(Error::Corrupt { - message: "eof while reading bit data", - })?; + .ok_or_raise(|| message("eof while reading bit data"))?; let mut buf = std::vec::Vec::::with_capacity(len); for _ in 0..len { let (bit_num, rest) = bits.split_at(std::mem::size_of::()); @@ -37,9 +26,7 @@ pub fn decode(data: &[u8]) -> Result<(Vec, &[u8]), decode::Error> { buf.push(u64::from_be_bytes(bit_num.try_into().unwrap())); } - let (rlw, data) = decode::u32(data).ok_or(Error::Corrupt { - message: "eof while reading run length width", - })?; + let (rlw, data) = decode::u32(data).ok_or_raise(|| message("eof while reading run length width"))?; Ok(( Vec { diff --git a/gix-config/tests/Cargo.toml b/gix-config/tests/Cargo.toml index 173243867e..8b2f551c93 100644 --- a/gix-config/tests/Cargo.toml +++ b/gix-config/tests/Cargo.toml @@ -22,6 +22,7 @@ path = "mem.rs" [dev-dependencies] gix-config = { path = ".." 
} +gix-error = { version = "^0.0.0", path = "../../gix-error" } gix-testtools = { path = "../../tests/tools" } gix-ref = { path = "../../gix-ref" } gix-path = { path = "../../gix-path" } diff --git a/gix-config/tests/config/file/init/from_paths/includes/conditional/gitdir/util.rs b/gix-config/tests/config/file/init/from_paths/includes/conditional/gitdir/util.rs index c78da3b016..f33ea5c48b 100644 --- a/gix-config/tests/config/file/init/from_paths/includes/conditional/gitdir/util.rs +++ b/gix-config/tests/config/file/init/from_paths/includes/conditional/gitdir/util.rs @@ -69,10 +69,10 @@ impl Condition { impl GitEnv { pub fn repo_name(repo_name: impl AsRef) -> crate::Result { let tempdir = gix_testtools::tempfile::tempdir()?; - let root_dir = gix_path::realpath(tempdir.path())?; + let root_dir = gix_path::realpath(tempdir.path()).map_err(gix_error::Exn::into_error)?; let worktree_dir = root_dir.join(repo_name); std::fs::create_dir_all(&worktree_dir)?; - let home_dir = gix_path::realpath(tempdir.path())?; + let home_dir = gix_path::realpath(tempdir.path()).map_err(gix_error::Exn::into_error)?; Ok(Self { tempdir, root_dir, diff --git a/gix-credentials/Cargo.toml b/gix-credentials/Cargo.toml index c59f14c964..b87a8e1558 100644 --- a/gix-credentials/Cargo.toml +++ b/gix-credentials/Cargo.toml @@ -37,6 +37,7 @@ bstr = { version = "1.12.0", default-features = false, features = ["std"] } document-features = { version = "0.2.1", optional = true } [dev-dependencies] +gix-error = { version = "^0.0.0", path = "../gix-error" } gix-quote = { path = "../gix-quote" } gix-sec = { path = "../gix-sec" } gix-testtools = { path = "../tests/tools" } diff --git a/gix-credentials/tests/helper/invoke.rs b/gix-credentials/tests/helper/invoke.rs index 8b60c5476e..9f6f0dd1e9 100644 --- a/gix-credentials/tests/helper/invoke.rs +++ b/gix-credentials/tests/helper/invoke.rs @@ -92,7 +92,7 @@ mod program { assert_eq!( gix_credentials::helper::invoke( &mut Program::from_custom_definition( - 
gix_path::into_bstr(gix_path::realpath(gix_testtools::fixture_path("custom-helper.sh"))?) + gix_path::into_bstr(gix_path::realpath(gix_testtools::fixture_path("custom-helper.sh")).map_err(gix_error::Exn::into_error)?) .into_owned() ), &helper::Action::get_for_url("/does/not/matter"), diff --git a/gix-diff/tests/Cargo.toml b/gix-diff/tests/Cargo.toml index a2cd30864c..a84e9ec891 100644 --- a/gix-diff/tests/Cargo.toml +++ b/gix-diff/tests/Cargo.toml @@ -18,6 +18,7 @@ path = "diff/main.rs" [dev-dependencies] gix-diff = { path = "..", features = ["blob-experimental"] } +gix-error = { version = "^0.0.0", path = "../../gix-error" } gix-index = { path = "../../gix-index" } gix-pathspec = { path = "../../gix-pathspec" } gix-hash = { path = "../../gix-hash" } diff --git a/gix-diff/tests/diff/index.rs b/gix-diff/tests/diff/index.rs index 140703bece..b8d3ef5be1 100644 --- a/gix-diff/tests/diff/index.rs +++ b/gix-diff/tests/diff/index.rs @@ -333,10 +333,10 @@ fn renames_by_similarity_with_limit() -> crate::Result { assert_eq!( actual_ids, [ - gix_hash::ObjectId::from_str("f00c965d8307308469e537302baa73048488f162")?, - gix_hash::ObjectId::from_str("683cfcc0f47566c332aa45d81c5cc98acb4aab49")?, - gix_hash::ObjectId::from_str("3bb459b831ea471b9cd1cbb7c6d54a74251a711b")?, - gix_hash::ObjectId::from_str("0a805f8e02d72bd354c1f00607906de2e49e00d6")?, + gix_hash::ObjectId::from_str("f00c965d8307308469e537302baa73048488f162").map_err(gix_error::Exn::into_error)?, + gix_hash::ObjectId::from_str("683cfcc0f47566c332aa45d81c5cc98acb4aab49").map_err(gix_error::Exn::into_error)?, + gix_hash::ObjectId::from_str("3bb459b831ea471b9cd1cbb7c6d54a74251a711b").map_err(gix_error::Exn::into_error)?, + gix_hash::ObjectId::from_str("0a805f8e02d72bd354c1f00607906de2e49e00d6").map_err(gix_error::Exn::into_error)?, ] ); @@ -1342,7 +1342,7 @@ mod util { let hex_id = std::fs::read_to_string(&tree_id_path).map_err(|err| { std::io::Error::other(format!("Could not read '{}': {}", tree_id_path.display(), err)) 
})?; - let tree_id = gix_hash::ObjectId::from_hex(hex_id.trim().as_bytes())?; + let tree_id = gix_hash::ObjectId::from_hex(hex_id.trim().as_bytes()).map_err(gix_error::Exn::into_error)?; Ok(gix_index::State::from_tree(&tree_id, odb, Default::default())?) } } diff --git a/gix-diff/tests/diff/main.rs b/gix-diff/tests/diff/main.rs index 667f1daf8d..cd3735c5c9 100644 --- a/gix-diff/tests/diff/main.rs +++ b/gix-diff/tests/diff/main.rs @@ -52,7 +52,7 @@ mod util { impl ObjectDb { /// Insert `data` and return its hash. That can be used to find it again. pub fn insert(&mut self, data: &str) -> Result { - let id = gix_object::compute_hash(gix_hash::Kind::Sha1, gix_object::Kind::Blob, data.as_bytes())?; + let id = gix_object::compute_hash(gix_hash::Kind::Sha1, gix_object::Kind::Blob, data.as_bytes()).map_err(gix_error::Exn::into_error)?; self.data_by_id.insert(id, data.into()); Ok(id) } diff --git a/gix-diff/tests/diff/tree_with_rewrites.rs b/gix-diff/tests/diff/tree_with_rewrites.rs index 9bfd57d4fd..8544f08318 100644 --- a/gix-diff/tests/diff/tree_with_rewrites.rs +++ b/gix-diff/tests/diff/tree_with_rewrites.rs @@ -1862,7 +1862,7 @@ mod util { let tree_id_path = root.join(tree).with_extension("tree"); let hex_id = std::fs::read_to_string(&tree_id_path) .map_err(|err| std::io::Error::other(format!("Could not read '{}': {}", tree_id_path.display(), err)))?; - let tree_id = gix_hash::ObjectId::from_hex(hex_id.trim().as_bytes())?; + let tree_id = gix_hash::ObjectId::from_hex(hex_id.trim().as_bytes()).map_err(gix_error::Exn::into_error)?; let mut buf = Vec::new(); odb.find_tree(&tree_id, &mut buf)?; Ok(buf) diff --git a/gix-dir/Cargo.toml b/gix-dir/Cargo.toml index 94e960f508..ded3796084 100644 --- a/gix-dir/Cargo.toml +++ b/gix-dir/Cargo.toml @@ -30,6 +30,7 @@ bstr = { version = "1.12.0", default-features = false } thiserror = "2.0.18" [dev-dependencies] +gix-error = { version = "^0.0.0", path = "../gix-error" } gix-testtools = { path = "../tests/tools" } gix-fs = { path = 
"../gix-fs" } pretty_assertions = "1.4.0" diff --git a/gix-dir/tests/dir/walk.rs b/gix-dir/tests/dir/walk.rs index 2359459d66..fd7764f1c7 100644 --- a/gix-dir/tests/dir/walk.rs +++ b/gix-dir/tests/dir/walk.rs @@ -499,7 +499,7 @@ fn ignored_dir_with_cwd_handling() -> crate::Result { "even if the traversal root is for deletion, unless the CWD is set it will be collapsed (no special cases)" ); - let real_root = gix_path::realpath(&root)?; + let real_root = gix_path::realpath(&root).map_err(gix_error::Exn::into_error)?; let ((out, _root), entries) = collect_filtered_with_cwd( &real_root, Some(&real_root.join("ignored")), @@ -535,7 +535,7 @@ fn ignored_dir_with_cwd_handling() -> crate::Result { "the traversal starts from the top, but we automatically prevent the 'd' directory from being deleted by stopping its collapse." ); - let real_root = gix_path::realpath(fixture("subdir-untracked-and-ignored"))?; + let real_root = gix_path::realpath(fixture("subdir-untracked-and-ignored")).map_err(gix_error::Exn::into_error)?; let ((out, _root), entries) = collect_filtered_with_cwd( &real_root, None, @@ -579,7 +579,7 @@ fn ignored_dir_with_cwd_handling() -> crate::Result { #[test] fn ignored_with_cwd_handling() -> crate::Result { - let root = gix_path::realpath(fixture("ignored-with-empty"))?; + let root = gix_path::realpath(fixture("ignored-with-empty")).map_err(gix_error::Exn::into_error)?; let ((out, _root), entries) = collect_filtered_with_cwd( &root, None, @@ -731,7 +731,7 @@ fn only_untracked_with_cwd_handling() -> crate::Result { "even if the traversal root is for deletion, unless the CWD is set it will be collapsed (no special cases)" ); - let real_root = gix_path::realpath(&root)?; + let real_root = gix_path::realpath(&root).map_err(gix_error::Exn::into_error)?; let ((out, _root), entries) = collect_filtered_with_cwd( &real_root, Some(&real_root), @@ -2385,7 +2385,7 @@ fn untracked_and_ignored_collapse_handling_for_deletion_mixed() -> crate::Result but also how 'd/d' 
collapses as our current working directory the worktree" ); - let real_root = gix_path::realpath(&root)?; + let real_root = gix_path::realpath(&root).map_err(gix_error::Exn::into_error)?; let ((out, _root), entries) = collect_filtered_with_cwd( &real_root, Some(&real_root), diff --git a/gix-discover/Cargo.toml b/gix-discover/Cargo.toml index ff1fd153dd..351aa68de5 100644 --- a/gix-discover/Cargo.toml +++ b/gix-discover/Cargo.toml @@ -27,6 +27,7 @@ thiserror = "2.0.18" dunce = "1.0.3" [dev-dependencies] +gix-error = { version = "^0.0.0", path = "../gix-error" } gix-testtools = { path = "../tests/tools" } serial_test = { version = "3.3.1", default-features = false } is_ci = "1.1.1" diff --git a/gix-discover/tests/discover/upwards/ceiling_dirs.rs b/gix-discover/tests/discover/upwards/ceiling_dirs.rs index f0b36b602e..b5694f0e4c 100644 --- a/gix-discover/tests/discover/upwards/ceiling_dirs.rs +++ b/gix-discover/tests/discover/upwards/ceiling_dirs.rs @@ -183,7 +183,7 @@ fn no_matching_ceiling_dirs_errors_by_default() -> crate::Result { fn ceilings_are_adjusted_to_match_search_dir() -> crate::Result { let relative_work_dir = repo_path()?; let cwd = std::env::current_dir()?; - let absolute_ceiling_dir = gix_path::realpath_opts(&relative_work_dir, &cwd, 8)?; + let absolute_ceiling_dir = gix_path::realpath_opts(&relative_work_dir, &cwd, 8).map_err(gix_error::Exn::into_error)?; let dir = relative_work_dir.join("some"); assert!(dir.is_relative()); let (repo_path, _trust) = gix_discover::upwards_opts( @@ -196,7 +196,7 @@ fn ceilings_are_adjusted_to_match_search_dir() -> crate::Result { assert_repo_is_current_workdir(repo_path, &relative_work_dir); assert!(relative_work_dir.is_relative()); - let absolute_dir = gix_path::realpath_opts(relative_work_dir.join("some").as_ref(), &cwd, 8)?; + let absolute_dir = gix_path::realpath_opts(relative_work_dir.join("some").as_ref(), &cwd, 8).map_err(gix_error::Exn::into_error)?; let (repo_path, _trust) = gix_discover::upwards_opts( 
&absolute_dir, Options { diff --git a/gix-error/src/exn/impls.rs b/gix-error/src/exn/impls.rs index 392cecc44f..7f51bc535b 100644 --- a/gix-error/src/exn/impls.rs +++ b/gix-error/src/exn/impls.rs @@ -28,6 +28,12 @@ impl From for Exn { } } +impl From for Exn { + fn from(v: std::convert::Infallible) -> Self { + match v {} + } +} + impl Exn { /// Create a new exception with the given error. /// diff --git a/gix-features/Cargo.toml b/gix-features/Cargo.toml index 2c051ea980..f304a5fdd5 100644 --- a/gix-features/Cargo.toml +++ b/gix-features/Cargo.toml @@ -57,7 +57,7 @@ crc32 = ["dep:crc32fast"] ## Enable the usage of zlib-related utilities to compress or decompress data. ## This enables and uses the high-performance `zlib-rs` backend. -zlib = ["dep:zlib-rs", "dep:thiserror"] +zlib = ["dep:zlib-rs", "dep:gix-error"] #! ### Other @@ -109,7 +109,7 @@ bytes = { version = "1.11.1", optional = true } # zlib module zlib-rs = { version = "0.6.0", optional = true } -thiserror = { version = "2.0.18", optional = true } +gix-error = { version = "^0.0.0", path = "../gix-error", optional = true } # Note: once_cell is kept for OnceCell type because std::sync::OnceLock::get_or_try_init() is not yet stable. # Once it's stabilized (tracking issue #109737), we can remove this dependency. diff --git a/gix-features/src/zlib/mod.rs b/gix-features/src/zlib/mod.rs index 42489d4c42..40b3d30576 100644 --- a/gix-features/src/zlib/mod.rs +++ b/gix-features/src/zlib/mod.rs @@ -1,4 +1,4 @@ -use zlib_rs::InflateError; +use gix_error::{message, ErrorExt as _, Exn, Message}; /// A type to hold all state needed for decompressing a ZLIB encoded stream. pub struct Decompress(zlib_rs::Inflate); @@ -46,7 +46,15 @@ impl Decompress { FlushDecompress::Finish => zlib_rs::InflateFlush::Finish, }; - let status = self.0.decompress(input, output, inflate_flush)?; + let status = self.0.decompress(input, output, inflate_flush).map_err(|e| { + match e { + zlib_rs::InflateError::NeedDict { .. 
} => message("Decompressing this input requires a dictionary"), + zlib_rs::InflateError::StreamError => message("stream error"), + zlib_rs::InflateError::DataError => message("Invalid input data"), + zlib_rs::InflateError::MemError => message("Not enough memory"), + } + .raise() + })?; match status { zlib_rs::Status::Ok => Ok(Status::Ok), zlib_rs::Status::BufError => Ok(Status::BufError), @@ -56,29 +64,7 @@ impl Decompress { } /// The error produced by [`Decompress::decompress()`]. -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum DecompressError { - #[error("stream error")] - StreamError, - #[error("Not enough memory")] - InsufficientMemory, - #[error("Invalid input data")] - DataError, - #[error("Decompressing this input requires a dictionary")] - NeedDict, -} - -impl From for DecompressError { - fn from(value: InflateError) -> Self { - match value { - InflateError::NeedDict { .. } => DecompressError::NeedDict, - InflateError::StreamError => DecompressError::StreamError, - InflateError::DataError => DecompressError::DataError, - InflateError::MemError => DecompressError::InsufficientMemory, - } - } -} +pub type DecompressError = Exn; /// The status returned by [`Decompress::decompress()`]. 
#[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -122,16 +108,7 @@ pub enum FlushDecompress { /// non-streaming interfaces for decompression pub mod inflate { /// The error returned by various [Inflate methods][super::Inflate] - #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error("Could not write all bytes when decompressing content")] - WriteInflated(#[from] std::io::Error), - #[error("Could not decode zip stream, status was '{0}'")] - Inflate(#[from] super::DecompressError), - #[error("The zlib status indicated an error, status was '{0:?}'")] - Status(super::Status), - } + pub type Error = gix_error::Exn; } /// Decompress a few bytes of a zlib stream without allocation @@ -144,9 +121,13 @@ pub struct Inflate { impl Inflate { /// Run the decompressor exactly once. Cannot be run multiple times pub fn once(&mut self, input: &[u8], out: &mut [u8]) -> Result<(Status, usize, usize), inflate::Error> { + use gix_error::ResultExt; let before_in = self.state.total_in(); let before_out = self.state.total_out(); - let status = self.state.decompress(input, out, FlushDecompress::None)?; + let status = self + .state + .decompress(input, out, FlushDecompress::None) + .or_raise(|| message("Could not decode zip stream"))?; Ok(( status, (self.state.total_in() - before_in) as usize, diff --git a/gix-features/src/zlib/stream/deflate/mod.rs b/gix-features/src/zlib/stream/deflate/mod.rs index 1e6f7650ef..e7f60938ab 100644 --- a/gix-features/src/zlib/stream/deflate/mod.rs +++ b/gix-features/src/zlib/stream/deflate/mod.rs @@ -1,5 +1,6 @@ +use gix_error::{message, ErrorExt as _, Exn, Message}; + use crate::zlib::Status; -use zlib_rs::DeflateError; const BUF_SIZE: usize = 4096 * 8; @@ -67,7 +68,14 @@ impl Compress { FlushCompress::Full => zlib_rs::DeflateFlush::FullFlush, FlushCompress::Finish => zlib_rs::DeflateFlush::Finish, }; - let status = self.0.compress(input, output, flush)?; + let status = self.0.compress(input, output, flush).map_err(|e| { + 
match e { + zlib_rs::DeflateError::StreamError => message("stream error"), + zlib_rs::DeflateError::DataError => message("The input is not a valid deflate stream."), + zlib_rs::DeflateError::MemError => message("Not enough memory"), + } + .raise() + })?; match status { zlib_rs::Status::Ok => Ok(Status::Ok), zlib_rs::Status::BufError => Ok(Status::BufError), @@ -77,26 +85,7 @@ impl Compress { } /// The error produced by [`Compress::compress()`]. -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum CompressError { - #[error("stream error")] - StreamError, - #[error("The input is not a valid deflate stream.")] - DataError, - #[error("Not enough memory")] - InsufficientMemory, -} - -impl From for CompressError { - fn from(value: zlib_rs::DeflateError) -> Self { - match value { - DeflateError::StreamError => CompressError::StreamError, - DeflateError::DataError => CompressError::DataError, - DeflateError::MemError => CompressError::InsufficientMemory, - } - } -} +pub type CompressError = Exn; /// Values which indicate the form of flushing to be used when compressing /// in-memory data. @@ -186,7 +175,7 @@ mod impls { let status = self .compressor .compress(buf, &mut self.buf, flush) - .map_err(io::Error::other)?; + .map_err(|e| io::Error::other(e.into_error()))?; let written = self.compressor.total_out() - last_total_out; if written > 0 { diff --git a/gix-fs/Cargo.toml b/gix-fs/Cargo.toml index 3d1a992475..4756ec7ba5 100644 --- a/gix-fs/Cargo.toml +++ b/gix-fs/Cargo.toml @@ -23,7 +23,7 @@ bstr = "1.12.0" gix-path = { version = "^0.11.0", path = "../gix-path" } gix-features = { version = "^0.46.0", path = "../gix-features", features = ["fs-read-dir"] } gix-utils = { version = "^0.3.1", path = "../gix-utils" } -thiserror = "2.0.18" +gix-error = { version = "^0.0.0", path = "../gix-error" } serde = { version = "1.0.114", optional = true, default-features = false, features = ["std", "derive"] } # For `Capabilities` to assure parallel operation works. 
diff --git a/gix-fs/src/stack.rs b/gix-fs/src/stack.rs index 11aebe3f83..c952717160 100644 --- a/gix-fs/src/stack.rs +++ b/gix-fs/src/stack.rs @@ -5,21 +5,14 @@ use std::{ use bstr::{BStr, BString, ByteSlice}; +use gix_error::ErrorExt; + use crate::Stack; /// pub mod to_normal_path_components { - use std::path::PathBuf; - /// The error used in [`ToNormalPathComponents::to_normal_path_components()`](super::ToNormalPathComponents::to_normal_path_components()). - #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error("Input path \"{path}\" contains relative or absolute components", path = .0.display())] - NotANormalComponent(PathBuf), - #[error("Could not convert to UTF8 or from UTF8 due to ill-formed input")] - IllegalUtf8, - } + pub type Error = gix_error::Exn; } /// Obtain an iterator over `OsStr`-components which are normal, none-relative and not absolute. @@ -46,9 +39,11 @@ fn component_to_os_str<'a>( ) -> Result<&'a OsStr, to_normal_path_components::Error> { match component { Component::Normal(os_str) => Ok(os_str), - _ => Err(to_normal_path_components::Error::NotANormalComponent( - path_with_component.to_owned(), - )), + _ => Err(gix_error::message!( + "Input path \"{}\" contains relative or absolute components", + path_with_component.display() + ) + .raise()), } } @@ -81,7 +76,7 @@ fn bytes_component_to_os_str<'a>( return None; } let component = match gix_path::try_from_byte_slice(component.as_bstr()) - .map_err(|_| to_normal_path_components::Error::IllegalUtf8) + .map_err(|_| gix_error::message("Could not convert to UTF8 or from UTF8 due to ill-formed input").raise()) { Ok(c) => c, Err(err) => return Some(Err(err)), @@ -197,7 +192,7 @@ impl Stack { } while let Some(comp) = components.next() { - let comp = comp.map_err(std::io::Error::other)?; + let comp = comp.map_err(|e| std::io::Error::other(e.into_error()))?; let is_last_component = components.peek().is_none(); self.current_is_directory = !is_last_component; 
self.current.push(comp); diff --git a/gix-hash/Cargo.toml b/gix-hash/Cargo.toml index dd271d6893..8028ddfd04 100644 --- a/gix-hash/Cargo.toml +++ b/gix-hash/Cargo.toml @@ -27,7 +27,7 @@ serde = ["dep:serde", "faster-hex/serde"] [dependencies] gix-features = { version = "^0.46.0", path = "../gix-features", features = ["progress"] } -thiserror = "2.0.18" +gix-error = { version = "^0.0.0", path = "../gix-error" } faster-hex = { version = "0.10.0", default-features = false, features = ["std"] } serde = { version = "1.0.114", optional = true, default-features = false, features = ["derive"] } sha1-checked = { version = "0.10.0", optional = true, default-features = false } diff --git a/gix-hash/src/hasher.rs b/gix-hash/src/hasher.rs index a6f25382f2..8652424721 100644 --- a/gix-hash/src/hasher.rs +++ b/gix-hash/src/hasher.rs @@ -1,10 +1,5 @@ /// The error returned by [`Hasher::try_finalize()`](crate::Hasher::try_finalize()). -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error("Detected SHA-1 collision attack with digest {digest}")] - CollisionAttack { digest: crate::ObjectId }, -} +pub type Error = gix_error::Exn; pub(super) mod _impl { #[cfg(feature = "sha1")] @@ -13,7 +8,7 @@ pub(super) mod _impl { #[cfg(all(not(feature = "sha1"), feature = "sha256"))] use sha2::Digest; - use crate::hasher::Error; + use gix_error::ErrorExt as _; /// Hash implementations that can be used once. #[derive(Clone)] @@ -64,7 +59,7 @@ pub(super) mod _impl { // turning the return type into `Result` when this crate is // compiled with SHA-256 support only. 
#[inline] - pub fn try_finalize(self) -> Result { + pub fn try_finalize(self) -> Result { match self { #[cfg(feature = "sha1")] Hasher::Sha1(sha1) => match sha1.try_finalize() { @@ -83,9 +78,11 @@ pub(super) mod _impl { std::hint::unreachable_unchecked() } } - CollisionResult::Collision(digest) => Err(Error::CollisionAttack { - digest: crate::ObjectId::Sha1(digest.into()), - }), + CollisionResult::Collision(digest) => Err(gix_error::message!( + "Detected SHA-1 collision attack with digest {}", + crate::ObjectId::Sha1(digest.into()) + ) + .raise()), }, #[cfg(feature = "sha256")] Hasher::Sha256(sha256) => Ok(crate::ObjectId::Sha256(sha256.finalize().into())), diff --git a/gix-hash/src/io.rs b/gix-hash/src/io.rs index 49a51e36e7..db9bc87cff 100644 --- a/gix-hash/src/io.rs +++ b/gix-hash/src/io.rs @@ -1,16 +1,9 @@ -use crate::hasher; - /// The error type for I/O operations that compute hashes. -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error(transparent)] - Io(#[from] std::io::Error), - #[error("Failed to hash data")] - Hasher(#[from] hasher::Error), -} +pub type Error = gix_error::Exn; pub(super) mod _impl { + use gix_error::{message, ErrorExt as _, ResultExt as _}; + use crate::{hasher, io::Error, Hasher}; /// Compute the hash of `kind` for the bytes in the file at `path`, hashing only the first `num_bytes_from_start` @@ -30,7 +23,7 @@ pub(super) mod _impl { should_interrupt: &std::sync::atomic::AtomicBool, ) -> Result { bytes( - &mut std::fs::File::open(path)?, + &mut std::fs::File::open(path).or_raise(|| message("Failed to open file"))?, num_bytes_from_start, kind, progress, @@ -70,16 +63,16 @@ pub(super) mod _impl { while bytes_left > 0 { let out = &mut buf[..BUF_SIZE.min(bytes_left as usize)]; - read.read_exact(out)?; + read.read_exact(out).or_raise(|| message("Failed to read data"))?; bytes_left -= out.len() as u64; progress.inc_by(out.len()); hasher.update(out); if should_interrupt.load(std::sync::atomic::Ordering::SeqCst) 
{ - return Err(std::io::Error::other("Interrupted").into()); + return Err(gix_error::message("Interrupted").raise()); } } - let id = hasher.try_finalize()?; + let id = hasher.try_finalize().or_raise(|| message("Failed to hash data"))?; progress.show_throughput(start); Ok(id) } diff --git a/gix-hash/src/object_id.rs b/gix-hash/src/object_id.rs index 8dbba3f3b8..397abb21ba 100644 --- a/gix-hash/src/object_id.rs +++ b/gix-hash/src/object_id.rs @@ -42,6 +42,8 @@ impl Hash for ObjectId { pub mod decode { use std::str::FromStr; + use gix_error::ErrorExt as _; + use crate::object_id::ObjectId; #[cfg(feature = "sha1")] @@ -51,14 +53,7 @@ pub mod decode { use crate::{SIZE_OF_SHA256_DIGEST, SIZE_OF_SHA256_HEX_DIGEST}; /// An error returned by [`ObjectId::from_hex()`][crate::ObjectId::from_hex()] - #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error("A hash sized {0} hexadecimal characters is invalid")] - InvalidHexEncodingLength(usize), - #[error("Invalid character encountered")] - Invalid, - } + pub type Error = gix_error::Exn; /// Hash decoding impl ObjectId { @@ -74,7 +69,9 @@ pub mod decode { ObjectId::Sha1({ let mut buf = [0; SIZE_OF_SHA1_DIGEST]; faster_hex::hex_decode(buffer, &mut buf).map_err(|err| match err { - faster_hex::Error::InvalidChar | faster_hex::Error::Overflow => Error::Invalid, + faster_hex::Error::InvalidChar | faster_hex::Error::Overflow => { + gix_error::message("Invalid character encountered").raise() + } faster_hex::Error::InvalidLength(_) => { unreachable!("BUG: This is already checked") } @@ -87,7 +84,9 @@ pub mod decode { ObjectId::Sha256({ let mut buf = [0; SIZE_OF_SHA256_DIGEST]; faster_hex::hex_decode(buffer, &mut buf).map_err(|err| match err { - faster_hex::Error::InvalidChar | faster_hex::Error::Overflow => Error::Invalid, + faster_hex::Error::InvalidChar | faster_hex::Error::Overflow => { + gix_error::message("Invalid character encountered").raise() + } faster_hex::Error::InvalidLength(_) => { 
unreachable!("BUG: This is already checked") } @@ -95,7 +94,7 @@ pub mod decode { buf }) }), - len => Err(Error::InvalidHexEncodingLength(len)), + len => Err(gix_error::message!("A hash sized {len} hexadecimal characters is invalid").raise()), } } } diff --git a/gix-hash/src/oid.rs b/gix-hash/src/oid.rs index 3b3b95e380..0e7cb4a032 100644 --- a/gix-hash/src/oid.rs +++ b/gix-hash/src/oid.rs @@ -1,5 +1,7 @@ use std::hash; +use gix_error::ErrorExt as _; + use crate::{Kind, ObjectId}; #[cfg(feature = "sha1")] @@ -73,12 +75,7 @@ impl std::fmt::Debug for oid { } /// The error returned when trying to convert a byte slice to an [`oid`] or [`ObjectId`] -#[allow(missing_docs)] -#[derive(Debug, thiserror::Error)] -pub enum Error { - #[error("Cannot instantiate git hash from a digest of length {0}")] - InvalidByteSliceLength(usize), -} +pub type Error = gix_error::Exn; /// Conversion impl oid { @@ -100,7 +97,7 @@ impl oid { &*(std::ptr::from_ref::<[u8]>(digest) as *const oid) }, ), - len => Err(Error::InvalidByteSliceLength(len)), + len => Err(gix_error::message!("Cannot instantiate git hash from a digest of length {len}").raise()), } } diff --git a/gix-hash/src/prefix.rs b/gix-hash/src/prefix.rs index bfb5b4d26d..039d3a52dd 100644 --- a/gix-hash/src/prefix.rs +++ b/gix-hash/src/prefix.rs @@ -1,36 +1,16 @@ use std::cmp::Ordering; +use gix_error::ErrorExt as _; + use crate::{oid, ObjectId, Prefix}; /// The error returned by [`Prefix::new()`]. -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error( - "The minimum hex length of a short object id is {}, got {hex_len}", - Prefix::MIN_HEX_LEN - )] - TooShort { hex_len: usize }, - #[error("An object of kind {object_kind} cannot be larger than {} in hex, but {hex_len} was requested", object_kind.len_in_hex())] - TooLong { object_kind: crate::Kind, hex_len: usize }, -} +pub type Error = gix_error::Exn; /// pub mod from_hex { /// The error returned by [`Prefix::from_hex`][super::Prefix::from_hex()]. 
- #[derive(Debug, Eq, PartialEq, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error( - "The minimum hex length of a short object id is {}, got {hex_len}", - super::Prefix::MIN_HEX_LEN - )] - TooShort { hex_len: usize }, - #[error("An id cannot be larger than {} chars in hex, but {hex_len} was requested", crate::Kind::longest().len_in_hex())] - TooLong { hex_len: usize }, - #[error("Invalid hex character")] - Invalid, - } + pub type Error = gix_error::Exn; } impl Prefix { @@ -43,12 +23,18 @@ impl Prefix { /// wide, with all other bytes and bits set to zero. pub fn new(id: &oid, hex_len: usize) -> Result { if hex_len > id.kind().len_in_hex() { - Err(Error::TooLong { - object_kind: id.kind(), - hex_len, - }) + Err(gix_error::message!( + "An object of kind {} cannot be larger than {} in hex, but {hex_len} was requested", + id.kind(), + id.kind().len_in_hex() + ) + .raise()) } else if hex_len < Self::MIN_HEX_LEN { - Err(Error::TooShort { hex_len }) + Err(gix_error::message!( + "The minimum hex length of a short object id is {}, got {hex_len}", + Self::MIN_HEX_LEN + ) + .raise()) } else { let mut prefix = ObjectId::null(id.kind()); let b = prefix.as_mut_slice(); @@ -97,7 +83,11 @@ impl Prefix { pub fn from_hex(value: &str) -> Result { let hex_len = value.len(); if hex_len < Self::MIN_HEX_LEN { - return Err(from_hex::Error::TooShort { hex_len }); + return Err(gix_error::message!( + "The minimum hex length of a short object id is {}, got {hex_len}", + Self::MIN_HEX_LEN + ) + .raise()); } Self::from_hex_nonempty(value) } @@ -108,9 +98,17 @@ impl Prefix { let hex_len = value.len(); if hex_len > crate::Kind::longest().len_in_hex() { - return Err(from_hex::Error::TooLong { hex_len }); + return Err(gix_error::message!( + "An id cannot be larger than {} chars in hex, but {hex_len} was requested", + crate::Kind::longest().len_in_hex() + ) + .raise()); } else if hex_len == 0 { - return Err(from_hex::Error::TooShort { hex_len }); + return 
Err(gix_error::message!( + "The minimum hex length of a short object id is {}, got {hex_len}", + Self::MIN_HEX_LEN + ) + .raise()); } let src = if value.len() % 2 == 0 { @@ -126,7 +124,9 @@ impl Prefix { faster_hex::hex_decode(src, &mut out).map(move |_| out) } .map_err(|e| match e { - faster_hex::Error::InvalidChar | faster_hex::Error::Overflow => from_hex::Error::Invalid, + faster_hex::Error::InvalidChar | faster_hex::Error::Overflow => { + gix_error::message("Invalid hex character").raise() + } faster_hex::Error::InvalidLength(_) => panic!("This is already checked"), })?; diff --git a/gix-hash/src/verify.rs b/gix-hash/src/verify.rs index 7fba80549b..08f77d4b88 100644 --- a/gix-hash/src/verify.rs +++ b/gix-hash/src/verify.rs @@ -1,13 +1,9 @@ -use crate::{oid, ObjectId}; +use gix_error::ErrorExt as _; + +use crate::oid; /// The error returned by [`oid::verify()`]. -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -#[error("Hash was {actual}, but should have been {expected}")] -pub struct Error { - pub actual: ObjectId, - pub expected: ObjectId, -} +pub type Error = gix_error::Exn; impl oid { /// Verify that `self` matches the `expected` object ID. 
@@ -18,10 +14,12 @@ impl oid { if self == expected { Ok(()) } else { - Err(Error { - actual: self.to_owned(), - expected: expected.to_owned(), - }) + Err(gix_error::message!( + "Hash was {}, but should have been {}", + self.to_owned(), + expected.to_owned() + ) + .raise()) } } } diff --git a/gix-hash/tests/hash/hasher.rs b/gix-hash/tests/hash/hasher.rs index 4208f86b17..9fa85706a5 100644 --- a/gix-hash/tests/hash/hasher.rs +++ b/gix-hash/tests/hash/hasher.rs @@ -29,19 +29,21 @@ fn size_of_hasher_sha1_and_sha256() { #[test] #[cfg(all(not(feature = "sha256"), feature = "sha1"))] fn size_of_try_finalize_return_type_sha1_only() { - assert_eq!( - std::mem::size_of::>(), - 21, - "The size of the return value is just 1 byte larger than just returning the object hash itself" + let oid_size = std::mem::size_of::(); + let result_size = std::mem::size_of::>(); + assert!( + result_size <= oid_size + 8, + "The boxed error keeps the Result size close to ObjectId: result={result_size}, oid={oid_size}" ); } #[test] #[cfg(all(feature = "sha256", feature = "sha1"))] fn size_of_try_finalize_return_type_sha1_and_sha256() { - assert_eq!( - std::mem::size_of::>(), - 34, - "The size of the return value is just 2 bytes larger than just returning the object hash itself" + let oid_size = std::mem::size_of::(); + let result_size = std::mem::size_of::>(); + assert!( + result_size <= oid_size + 8, + "The boxed error keeps the Result size close to ObjectId: result={result_size}, oid={oid_size}" ); } diff --git a/gix-hash/tests/hash/object_id.rs b/gix-hash/tests/hash/object_id.rs index eea2576527..01dc32deaf 100644 --- a/gix-hash/tests/hash/object_id.rs +++ b/gix-hash/tests/hash/object_id.rs @@ -17,29 +17,29 @@ mod from_hex { } mod invalid { - use gix_hash::{decode, ObjectId}; + use gix_hash::ObjectId; #[test] fn non_hex_characters() { - assert!(matches!( - ObjectId::from_hex(b"zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz").unwrap_err(), - decode::Error::Invalid - )); + let err = 
ObjectId::from_hex(b"zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz").unwrap_err(); + assert!(err.to_string().contains("Invalid character encountered"), "got: {err}"); } #[test] fn too_short() { - assert!(matches!( - ObjectId::from_hex(b"abcd").unwrap_err(), - decode::Error::InvalidHexEncodingLength(4) - )); + let err = ObjectId::from_hex(b"abcd").unwrap_err(); + assert!( + err.to_string().contains("hexadecimal characters is invalid"), + "got: {err}" + ); } #[test] fn too_long() { - assert!(matches!( - ObjectId::from_hex(b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaf").unwrap_err(), - decode::Error::InvalidHexEncodingLength(41) - )); + let err = ObjectId::from_hex(b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaf").unwrap_err(); + assert!( + err.to_string().contains("hexadecimal characters is invalid"), + "got: {err}" + ); } } } @@ -62,9 +62,9 @@ fn from_bytes_or_panic_sha256() { mod sha1 { use std::str::FromStr as _; - use gix_hash::{hasher, Kind, ObjectId}; + use gix_hash::{hasher, hasher::Error, Kind, ObjectId}; - fn hash_contents(s: &[u8]) -> Result { + fn hash_contents(s: &[u8]) -> Result { let mut hasher = hasher(Kind::Sha1); hasher.update(s); hasher.try_finalize() @@ -125,15 +125,27 @@ mod sha1 { let expected = ObjectId::from_str("8ac60ba76f1999a1ab70223f225aefdc78d4ddc0").expect("Shambles digest to be valid"); - let Err(hasher::Error::CollisionAttack { digest }) = hash_contents(message_a) else { - panic!("expected Shambles input to collide"); - }; - assert_eq!(digest, expected); + let err = hash_contents(message_a).unwrap_err(); + let err_msg = err.to_string(); + assert!( + err_msg.contains("collision attack"), + "expected collision attack error, got: {err_msg}" + ); + assert!( + err_msg.contains(&expected.to_string()), + "expected digest {expected} in error, got: {err_msg}" + ); - let Err(hasher::Error::CollisionAttack { digest }) = hash_contents(message_b) else { - panic!("expected Shambles input to collide"); - }; - assert_eq!(digest, expected); + let err = 
hash_contents(message_b).unwrap_err(); + let err_msg = err.to_string(); + assert!( + err_msg.contains("collision attack"), + "expected collision attack error, got: {err_msg}" + ); + assert!( + err_msg.contains(&expected.to_string()), + "expected digest {expected} in error, got: {err_msg}" + ); } } diff --git a/gix-hash/tests/hash/prefix.rs b/gix-hash/tests/hash/prefix.rs index a85c9f8ec4..5aa9480c1b 100644 --- a/gix-hash/tests/hash/prefix.rs +++ b/gix-hash/tests/hash/prefix.rs @@ -138,26 +138,22 @@ mod new { #[test] fn errors_if_hex_len_is_longer_than_oid_len_in_hex() { let kind = Kind::Sha1; - assert!(matches!( - gix_hash::Prefix::new(&ObjectId::null(kind), kind.len_in_hex() + 1), - Err(gix_hash::prefix::Error::TooLong { .. }) - )); + let err = gix_hash::Prefix::new(&ObjectId::null(kind), kind.len_in_hex() + 1).unwrap_err(); + assert!(err.to_string().contains("cannot be larger than"), "got: {err}"); } #[test] fn errors_if_hex_len_is_too_short() { let kind = Kind::Sha1; - assert!(matches!( - gix_hash::Prefix::new(&ObjectId::null(kind), 3), - Err(gix_hash::prefix::Error::TooShort { .. 
}) - )); + let err = gix_hash::Prefix::new(&ObjectId::null(kind), 3).unwrap_err(); + assert!(err.to_string().contains("minimum hex length"), "got: {err}"); } } mod try_from { use std::cmp::Ordering; - use gix_hash::{prefix::from_hex::Error, Prefix}; + use gix_hash::Prefix; use crate::hex_to_id; @@ -183,41 +179,37 @@ mod try_from { #[test] fn id_to_short() { let input = "ab"; - let expected = Error::TooShort { hex_len: 2 }; let actual = Prefix::try_from(input).unwrap_err(); - assert_eq!(actual, expected); + assert!(actual.to_string().contains("minimum hex length"), "got: {actual}"); } #[test] #[cfg(all(not(feature = "sha256"), feature = "sha1"))] fn id_too_long() { let input = "abcdefabcdefabcdefabcdefabcdefabcdefabcd123123123123123123"; - let expected = Error::TooLong { hex_len: 58 }; let actual = Prefix::try_from(input).unwrap_err(); - assert_eq!(actual, expected); + assert!(actual.to_string().contains("cannot be larger than"), "got: {actual}"); } #[test] fn id_always_too_long() { let input = "abcdefabcdefabcdefabcdefabcdefabcdefabcd123123123123123123123123123123"; - let expected = Error::TooLong { hex_len: 70 }; let actual = Prefix::try_from(input).unwrap_err(); - assert_eq!(actual, expected); + assert!(actual.to_string().contains("cannot be larger than"), "got: {actual}"); } #[test] fn invalid_chars() { let input = "abcdfOsd"; - let expected = Error::Invalid; let actual = Prefix::try_from(input).unwrap_err(); - assert_eq!(actual, expected); + assert!(actual.to_string().contains("Invalid hex character"), "got: {actual}"); } } mod from_hex_nonempty { use std::cmp::Ordering; - use gix_hash::{prefix::from_hex::Error, Prefix}; + use gix_hash::Prefix; use crate::hex_to_id; @@ -256,25 +248,22 @@ mod from_hex_nonempty { #[test] fn id_empty() { let input = ""; - let expected = Error::TooShort { hex_len: 0 }; let actual = Prefix::from_hex_nonempty(input).unwrap_err(); - assert_eq!(actual, expected); + assert!(actual.to_string().contains("minimum hex length"), "got: 
{actual}"); } #[test] #[cfg(all(not(feature = "sha256"), feature = "sha1"))] fn id_too_long() { let input = "abcdefabcdefabcdefabcdefabcdefabcdefabcd123123123123123123"; - let expected = Error::TooLong { hex_len: 58 }; let actual = Prefix::from_hex_nonempty(input).unwrap_err(); - assert_eq!(actual, expected); + assert!(actual.to_string().contains("cannot be larger than"), "got: {actual}"); } #[test] fn id_always_too_long() { let input = "abcdefabcdefabcdefabcdefabcdefabcdefabcd123123123123123123123123123123"; - let expected = Error::TooLong { hex_len: 70 }; let actual = Prefix::from_hex_nonempty(input).unwrap_err(); - assert_eq!(actual, expected); + assert!(actual.to_string().contains("cannot be larger than"), "got: {actual}"); } } diff --git a/gix-index/Cargo.toml b/gix-index/Cargo.toml index 5c97f22cd5..05aa66e8f1 100644 --- a/gix-index/Cargo.toml +++ b/gix-index/Cargo.toml @@ -26,6 +26,7 @@ gix-features = { version = "^0.46.0", path = "../gix-features", features = [ "progress", ] } gix-hash = { version = "^0.22.0", path = "../gix-hash" } +gix-error = { version = "^0.0.0", path = "../gix-error" } gix-bitmap = { version = "^0.2.15", path = "../gix-bitmap" } gix-object = { version = "^0.55.0", path = "../gix-object" } gix-validate = { version = "^0.11.0", path = "../gix-validate" } diff --git a/gix-index/src/decode/mod.rs b/gix-index/src/decode/mod.rs index 4872c7e23f..e3247db42e 100644 --- a/gix-index/src/decode/mod.rs +++ b/gix-index/src/decode/mod.rs @@ -17,7 +17,7 @@ mod error { #[error(transparent)] Header(#[from] decode::header::Error), #[error("Could not hash index data")] - Hasher(#[from] gix_hash::hasher::Error), + Hasher(#[source] gix_error::Error), #[error("Could not parse entry at index {index}")] Entry { index: u32 }, #[error("Mandatory extension wasn't implemented or malformed.")] @@ -25,7 +25,7 @@ mod error { #[error("Index trailer should have been {expected} bytes long, but was {actual}")] UnexpectedTrailerLength { expected: usize, actual: usize }, 
#[error("Shared index checksum mismatch")] - Verify(#[from] gix_hash::verify::Error), + Verify(#[source] gix_error::Error), } } pub use error::Error; @@ -66,7 +66,8 @@ impl State { ) -> Result<(Self, Option), Error> { let _span = gix_features::trace::detail!("gix_index::State::from_bytes()", options = ?_options); let (version, num_entries, post_header_data) = header::decode(data, object_hash)?; - let start_of_extensions = extension::end_of_index_entry::decode(data, object_hash)?; + let start_of_extensions = + extension::end_of_index_entry::decode(data, object_hash).map_err(|e| Error::Hasher(e.into_error()))?; let mut num_threads = gix_features::parallel::num_threads(thread_limit); let path_backing_buffer_size = entries::estimate_path_storage_requirements_in_bytes( @@ -216,7 +217,9 @@ impl State { let checksum = gix_hash::ObjectId::from_bytes_or_panic(data); let checksum = (!checksum.is_null()).then_some(checksum); if let Some((expected_checksum, actual_checksum)) = expected_checksum.zip(checksum) { - actual_checksum.verify(&expected_checksum)?; + actual_checksum + .verify(&expected_checksum) + .map_err(|e| Error::Verify(e.into_error()))?; } let EntriesOutcome { entries, diff --git a/gix-index/src/extension/end_of_index_entry/write.rs b/gix-index/src/extension/end_of_index_entry/write.rs index 649edae4c2..c7c4ab74e4 100644 --- a/gix-index/src/extension/end_of_index_entry/write.rs +++ b/gix-index/src/extension/end_of_index_entry/write.rs @@ -1,3 +1,5 @@ +use gix_error::ResultExt; + use crate::extension::{end_of_index_entry::SIGNATURE, Signature}; /// Write this extension to out and generate a hash of `hash_kind` over all `prior_extensions` which are specified as `(signature, size)` @@ -12,18 +14,22 @@ pub fn write_to( offset_to_extensions: u32, prior_extensions: impl IntoIterator, ) -> Result<(), gix_hash::io::Error> { - out.write_all(&SIGNATURE)?; + out.write_all(&SIGNATURE) + .or_raise(|| gix_error::message("Could not write EOIE signature"))?; let extension_size: 
u32 = 4 + hash_kind.len_in_bytes() as u32; - out.write_all(&extension_size.to_be_bytes())?; + out.write_all(&extension_size.to_be_bytes()) + .or_raise(|| gix_error::message("Could not write EOIE extension size"))?; - out.write_all(&offset_to_extensions.to_be_bytes())?; + out.write_all(&offset_to_extensions.to_be_bytes()) + .or_raise(|| gix_error::message("Could not write EOIE offset"))?; let mut hasher = gix_hash::hasher(hash_kind); for (signature, size) in prior_extensions { hasher.update(&signature); hasher.update(&size.to_be_bytes()); } - out.write_all(hasher.try_finalize()?.as_slice())?; + out.write_all(hasher.try_finalize()?.as_slice()) + .or_raise(|| gix_error::message("Could not write EOIE hash"))?; Ok(()) } diff --git a/gix-index/src/file/init.rs b/gix-index/src/file/init.rs index 29419e241f..8dd7251228 100644 --- a/gix-index/src/file/init.rs +++ b/gix-index/src/file/init.rs @@ -82,12 +82,9 @@ impl File { &mut gix_features::progress::Discard, &Default::default(), ) - .map_err(|err| match err { - gix_hash::io::Error::Io(err) => Error::Io(err), - gix_hash::io::Error::Hasher(err) => Error::Decode(err.into()), - })? + .map_err(|e| Error::Decode(decode::Error::Hasher(e.into_error())))? 
.verify(&expected) - .map_err(decode::Error::from)?; + .map_err(|e| Error::Decode(decode::Error::Verify(e.into_error())))?; } } diff --git a/gix-index/src/file/verify.rs b/gix-index/src/file/verify.rs index ee3be86cc9..9d6ddf0e15 100644 --- a/gix-index/src/file/verify.rs +++ b/gix-index/src/file/verify.rs @@ -8,9 +8,9 @@ mod error { #[allow(missing_docs)] pub enum Error { #[error("Could not read index file to generate hash")] - Io(#[from] gix_hash::io::Error), + Io(#[source] gix_error::Error), #[error("Index checksum mismatch")] - Verify(#[from] gix_hash::verify::Error), + Verify(#[source] gix_error::Error), } } pub use error::Error; @@ -20,8 +20,12 @@ impl File { pub fn verify_integrity(&self) -> Result<(), Error> { let _span = gix_features::trace::coarse!("gix_index::File::verify_integrity()"); if let Some(checksum) = self.checksum { - let num_bytes_to_hash = - self.path.metadata().map_err(gix_hash::io::Error::from)?.len() - checksum.as_bytes().len() as u64; + let num_bytes_to_hash = self + .path + .metadata() + .map_err(|e| Error::Io(gix_error::ErrorExt::raise(e).into_error()))? + .len() + - checksum.as_bytes().len() as u64; let should_interrupt = AtomicBool::new(false); gix_hash::bytes_of_file( &self.path, @@ -29,8 +33,10 @@ impl File { checksum.kind(), &mut gix_features::progress::Discard, &should_interrupt, - )? - .verify(&checksum)?; + ) + .map_err(|e| Error::Io(e.into_error()))? + .verify(&checksum) + .map_err(|e| Error::Verify(e.into_error()))?; } Ok(()) } diff --git a/gix-index/src/file/write.rs b/gix-index/src/file/write.rs index 87fd2e2d1b..ba65d48a0f 100644 --- a/gix-index/src/file/write.rs +++ b/gix-index/src/file/write.rs @@ -1,13 +1,15 @@ +use gix_error::ResultExt; + use crate::{write, File, Version}; /// The error produced by [`File::write()`]. 
#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] pub enum Error { - #[error(transparent)] - Io(#[from] gix_hash::io::Error), + #[error("Could not write index file")] + Io(#[source] gix_error::Error), #[error("Could not acquire lock for index file")] - AcquireLock(#[from] gix_lock::acquire::Error), + AcquireLock(#[source] gix_error::Error), #[error("Could not commit lock for index file")] CommitLock(#[from] gix_lock::commit::Error), } @@ -31,7 +33,8 @@ impl File { let version = self.state.write_to(out, options)?; (version, hasher.hash.try_finalize()?) }; - out.write_all(hash.as_slice())?; + out.write_all(hash.as_slice()) + .or_raise(|| gix_error::message("Could not write index hash"))?; Ok((version, hash)) } @@ -42,12 +45,18 @@ impl File { let _span = gix_features::trace::detail!("gix_index::File::write()", path = ?self.path); let mut lock = std::io::BufWriter::with_capacity( 64 * 1024, - gix_lock::File::acquire_to_update_resource(&self.path, gix_lock::acquire::Fail::Immediately, None)?, + gix_lock::File::acquire_to_update_resource(&self.path, gix_lock::acquire::Fail::Immediately, None) + .map_err(|e| Error::AcquireLock(e.into_error()))?, ); - let (version, digest) = self.write_to(&mut lock, options)?; + let (version, digest) = self + .write_to(&mut lock, options) + .map_err(|e| Error::Io(e.into_error()))?; match lock.into_inner() { Ok(lock) => lock.commit()?, - Err(err) => return Err(Error::Io(err.into_error().into())), + Err(err) => { + let io_err: std::io::Error = err.into_error(); + return Err(Error::Io(gix_error::ErrorExt::raise(io_err).into_error())); + } }; self.state.version = version; self.checksum = Some(digest); diff --git a/gix-index/src/write.rs b/gix-index/src/write.rs index 1226c089d3..e0ae2077c8 100644 --- a/gix-index/src/write.rs +++ b/gix-index/src/write.rs @@ -1,5 +1,7 @@ use std::io::Write; +use gix_error::ResultExt; + use crate::{entry, extension, write::util::CountBytes, State, Version}; /// A way to specify which of the optional 
extensions to write. @@ -85,9 +87,13 @@ impl State { .try_into() .expect("definitely not too many entries"); - let offset_to_entries = header(&mut write, version, num_entries - removed_entries)?; - let offset_to_extensions = entries(&mut write, self, offset_to_entries)?; - let (extension_toc, out) = self.write_extensions(write, offset_to_extensions, extensions)?; + let offset_to_entries = header(&mut write, version, num_entries - removed_entries) + .or_raise(|| gix_error::message("Could not write index header"))?; + let offset_to_extensions = entries(&mut write, self, offset_to_entries) + .or_raise(|| gix_error::message("Could not write index entries"))?; + let (extension_toc, out) = self + .write_extensions(write, offset_to_extensions, extensions) + .or_raise(|| gix_error::message("Could not write index extensions"))?; if num_entries > 0 && extensions diff --git a/gix-index/tests/Cargo.toml b/gix-index/tests/Cargo.toml index faaa043829..d9dade409a 100644 --- a/gix-index/tests/Cargo.toml +++ b/gix-index/tests/Cargo.toml @@ -19,6 +19,7 @@ path = "integrate.rs" gix-features-parallel = ["gix-features/parallel"] [dev-dependencies] +gix-error = { version = "^0.0.0", path = "../../gix-error" } gix-index = { path = ".." 
} gix-features = { path = "../../gix-features", features = ["progress"] } gix-testtools = { path = "../../tests/tools" } diff --git a/gix-index/tests/index/file/write.rs b/gix-index/tests/index/file/write.rs index 5fec0faf23..5684f887ff 100644 --- a/gix-index/tests/index/file/write.rs +++ b/gix-index/tests/index/file/write.rs @@ -27,7 +27,7 @@ fn roundtrips() -> crate::Result { let expected_bytes = std::fs::read(fixture.to_path())?; let mut out_bytes = Vec::new(); - let (actual_version, _digest) = expected.write_to(&mut out_bytes, options)?; + let (actual_version, _digest) = expected.write_to(&mut out_bytes, options).map_err(gix_error::Exn::into_error)?; let (actual, _) = State::from_bytes(&out_bytes, FileTime::now(), gix_hash::Kind::Sha1, Default::default())?; let name = fixture.to_name(); @@ -108,7 +108,7 @@ fn roundtrips_sparse_index() -> crate::Result { let _expected_bytes = std::fs::read(fixture.to_path())?; let mut out_bytes = Vec::new(); - let (actual_version, _) = expected.write_to(&mut out_bytes, options)?; + let (actual_version, _) = expected.write_to(&mut out_bytes, options).map_err(gix_error::Exn::into_error)?; let (actual, _) = State::from_bytes(&out_bytes, FileTime::now(), gix_hash::Kind::Sha1, Default::default())?; compare_states_against_baseline(&actual, actual_version, &expected, options, fixture.to_name()); @@ -175,7 +175,7 @@ fn extended_flags_automatically_upgrade_the_version_to_avoid_data_loss() -> crat expected.entries_mut()[0].flags.insert(entry::Flags::EXTENDED); let mut buf = Vec::new(); - let (actual_version, _digest) = expected.write_to(&mut buf, Default::default())?; + let (actual_version, _digest) = expected.write_to(&mut buf, Default::default()).map_err(gix_error::Exn::into_error)?; assert_eq!(actual_version, Version::V3, "extended flags need V3"); Ok(()) @@ -191,7 +191,7 @@ fn remove_flag_is_respected() -> crate::Result { entry.flags.toggle(entry::Flags::REMOVE); } let mut buf = Vec::::new(); - index.write_to(&mut buf, 
Default::default())?; + index.write_to(&mut buf, Default::default()).map_err(gix_error::Exn::into_error)?; let (state, _checksum) = State::from_bytes(&buf, FileTime::now(), gix_hash::Kind::Sha1, Default::default())?; assert_eq!( diff --git a/gix-lock/Cargo.toml b/gix-lock/Cargo.toml index 5a7abb1e77..ada7e1d684 100644 --- a/gix-lock/Cargo.toml +++ b/gix-lock/Cargo.toml @@ -18,7 +18,7 @@ test = true [dependencies] gix-utils = { version = "^0.3.1", default-features = false, path = "../gix-utils" } gix-tempfile = { version = "^21.0.0", default-features = false, path = "../gix-tempfile" } -thiserror = "2.0.18" +gix-error = { version = "^0.0.0", path = "../gix-error" } [dev-dependencies] tempfile = "3.24.0" diff --git a/gix-lock/src/acquire.rs b/gix-lock/src/acquire.rs index 2c993dbc4e..72c13c3ac4 100644 --- a/gix-lock/src/acquire.rs +++ b/gix-lock/src/acquire.rs @@ -4,6 +4,7 @@ use std::{ time::Duration, }; +use gix_error::{message, ErrorExt}; use gix_tempfile::{AutoRemove, ContainingDirectory}; use crate::{backoff, File, Marker, DOT_LOCK_SUFFIX}; @@ -41,18 +42,7 @@ impl From for Fail { } /// The error returned when acquiring a [`File`] or [`Marker`]. -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error("Another IO error occurred while obtaining the lock")] - Io(#[from] std::io::Error), - #[error("The lock for resource '{resource_path}' could not be obtained {mode} after {attempts} attempt(s). The lockfile at '{resource_path}{}' might need manual deletion.", super::DOT_LOCK_SUFFIX)] - PermanentlyLocked { - resource_path: PathBuf, - mode: Fail, - attempts: usize, - }, -} +pub type Error = gix_error::Exn; impl File { /// Create a writable lock file with failure `mode` whose content will eventually overwrite the given resource `at_path`. 
@@ -190,7 +180,7 @@ fn lock_with_mode( std::thread::sleep(wait); continue; } - Err(err) => return Err(Error::from(err)), + Err(err) => return Err(err.raise().raise(message("Another IO error occurred while obtaining the lock"))), } } try_lock(&lock_path, directory, cleanup) @@ -198,12 +188,13 @@ fn lock_with_mode( } .map(|v| (lock_path, v)) .map_err(|err| match err.kind() { - AlreadyExists => Error::PermanentlyLocked { - resource_path: resource.into(), - mode, - attempts, - }, - _ => Error::Io(err), + AlreadyExists => gix_error::message!( + "The lock for resource '{}' could not be obtained {mode} after {attempts} attempt(s). The lockfile at '{}{}' might need manual deletion.", + resource.display(), + resource.display(), + super::DOT_LOCK_SUFFIX, + ).raise(), + _ => err.raise().raise(message("Another IO error occurred while obtaining the lock")), }) } diff --git a/gix-lock/tests/lock/file.rs b/gix-lock/tests/lock/file.rs index 369455b905..4c4d9a647f 100644 --- a/gix-lock/tests/lock/file.rs +++ b/gix-lock/tests/lock/file.rs @@ -10,7 +10,8 @@ mod close { let resource = dir.path().join("resource-existing.ext"); std::fs::write(&resource, b"old state")?; let resource_lock = resource.with_extension("ext.lock"); - let mut file = gix_lock::File::acquire_to_update_resource(&resource, Fail::Immediately, None)?; + let mut file = gix_lock::File::acquire_to_update_resource(&resource, Fail::Immediately, None) + .map_err(gix_error::Exn::into_error)?; assert!(resource_lock.is_file()); file.with_mut(|out| out.write_all(b"hello world"))?; let mark = file.close()?; @@ -35,7 +36,8 @@ mod commit { let dir = tempfile::tempdir()?; let resource = dir.path().join("resource-existing.ext"); std::fs::create_dir(&resource)?; - let mark = gix_lock::Marker::acquire_to_hold_resource(&resource, Fail::Immediately, None)?; + let mark = gix_lock::Marker::acquire_to_hold_resource(&resource, Fail::Immediately, None) + .map_err(gix_error::Exn::into_error)?; let lock_path = mark.lock_path().to_owned(); 
assert!(lock_path.is_file(), "the lock is placed"); @@ -57,7 +59,8 @@ mod commit { let dir = tempfile::tempdir()?; let resource = dir.path().join("resource-existing.ext"); std::fs::create_dir(&resource)?; - let file = gix_lock::File::acquire_to_update_resource(&resource, Fail::Immediately, None)?; + let file = gix_lock::File::acquire_to_update_resource(&resource, Fail::Immediately, None) + .map_err(gix_error::Exn::into_error)?; let lock_path = file.lock_path().to_owned(); assert!(lock_path.is_file(), "the lock is placed"); @@ -87,7 +90,7 @@ mod commit { } mod acquire { - use std::io::{ErrorKind, Write}; + use std::io::Write; use gix_lock::acquire; @@ -101,7 +104,8 @@ mod acquire { let resource = dir.path().join("a").join("resource-nonexisting"); let resource_lock = resource.with_extension("lock"); let mut file = - gix_lock::File::acquire_to_update_resource(&resource, fail_immediately(), Some(dir.path().into()))?; + gix_lock::File::acquire_to_update_resource(&resource, fail_immediately(), Some(dir.path().into())) + .map_err(gix_error::Exn::into_error)?; assert_eq!(file.lock_path(), resource_lock); assert_eq!(file.resource_path(), resource); assert!(resource_lock.is_file()); @@ -131,7 +135,8 @@ mod acquire { let dir = tempfile::tempdir()?; let resource = dir.path().join("resource-nonexisting.ext"); { - let mut file = gix_lock::File::acquire_to_update_resource(&resource, fail_immediately(), None)?; + let mut file = gix_lock::File::acquire_to_update_resource(&resource, fail_immediately(), None) + .map_err(gix_error::Exn::into_error)?; file.with_mut(|out| out.write_all(b"probably we will be interrupted"))?; } assert!(!resource.is_file(), "the file wasn't created"); @@ -139,17 +144,21 @@ mod acquire { } #[test] - fn lock_non_existing_dir_fails() -> crate::Result { - let dir = tempfile::tempdir()?; + fn lock_non_existing_dir_fails() { + let dir = tempfile::tempdir().unwrap(); let resource = dir.path().join("a").join("resource.ext"); let res = 
gix_lock::File::acquire_to_update_resource(&resource, fail_immediately(), None); - assert!(matches!(res, Err(acquire::Error::Io(err)) if err.kind() == ErrorKind::NotFound)); + let err = res.expect_err("should fail for non-existing directory"); + let err_str = err.to_string(); + assert!( + err_str.contains("Another IO error occurred while obtaining the lock"), + "error message should mention IO error: {err_str}" + ); assert!(dir.path().is_dir(), "it won't meddle with the containing directory"); assert!(!resource.is_file(), "the resource is not created"); assert!( !resource.parent().unwrap().is_dir(), "parent dire wasn't created either" ); - Ok(()) } } diff --git a/gix-lock/tests/lock/marker.rs b/gix-lock/tests/lock/marker.rs index 02ebe0f84f..ccb5e1fee1 100644 --- a/gix-lock/tests/lock/marker.rs +++ b/gix-lock/tests/lock/marker.rs @@ -7,7 +7,8 @@ mod acquire { fn fail_mode_immediately_produces_a_descriptive_error() -> crate::Result { let dir = tempfile::tempdir()?; let resource = dir.path().join("the-resource"); - let guard = gix_lock::Marker::acquire_to_hold_resource(&resource, Fail::Immediately, None)?; + let guard = gix_lock::Marker::acquire_to_hold_resource(&resource, Fail::Immediately, None) + .map_err(gix_error::Exn::into_error)?; assert!(guard.lock_path().ends_with("the-resource.lock")); assert!(guard.resource_path().ends_with("the-resource")); let err_str = gix_lock::Marker::acquire_to_hold_resource(resource, Fail::Immediately, None) @@ -23,7 +24,8 @@ mod acquire { fn fail_mode_after_duration_fails_after_a_given_duration_or_more() -> crate::Result { let dir = tempfile::tempdir()?; let resource = dir.path().join("the-resource"); - let _guard = gix_lock::Marker::acquire_to_hold_resource(&resource, Fail::Immediately, None)?; + let _guard = gix_lock::Marker::acquire_to_hold_resource(&resource, Fail::Immediately, None) + .map_err(gix_error::Exn::into_error)?; let start = Instant::now(); let time_to_wait = Duration::from_millis(50); let err_str = @@ -70,7 +72,8 @@ 
mod commit { fn fails_for_ordinary_marker_that_was_never_writable() -> crate::Result { let dir = tempfile::tempdir()?; let resource = dir.path().join("the-resource"); - let mark = gix_lock::Marker::acquire_to_hold_resource(resource, Fail::Immediately, None)?; + let mark = gix_lock::Marker::acquire_to_hold_resource(resource, Fail::Immediately, None) + .map_err(gix_error::Exn::into_error)?; #[cfg(unix)] { use std::os::unix::fs::PermissionsExt; diff --git a/gix-mailmap/Cargo.toml b/gix-mailmap/Cargo.toml index 3014bd220b..1f346a4f10 100644 --- a/gix-mailmap/Cargo.toml +++ b/gix-mailmap/Cargo.toml @@ -22,7 +22,7 @@ serde = ["dep:serde", "bstr/serde", "gix-actor/serde"] gix-actor = { version = "^0.38.0", path = "../gix-actor" } gix-date = { version = "^0.13.0", path = "../gix-date" } bstr = { version = "1.12.0", default-features = false, features = ["std", "unicode"] } -thiserror = "2.0.18" +gix-error = { version = "^0.0.0", path = "../gix-error" } serde = { version = "1.0.114", optional = true, default-features = false, features = ["derive"] } document-features = { version = "0.2.0", optional = true } diff --git a/gix-mailmap/src/parse.rs b/gix-mailmap/src/parse.rs index 34d3c41761..3f38ad1d38 100644 --- a/gix-mailmap/src/parse.rs +++ b/gix-mailmap/src/parse.rs @@ -1,23 +1,8 @@ -mod error { - use bstr::BString; - - /// The error returned by [`parse()`][crate::parse()]. - #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error("Line {line_number} has too many names or emails, or none at all: {line:?}")] - UnconsumedInput { line_number: usize, line: BString }, - #[error("{line_number}: {line:?}: {message}")] - Malformed { - line_number: usize, - line: BString, - message: String, - }, - } -} +/// The error returned by [`parse()`][crate::parse()]. 
+pub type Error = gix_error::Exn; use bstr::{BStr, ByteSlice}; -pub use error::Error; +use gix_error::{ErrorExt, OptionExt}; use crate::Entry; @@ -61,10 +46,9 @@ fn parse_line(line: &BStr, line_number: usize) -> Result, Error> { let (name1, email1, rest) = parse_name_and_email(line, line_number)?; let (name2, email2, rest) = parse_name_and_email(rest, line_number)?; if !rest.trim().is_empty() { - return Err(Error::UnconsumedInput { - line_number, - line: line.into(), - }); + return Err( + gix_error::message!("Line {line_number} has too many names or emails, or none at all: {line:?}").raise(), + ); } Ok(match (name1, email1, name2, email2) { (Some(proper_name), Some(commit_email), None, None) => Entry::change_name_by_email(proper_name, commit_email), @@ -81,11 +65,10 @@ fn parse_line(line: &BStr, line_number: usize) -> Result, Error> { Entry::change_email_by_name_and_email(proper_email, commit_name, commit_email) } _ => { - return Err(Error::Malformed { - line_number, - line: line.into(), - message: "Emails without a name or email to map to are invalid".into(), - }) + return Err(gix_error::message!( + "{line_number}: {line:?}: Emails without a name or email to map to are invalid" + ) + .raise()) } }) } @@ -97,18 +80,12 @@ fn parse_name_and_email( match line.find_byte(b'<') { Some(start_bracket) => { let email = &line[start_bracket + 1..]; - let closing_bracket = email.find_byte(b'>').ok_or_else(|| Error::Malformed { - line_number, - line: line.into(), - message: "Missing closing bracket '>' in email".into(), - })?; + let closing_bracket = email + .find_byte(b'>') + .ok_or_raise(|| gix_error::message!("{line_number}: {line:?}: Missing closing bracket '>' in email"))?; let email = email[..closing_bracket].trim().as_bstr(); if email.is_empty() { - return Err(Error::Malformed { - line_number, - line: line.into(), - message: "Email must not be empty".into(), - }); + return Err(gix_error::message!("{line_number}: {line:?}: Email must not be empty").raise()); } let name = 
line[..start_bracket].trim().as_bstr(); let rest = line[start_bracket + closing_bracket + 2..].as_bstr(); diff --git a/gix-mailmap/tests/parse/mod.rs b/gix-mailmap/tests/parse/mod.rs index 0e7e38be9d..8d9bb1055e 100644 --- a/gix-mailmap/tests/parse/mod.rs +++ b/gix-mailmap/tests/parse/mod.rs @@ -8,10 +8,12 @@ fn line_numbers_are_counted_correctly_in_errors() { assert_eq!(actual.len(), 2); let err = actual.next().expect("two items left").unwrap_err(); - assert!(matches!(err, parse::Error::Malformed { line_number: 3, .. })); + let err_str = err.to_string(); + assert!(err_str.contains("3:"), "expected line 3, got: {err_str}"); let err = actual.next().expect("one item left").unwrap_err(); - assert!(matches!(err, parse::Error::UnconsumedInput { line_number: 5, .. })); + let err_str = err.to_string(); + assert!(err_str.contains("Line 5"), "expected line 5, got: {err_str}"); } #[test] @@ -84,39 +86,35 @@ fn valid_entries() { #[test] fn error_if_there_is_just_a_name() { - assert!(matches!( - try_line("just a name"), - Err(parse::Error::UnconsumedInput { line_number: 1, .. }) - )); + let err = try_line("just a name").unwrap_err(); + let err_str = err.to_string(); + assert!(err_str.contains("Line 1"), "expected line 1, got: {err_str}"); } #[test] fn error_if_there_is_just_an_email() { - assert!(matches!( - try_line(""), - Err(parse::Error::Malformed { line_number: 1, .. }) - )); + let err = try_line("").unwrap_err(); + let err_str = err.to_string(); + assert!(err_str.contains("1:"), "expected line 1, got: {err_str}"); - assert!(matches!( - try_line(" \t "), - Err(parse::Error::Malformed { line_number: 1, .. }) - )); + let err = try_line(" \t ").unwrap_err(); + let err_str = err.to_string(); + assert!(err_str.contains("1:"), "expected line 1, got: {err_str}"); } #[test] fn error_if_email_is_empty() { - assert!(matches!( - try_line("hello <"), - Err(parse::Error::Malformed { line_number: 1, .. 
}) - )); - assert!(matches!( - try_line("hello < \t"), - Err(parse::Error::Malformed { line_number: 1, .. }) - )); - assert!(matches!( - try_line("hello < \t\r >"), - Err(parse::Error::Malformed { line_number: 1, .. }) - )); + let err = try_line("hello <").unwrap_err(); + let err_str = err.to_string(); + assert!(err_str.contains("1:"), "expected line 1, got: {err_str}"); + + let err = try_line("hello < \t").unwrap_err(); + let err_str = err.to_string(); + assert!(err_str.contains("1:"), "expected line 1, got: {err_str}"); + + let err = try_line("hello < \t\r >").unwrap_err(); + let err_str = err.to_string(); + assert!(err_str.contains("1:"), "expected line 1, got: {err_str}"); } fn line(input: &str) -> Entry<'_> { diff --git a/gix-merge/Cargo.toml b/gix-merge/Cargo.toml index 7fde903077..f476c3470a 100644 --- a/gix-merge/Cargo.toml +++ b/gix-merge/Cargo.toml @@ -42,6 +42,7 @@ serde = { version = "1.0.114", optional = true, default-features = false, featur document-features = { version = "0.2.0", optional = true } [dev-dependencies] +gix-error = { version = "^0.0.0", path = "../gix-error" } gix-testtools = { path = "../tests/tools" } gix-odb = { path = "../gix-odb" } gix-utils = { path = "../gix-utils" } diff --git a/gix-merge/tests/merge/blob/mod.rs b/gix-merge/tests/merge/blob/mod.rs index 580ef7dfeb..e6d27757bc 100644 --- a/gix-merge/tests/merge/blob/mod.rs +++ b/gix-merge/tests/merge/blob/mod.rs @@ -44,7 +44,7 @@ mod util { impl ObjectDb { /// Insert `data` and return its hash. That can be used to find it again. 
pub fn insert(&mut self, data: &str) -> Result { - let id = gix_object::compute_hash(gix_hash::Kind::Sha1, gix_object::Kind::Blob, data.as_bytes())?; + let id = gix_object::compute_hash(gix_hash::Kind::Sha1, gix_object::Kind::Blob, data.as_bytes()).map_err(gix_error::Exn::into_error)?; self.data_by_id.insert(id, data.into()); Ok(id) } diff --git a/gix-merge/tests/merge/blob/pipeline.rs b/gix-merge/tests/merge/blob/pipeline.rs index e01b980a77..c251692426 100644 --- a/gix-merge/tests/merge/blob/pipeline.rs +++ b/gix-merge/tests/merge/blob/pipeline.rs @@ -283,7 +283,7 @@ fn non_existing() -> crate::Result { ); assert_eq!(buf.len(), 0, "it's always cleared before any potential use"); - let some_id = gix_hash::ObjectId::from_hex(b"45c160c35c17ad264b96431cceb9793160396e99")?; + let some_id = gix_hash::ObjectId::from_hex(b"45c160c35c17ad264b96431cceb9793160396e99").map_err(gix_error::Exn::into_error)?; let err = filter .convert_to_mergeable( &some_id, diff --git a/gix-merge/tests/merge/tree/mod.rs b/gix-merge/tests/merge/tree/mod.rs index 31612a4d78..8c4555c2cb 100644 --- a/gix-merge/tests/merge/tree/mod.rs +++ b/gix-merge/tests/merge/tree/mod.rs @@ -155,7 +155,7 @@ fn run_baseline() -> crate::Result { skipped_tree_resolve_cases += 1; continue; } - let expected_tree_id = gix_hash::ObjectId::from_hex(std::fs::read_to_string(tree_path)?.trim().as_bytes())?; + let expected_tree_id = gix_hash::ObjectId::from_hex(std::fs::read_to_string(tree_path)?.trim().as_bytes()).map_err(gix_error::Exn::into_error)?; options.tree_merge.tree_conflicts = Some(tree_resolution); let resolve_with_ours = tree_resolution == gix_merge::tree::ResolveWith::Ours; if resolve_with_ours { diff --git a/gix-object/Cargo.toml b/gix-object/Cargo.toml index cfcfd02111..1100561043 100644 --- a/gix-object/Cargo.toml +++ b/gix-object/Cargo.toml @@ -51,6 +51,7 @@ gix-actor = { version = "^0.38.0", path = "../gix-actor" } gix-date = { version = "^0.13.0", path = "../gix-date" } gix-path = { version = "^0.11.0", 
path = "../gix-path" } gix-utils = { version = "^0.3.1", path = "../gix-utils" } +gix-error = { version = "^0.0.0", path = "../gix-error" } itoa = "1.0.17" thiserror = "2.0.18" diff --git a/gix-object/benches/edit_tree.rs b/gix-object/benches/edit_tree.rs index 067065d589..eb93137213 100644 --- a/gix-object/benches/edit_tree.rs +++ b/gix-object/benches/edit_tree.rs @@ -1,6 +1,7 @@ use std::{cell::RefCell, hint::black_box, rc::Rc}; use criterion::{criterion_group, criterion_main, Criterion, Throughput}; +use gix_error::ResultExt; use gix_hash::ObjectId; use gix_hashtable::hash_map::Entry; use gix_object::{tree, tree::EntryKind, Tree, WriteTo}; @@ -142,7 +143,7 @@ fn new_inmemory_writes() -> (TreeStore, impl FnMut(&Tree) -> Result { @@ -66,8 +66,9 @@ pub mod verify { /// If the hashes do not match, an [`Error`] is returned, containing the actual /// hash of `self`. pub fn verify_checksum(&self, expected: &gix_hash::oid) -> Result { - let actual = crate::compute_hash(expected.kind(), self.kind, self.data)?; - actual.verify(expected)?; + let actual = crate::compute_hash(expected.kind(), self.kind, self.data) + .map_err(|e| Error::Hasher(e.into_error()))?; + actual.verify(expected).map_err(|e| Error::Verify(e.into_error()))?; Ok(actual) } } diff --git a/gix-object/tests/object/commit/from_bytes.rs b/gix-object/tests/object/commit/from_bytes.rs index 7b8827d0a0..2cdeb9fa3f 100644 --- a/gix-object/tests/object/commit/from_bytes.rs +++ b/gix-object/tests/object/commit/from_bytes.rs @@ -362,7 +362,7 @@ fn bogus_multi_gpgsig_header() -> crate::Result { let mut buf = Vec::::new(); commit.write_to(&mut buf)?; - let actual = gix_object::compute_hash(gix_hash::Kind::Sha1, gix_object::Kind::Commit, &buf)?; + let actual = gix_object::compute_hash(gix_hash::Kind::Sha1, gix_object::Kind::Commit, &buf).map_err(gix_error::Exn::into_error)?; assert_eq!( actual, hex_to_id("5f549aa2f78314ac37bbd436c8f80aea4c752e07"), diff --git a/gix-object/tests/object/tree/editor.rs 
b/gix-object/tests/object/tree/editor.rs index 70d73af68f..52606ec40d 100644 --- a/gix-object/tests/object/tree/editor.rs +++ b/gix-object/tests/object/tree/editor.rs @@ -25,7 +25,7 @@ fn from_empty_cursor() -> crate::Result { .remove(["with-subdir", "dir", "file"])? .remove(Some("with-subdir2"))? .remove(Some("with-subdir2"))? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), "e2339a3f62e2f3fc54a406739a62a4173ee3b5ac @@ -48,7 +48,7 @@ fn from_empty_cursor() -> crate::Result { }), ); - let actual = edit.write(&mut write)?; + let actual = edit.write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), "76e0729de84047d19711d90cfcbb4e60bb432682 @@ -65,7 +65,7 @@ fn from_empty_cursor() -> crate::Result { ); let mut cursor = edit.cursor_at(cursor_path)?; - let actual = cursor.remove(Some("empty-dir-via-cursor"))?.write(&mut write)?; + let actual = cursor.remove(Some("empty-dir-via-cursor"))?.write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!(actual, empty_tree(), "it keeps the empty tree like the editor would"); assert_eq!( edit.get(["some", "deeply", "nested", "path"]), @@ -82,7 +82,7 @@ fn from_empty_cursor() -> crate::Result { "but the removed entry is indee removed" ); - let actual = edit.write(&mut write)?; + let actual = edit.write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), "6cc592046dcaac06d3c619b4892d9ac78738fb5d @@ -98,7 +98,7 @@ fn from_empty_cursor() -> crate::Result { let actual = cursor .upsert(Some("root-file"), EntryKind::BlobExecutable, any_blob())? .upsert(["nested", "from"], EntryKind::BlobExecutable, any_blob())? 
- .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), @@ -110,7 +110,7 @@ fn from_empty_cursor() -> crate::Result { "it is able to write the sub-tree, even though names from the top-level tree are re-used" ); - let actual = edit.write(&mut write)?; + let actual = edit.write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), "8febb45a1c34e405d70a7ae059d57abdd8254063 @@ -148,7 +148,7 @@ fn from_existing_cursor() -> crate::Result { .remove(Some("file.toml"))? .remove(Some("file.toml.bin"))? .upsert(["some", "nested", "file"], EntryKind::Blob, any_blob())? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( num_writes_and_clear(), 1 + 2, @@ -170,7 +170,7 @@ fn from_existing_cursor() -> crate::Result { let actual = cursor .upsert(Some("hello-from-cursor"), EntryKind::Blob, any_blob())? .remove(Some("file"))? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree_with_odb(actual, &storage, &odb), "0f090b7c09c94f7895d0d8ce63c1da7693c026b3 @@ -186,7 +186,7 @@ fn from_existing_cursor() -> crate::Result { .remove(Some("file.toml"))? .remove(Some("file.toml.bin"))? .upsert(["some", "nested", "file"], EntryKind::Blob, any_blob())? 
- .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree_with_odb(actual, &storage, &odb), "10364deb76aeee372eb486c1216dca2a98dbd379 @@ -199,7 +199,7 @@ fn from_existing_cursor() -> crate::Result { ", "this cursor is the same as the editor" ); - let actual = cursor.remove(["some", "nested", "file"])?.write(&mut write)?; + let actual = cursor.remove(["some", "nested", "file"])?.write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree_with_odb(actual, &storage, &odb), "6e2806ab1e4d4ae2c9d24ce113a9bb54f8eff97b @@ -221,7 +221,7 @@ fn from_empty_removal() -> crate::Result { let actual = edit .remove(Some("non-existing"))? .remove(["still", "does", "not", "exist"])? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!(actual, empty_tree(), "nothing was actually done"); assert_eq!(num_writes_and_clear(), 1, "it has to write the empty tree though"); assert_eq!(storage.borrow().len(), 1, "the empty tree ends up in storage, too"); @@ -237,7 +237,7 @@ fn from_empty_removal() -> crate::Result { .remove(Some("with-subdir"))? .remove(["with-subdir2", "dir"])? .remove(Some("with-subdir2"))? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!(actual, empty_tree(), "nothing was actually done"); assert_eq!(num_writes_and_clear(), 1, "still nothing to write but the empty tree"); assert_eq!(odb.access_count_and_clear(), 0); @@ -247,7 +247,7 @@ fn from_empty_removal() -> crate::Result { .upsert(Some("empty-dir"), EntryKind::Tree, empty_tree())? .upsert(["with-subdir", "dir", "file"], EntryKind::Blob, any_blob())? .upsert(["with-subdir2", "dir", "file"], EntryKind::Blob, any_blob())? 
- .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), "9e608223b4cbc733abd20fb6d5b8ea80b074be17 @@ -270,7 +270,7 @@ fn from_empty_removal() -> crate::Result { .remove(Some("with-subdir"))? .remove(["with-subdir2", "dir"])? .remove(Some("with-subdir2"))? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!(actual, empty_tree(), "everything was removed, leaving nothing"); assert_eq!(num_writes_and_clear(), 1, "only the empty tree to write"); assert_eq!( @@ -283,7 +283,7 @@ fn from_empty_removal() -> crate::Result { .upsert(["with-subdir", "file"], EntryKind::Blob, any_blob())? .upsert(["with-subdir", "dir", "file"], EntryKind::Blob, any_blob())? .remove(["with-subdir", "dir"])? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), "da2277079f7e9e5a012c9a03d1aac710866ee2c5 @@ -315,7 +315,7 @@ fn from_existing_remove() -> crate::Result { .remove(Some("bin.d"))? .remove(Some("file.toml.bin"))? .remove(Some("file.0"))? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree_with_odb(actual, &storage, &odb), "dfd0d048f8e852879ad8e1a6a9b810873de16a9c @@ -337,12 +337,12 @@ fn from_existing_remove() -> crate::Result { .remove(Some("file.to"))? .remove(Some("file.toml"))? .remove(Some("file0"))? 
- .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!(actual, empty_tree(), "nothing is left"); assert_eq!(num_writes_and_clear(), 1, "only the empty tree is written"); assert_eq!(odb.access_count_and_clear(), 0); - let actual = edit.set_root(root_tree).remove(["file", "a"])?.write(&mut write)?; + let actual = edit.set_root(root_tree).remove(["file", "a"])?.write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!(num_writes_and_clear(), 1, "it writes the changed root-tree"); assert_eq!( odb.access_count_and_clear(), @@ -404,7 +404,7 @@ fn from_empty_invalid_write() -> crate::Result { .remove(Some("a"))? .remove(Some("with\0null"))? .upsert(Some("works"), EntryKind::Blob, any_blob())? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), "d5b913c39b06507c7c64adb16c268ce1102ef5c1 @@ -532,7 +532,7 @@ fn from_empty_add() -> crate::Result { assert_eq!(num_writes_and_clear(), 4); assert_eq!(odb.access_count_and_clear(), 0); - let actual = edit.upsert(["x"], EntryKind::Blob, any_blob())?.write(&mut write)?; + let actual = edit.upsert(["x"], EntryKind::Blob, any_blob())?.write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), "c2bb1616d4db21d99a30a1219d7d47e969f42e26 @@ -548,7 +548,7 @@ fn from_empty_add() -> crate::Result { let prev_tree = actual; let actual = edit .upsert(["a", "b", "c"], EntryKind::BlobExecutable, any_blob())? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!(actual, prev_tree, "writing the same path again is a no-op"); assert_eq!( num_writes_and_clear(), @@ -578,7 +578,7 @@ fn from_empty_add() -> crate::Result { .set_root(Tree::default()) .upsert(["a", "b", "c"], EntryKind::Blob, any_blob())? .upsert(["a"], EntryKind::BlobExecutable, any_blob())? 
- .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), "f7b85940c3afa596829cacf98e98ff8bfd7c68ed @@ -597,7 +597,7 @@ fn from_empty_add() -> crate::Result { .upsert(["a", "b"], EntryKind::Tree, empty_tree())? .upsert(["a", "b", "c"], EntryKind::BlobExecutable, any_blob())? // .upsert(["a", "b", "d"], EntryKind::Blob, any_blob())? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( display_tree(actual, &storage), "d8d3f558776965f70452625b72363234f517b290 @@ -721,7 +721,7 @@ fn from_existing_add() -> crate::Result { .upsert(["a", "b", "c"], EntryKind::Blob, any_blob())? .upsert(["a", "b"], EntryKind::Blob, any_blob())? .upsert(["file"], EntryKind::BlobExecutable, any_blob())? - .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!(odb.access_count_and_clear(), 2, "`a` and `a/b`"); assert_eq!( display_tree_with_odb(actual, &storage, &odb), @@ -746,7 +746,7 @@ fn from_existing_add() -> crate::Result { let actual = edit .set_root(root_tree) .upsert(["file", "subdir", "exe"], EntryKind::BlobExecutable, any_blob())? 
- .write(&mut write)?; + .write(&mut write).map_err(gix_error::Exn::into_error)?; assert_eq!( odb.access_count_and_clear(), 1, @@ -779,6 +779,7 @@ mod utils { }; use bstr::{BStr, ByteSlice}; + use gix_error::ResultExt; use gix_hash::ObjectId; use gix_object::{Tree, WriteTo}; @@ -810,7 +811,7 @@ mod utils { let mut buf = Vec::with_capacity(512); move |tree: &Tree| { buf.clear(); - tree.write_to(&mut buf)?; + tree.write_to(&mut buf).or_raise(|| gix_error::message("write tree to buffer"))?; let id = gix_object::compute_hash(gix_hash::Kind::Sha1, gix_object::Kind::Tree, &buf)?; store.borrow_mut().insert(id, tree.clone()); let old = num_writes.get(); diff --git a/gix-odb/Cargo.toml b/gix-odb/Cargo.toml index 533dbad5ec..8d29fea8af 100644 --- a/gix-odb/Cargo.toml +++ b/gix-odb/Cargo.toml @@ -20,6 +20,7 @@ doctest = false serde = ["dep:serde", "gix-hash/serde", "gix-object/serde", "gix-pack/serde"] [dependencies] +gix-error = { version = "^0.0.0", path = "../gix-error" } gix-features = { version = "^0.46.0", path = "../gix-features", features = ["walkdir", "zlib", "crc32"] } gix-hashtable = { version = "^0.12.0", path = "../gix-hashtable" } gix-hash = { version = "^0.22.0", path = "../gix-hash" } diff --git a/gix-odb/src/memory.rs b/gix-odb/src/memory.rs index 6862437f1f..72706a071c 100644 --- a/gix-odb/src/memory.rs +++ b/gix-odb/src/memory.rs @@ -222,7 +222,8 @@ where let mut buf = Vec::new(); from.read_to_end(&mut buf)?; - let id = gix_object::compute_hash(self.object_hash, kind, &buf)?; + let id = gix_object::compute_hash(self.object_hash, kind, &buf) + .map_err(|e| Box::new(e.into_error()) as gix_object::write::Error)?; map.borrow_mut().insert(id, (kind, buf)); Ok(id) } diff --git a/gix-odb/src/sink.rs b/gix-odb/src/sink.rs index c06df83631..25da7bd14b 100644 --- a/gix-odb/src/sink.rs +++ b/gix-odb/src/sink.rs @@ -53,6 +53,8 @@ impl gix_object::Write for Sink { c.reset(); } - Ok(hasher.try_finalize()?) 
+ hasher + .try_finalize() + .map_err(|e| Box::new(e.into_error()) as Box) } } diff --git a/gix-odb/src/store_impls/loose/find.rs b/gix-odb/src/store_impls/loose/find.rs index 6ac6d26de6..6451fb8ffe 100644 --- a/gix-odb/src/store_impls/loose/find.rs +++ b/gix-odb/src/store_impls/loose/find.rs @@ -9,10 +9,7 @@ use crate::store_impls::loose::{hash_path, Store, HEADER_MAX_SIZE}; #[allow(missing_docs)] pub enum Error { #[error("decompression of loose object at '{path}' failed")] - DecompressFile { - source: zlib::inflate::Error, - path: PathBuf, - }, + DecompressFile { source: gix_error::Error, path: PathBuf }, #[error("file at '{path}' showed invalid size of inflated data, expected {expected}, got {actual}")] SizeMismatch { actual: u64, expected: u64, path: PathBuf }, #[error(transparent)] @@ -165,13 +162,15 @@ impl Store { inflate .once(compressed_buf, header_buf) .map_err(|e| Error::DecompressFile { - source: e, + source: e.into_error(), path: path.to_owned(), })?; if status == zlib::Status::BufError { return Err(Error::DecompressFile { - source: zlib::inflate::Error::Status(status), + source: gix_error::Error::from_error(gix_error::message!( + "The zlib status indicated an error, status was '{status:?}'" + )), path, }); } @@ -202,7 +201,7 @@ impl Store { inflate .once(&input[..bytes_read], output) .map_err(|e| Error::DecompressFile { - source: e, + source: e.into_error(), path: path.to_owned(), })?, bytes_read, @@ -210,7 +209,9 @@ impl Store { }; if status == zlib::Status::BufError { return Err(Error::DecompressFile { - source: zlib::inflate::Error::Status(status), + source: gix_error::Error::from_error(gix_error::message!( + "The zlib status indicated an error, status was '{status:?}'" + )), path, }); } diff --git a/gix-odb/src/store_impls/loose/verify.rs b/gix-odb/src/store_impls/loose/verify.rs index afe3aa2788..b6d557ba20 100644 --- a/gix-odb/src/store_impls/loose/verify.rs +++ b/gix-odb/src/store_impls/loose/verify.rs @@ -22,14 +22,14 @@ pub mod integrity { 
#[error("{kind} object {expected} could not be hashed")] ObjectHasher { #[source] - source: gix_hash::hasher::Error, + source: gix_error::Error, kind: gix_object::Kind, expected: gix_hash::ObjectId, }, #[error("{kind} object wasn't re-encoded without change")] ObjectEncodeMismatch { #[source] - source: gix_hash::verify::Error, + source: gix_error::Error, kind: gix_object::Kind, }, #[error("Objects were deleted during iteration - try again")] @@ -86,13 +86,13 @@ impl Store { .ok_or(integrity::Error::Retry)?; sink.write_buf(object.kind, object.data) .map_err(|err| integrity::Error::ObjectHasher { - source: *err.downcast().expect("sink can only fail in hasher"), + source: gix_error::ErrorExt::raise(gix_error::message!("{err}")).into_error(), kind: object.kind, expected: id, })? .verify(&id) .map_err(|err| integrity::Error::ObjectEncodeMismatch { - source: err, + source: err.into_error(), kind: object.kind, })?; object.decode().map_err(|err| integrity::Error::ObjectDecode { diff --git a/gix-odb/src/store_impls/loose/write.rs b/gix-odb/src/store_impls/loose/write.rs index d94c16556b..d0fec6abd6 100644 --- a/gix-odb/src/store_impls/loose/write.rs +++ b/gix-odb/src/store_impls/loose/write.rs @@ -13,7 +13,7 @@ use crate::store_impls::loose; pub enum Error { #[error("Could not {message} '{path}'")] Io { - source: gix_hash::io::Error, + source: gix_error::Error, message: &'static str, path: PathBuf, }, @@ -30,12 +30,12 @@ impl gix_object::Write for Store { fn write(&self, object: &dyn WriteTo) -> Result { let mut to = self.dest()?; to.write_all(&object.loose_header()).map_err(|err| Error::Io { - source: err.into(), + source: gix_error::Error::from_error(err), message: "write header to tempfile in", path: self.path.to_owned(), })?; object.write_to(&mut to).map_err(|err| Error::Io { - source: err.into(), + source: gix_error::Error::from_error(err), message: "stream all data into tempfile in", path: self.path.to_owned(), })?; @@ -50,13 +50,13 @@ impl gix_object::Write for Store 
{ let mut to = self.dest().map_err(Box::new)?; to.write_all(&gix_object::encode::loose_header(kind, from.len() as u64)) .map_err(|err| Error::Io { - source: err.into(), + source: gix_error::Error::from_error(err), message: "write header to tempfile in", path: self.path.to_owned(), })?; to.write_all(from).map_err(|err| Error::Io { - source: err.into(), + source: gix_error::Error::from_error(err), message: "stream all data into tempfile in", path: self.path.to_owned(), })?; @@ -76,14 +76,14 @@ impl gix_object::Write for Store { let mut to = self.dest().map_err(Box::new)?; to.write_all(&gix_object::encode::loose_header(kind, size)) .map_err(|err| Error::Io { - source: err.into(), + source: gix_error::Error::from_error(err), message: "write header to tempfile in", path: self.path.to_owned(), })?; io::copy(&mut from, &mut to) .map_err(|err| Error::Io { - source: err.into(), + source: gix_error::Error::from_error(err), message: "stream all data into tempfile in", path: self.path.to_owned(), }) @@ -117,7 +117,7 @@ impl Store { } Ok(gix_hash::io::Write::new( deflate::Write::new(builder.tempfile_in(&self.path).map_err(|err| Error::Io { - source: err.into(), + source: gix_error::Error::from_error(err), message: "create named temp file in", path: self.path.to_owned(), })?), @@ -130,7 +130,7 @@ impl Store { gix_hash::io::Write { hash, inner: file }: gix_hash::io::Write, ) -> Result { let id = hash.try_finalize().map_err(|err| Error::Io { - source: err.into(), + source: err.into_error(), message: "hash tempfile in", path: self.path.to_owned(), })?; diff --git a/gix-odb/tests/Cargo.toml b/gix-odb/tests/Cargo.toml index 338b99fafe..16e9de862b 100644 --- a/gix-odb/tests/Cargo.toml +++ b/gix-odb/tests/Cargo.toml @@ -19,6 +19,7 @@ name = "integrate" path = "integrate.rs" [dev-dependencies] +gix-error = { version = "^0.0.0", path = "../../gix-error" } gix-odb = { path = ".." 
} gix-features = { path = "../../gix-features" } gix-hash = { path = "../../gix-hash" } diff --git a/gix-odb/tests/odb/store/dynamic.rs b/gix-odb/tests/odb/store/dynamic.rs index e0241f2ee4..f564211edd 100644 --- a/gix-odb/tests/odb/store/dynamic.rs +++ b/gix-odb/tests/odb/store/dynamic.rs @@ -553,7 +553,7 @@ mod disambiguate_prefix { for (index, oid) in handle.iter()?.with_ordering(order).map(Result::unwrap).enumerate() { let hex_len = hex_lengths[index % hex_lengths.len()]; let prefix = handle - .disambiguate_prefix(Candidate::new(oid, hex_len)?)? + .disambiguate_prefix(Candidate::new(oid, hex_len).map_err(gix_error::Exn::into_error)?)? .expect("object exists"); assert_eq!(prefix.hex_len(), hex_len); assert_eq!(prefix.cmp_oid(&oid), Ordering::Equal); @@ -712,7 +712,7 @@ mod lookup_prefix { for (index, oid) in handle.iter()?.with_ordering(order).map(Result::unwrap).enumerate() { for mut candidates in [None, Some(HashSet::default())] { let hex_len = hex_lengths[index % hex_lengths.len()]; - let prefix = gix_hash::Prefix::new(&oid, hex_len)?; + let prefix = gix_hash::Prefix::new(&oid, hex_len).map_err(gix_error::Exn::into_error)?; assert_eq!( handle .lookup_prefix(prefix, candidates.as_mut())? 
diff --git a/gix-pack/src/cache/delta/traverse/mod.rs b/gix-pack/src/cache/delta/traverse/mod.rs index 1e0026af78..21c4a45a34 100644 --- a/gix-pack/src/cache/delta/traverse/mod.rs +++ b/gix-pack/src/cache/delta/traverse/mod.rs @@ -21,7 +21,7 @@ pub(crate) mod util; pub enum Error { #[error("{message}")] ZlibInflate { - source: gix_features::zlib::inflate::Error, + source: gix_error::Error, message: &'static str, }, #[error("The resolver failed to obtain the pack entry bytes for the entry at {pack_offset}")] diff --git a/gix-pack/src/cache/delta/traverse/resolve.rs b/gix-pack/src/cache/delta/traverse/resolve.rs index fbb038b5d0..96e4e28711 100644 --- a/gix-pack/src/cache/delta/traverse/resolve.rs +++ b/gix-pack/src/cache/delta/traverse/resolve.rs @@ -461,7 +461,7 @@ fn decompress_all_at_once_with( out.resize(decompressed_len, 0); inflate.reset(); inflate.once(b, out).map_err(|err| Error::ZlibInflate { - source: err, + source: err.into_error(), message: "Failed to decompress entry", })?; Ok(()) diff --git a/gix-pack/src/data/file/decode/entry.rs b/gix-pack/src/data/file/decode/entry.rs index 9711961e8f..0804383001 100644 --- a/gix-pack/src/data/file/decode/entry.rs +++ b/gix-pack/src/data/file/decode/entry.rs @@ -97,7 +97,7 @@ impl File { ); self.decompress_entry_from_data_offset(entry.data_offset, inflate, out) - .map_err(Into::into) + .map_err(|e| Error::ZlibInflate(e.into_error())) } /// Obtain the [`Entry`][crate::data::Entry] at the given `offset` into the pack. 
@@ -282,11 +282,13 @@ impl File { let mut relative_delta_start = 0; let mut biggest_result_size = 0; for (delta_idx, delta) in chain.iter_mut().rev().enumerate() { - let consumed_from_data_offset = self.decompress_entry_from_data_offset( - delta.data_offset, - inflate, - &mut instructions[..delta.decompressed_size], - )?; + let consumed_from_data_offset = self + .decompress_entry_from_data_offset( + delta.data_offset, + inflate, + &mut instructions[..delta.decompressed_size], + ) + .map_err(|e| Error::ZlibInflate(e.into_error()))?; let is_last_delta_to_be_applied = delta_idx + 1 == chain_len; if is_last_delta_to_be_applied { consumed_input = Some(consumed_from_data_offset); @@ -346,7 +348,8 @@ impl File { debug_assert!(!base_entry.header.is_delta()); object_kind = base_entry.header.as_kind(); let out_base = &mut out[..out_size - total_delta_data_size]; - self.decompress_entry_from_data_offset(base_entry.data_offset, inflate, out_base)?; + self.decompress_entry_from_data_offset(base_entry.data_offset, inflate, out_base) + .map_err(|e| Error::ZlibInflate(e.into_error()))?; } (first_buffer_size, second_buffer_end) diff --git a/gix-pack/src/data/file/decode/header.rs b/gix-pack/src/data/file/decode/header.rs index 1eb2e7b2f6..ac2e1bab65 100644 --- a/gix-pack/src/data/file/decode/header.rs +++ b/gix-pack/src/data/file/decode/header.rs @@ -96,7 +96,8 @@ impl File { fn decode_delta_object_size(&self, inflate: &mut zlib::Inflate, entry: &data::Entry) -> Result { let mut buf = [0_u8; 32]; let used = self - .decompress_entry_from_data_offset_2(entry.data_offset, inflate, &mut buf)? + .decompress_entry_from_data_offset_2(entry.data_offset, inflate, &mut buf) + .map_err(|e| Error::ZlibInflate(e.into_error()))? 
.1; let buf = &buf[..used]; let (_base_size, offset) = delta::decode_header_size(buf); diff --git a/gix-pack/src/data/file/decode/mod.rs b/gix-pack/src/data/file/decode/mod.rs index 71bbf1595c..b70123cd0f 100644 --- a/gix-pack/src/data/file/decode/mod.rs +++ b/gix-pack/src/data/file/decode/mod.rs @@ -12,7 +12,7 @@ pub mod header; #[allow(missing_docs)] pub enum Error { #[error("Failed to decompress pack entry")] - ZlibInflate(#[from] gix_features::zlib::inflate::Error), + ZlibInflate(#[from] gix_error::Error), #[error("A delta chain could not be followed as the ref base with id {0} could not be found")] DeltaBaseUnresolved(gix_hash::ObjectId), #[error(transparent)] diff --git a/gix-pack/src/data/input/bytes_to_entries.rs b/gix-pack/src/data/input/bytes_to_entries.rs index 4f563621df..5f297b706b 100644 --- a/gix-pack/src/data/input/bytes_to_entries.rs +++ b/gix-pack/src/data/input/bytes_to_entries.rs @@ -52,7 +52,8 @@ where object_hash: gix_hash::Kind, ) -> Result, input::Error> { let mut header_data = [0u8; 12]; - read.read_exact(&mut header_data).map_err(gix_hash::io::Error::from)?; + read.read_exact(&mut header_data) + .map_err(|e| input::Error::Io(gix_error::Error::from_error(e)))?; let (version, num_objects) = crate::data::header::decode(&header_data)?; assert_eq!( @@ -97,7 +98,7 @@ where } None => crate::data::Entry::from_read(&mut self.read, self.offset, self.hash_len), } - .map_err(gix_hash::io::Error::from)?; + .map_err(|e| input::Error::Io(gix_error::Error::from_error(e)))?; // Decompress object to learn its compressed bytes let compressed_buf = self.compressed_buf.take().unwrap_or_else(|| Vec::with_capacity(4096)); @@ -114,7 +115,8 @@ where decompressor: &mut self.decompressor, }; - let bytes_copied = io::copy(&mut decompressed_reader, &mut io::sink()).map_err(gix_hash::io::Error::from)?; + let bytes_copied = io::copy(&mut decompressed_reader, &mut io::sink()) + .map_err(|e| input::Error::Io(gix_error::Error::from_error(e)))?; if bytes_copied != 
entry.decompressed_size { return Err(input::Error::IncompletePack { actual: bytes_copied, @@ -141,7 +143,7 @@ where let header_len = entry .header .write_to(bytes_copied, &mut header_buf.as_mut()) - .map_err(gix_hash::io::Error::from)?; + .map_err(|e| input::Error::Io(gix_error::Error::from_error(e)))?; let state = gix_features::hash::crc32_update(0, &header_buf[..header_len]); Some(gix_features::hash::crc32_update(state, &compressed)) } else { @@ -175,22 +177,24 @@ where let mut id = gix_hash::ObjectId::null(self.object_hash); if let Err(err) = self.read.read_exact(id.as_mut_slice()) { if self.mode != input::Mode::Restore { - return Err(input::Error::Io(err.into())); + return Err(input::Error::Io(gix_error::Error::from_error(err))); } } if let Some(hash) = self.hash.take() { - let actual_id = hash.try_finalize().map_err(gix_hash::io::Error::from)?; + let actual_id = hash.try_finalize().map_err(|e| input::Error::Io(e.into_error()))?; if self.mode == input::Mode::Restore { id = actual_id; } else { - actual_id.verify(&id)?; + actual_id + .verify(&id) + .map_err(|e| input::Error::Verify(e.into_error()))?; } } Some(id) } else if self.mode == input::Mode::Restore { let hash = self.hash.clone().expect("in restore mode a hash is set"); - Some(hash.try_finalize().map_err(gix_hash::io::Error::from)?) + Some(hash.try_finalize().map_err(|e| input::Error::Io(e.into_error()))?) } else { None }) @@ -272,8 +276,10 @@ where impl crate::data::File { /// Returns an iterator over [`Entries`][crate::data::input::Entry], without making use of the memory mapping. 
pub fn streaming_iter(&self) -> Result, input::Error> { - let reader = - io::BufReader::with_capacity(4096 * 8, fs::File::open(&self.path).map_err(gix_hash::io::Error::from)?); + let reader = io::BufReader::with_capacity( + 4096 * 8, + fs::File::open(&self.path).map_err(|e| input::Error::Io(gix_error::Error::from_error(e)))?, + ); BytesToEntriesIter::new_from_header( reader, input::Mode::Verify, diff --git a/gix-pack/src/data/input/entries_to_bytes.rs b/gix-pack/src/data/input/entries_to_bytes.rs index 1cf308a6d8..d1013b9748 100644 --- a/gix-pack/src/data/input/entries_to_bytes.rs +++ b/gix-pack/src/data/input/entries_to_bytes.rs @@ -64,34 +64,43 @@ where self.trailer } - fn next_inner(&mut self, entry: input::Entry) -> Result { + fn next_inner(&mut self, entry: input::Entry) -> Result { if self.num_entries == 0 { let header_bytes = crate::data::header::encode(self.data_version, 0); - self.output.write_all(&header_bytes[..])?; + self.output + .write_all(&header_bytes[..]) + .map_err(gix_error::Error::from_error)?; } self.num_entries += 1; - entry.header.write_to(entry.decompressed_size, &mut self.output)?; - self.output.write_all( - entry - .compressed - .as_deref() - .expect("caller must configure generator to keep compressed bytes"), - )?; + entry + .header + .write_to(entry.decompressed_size, &mut self.output) + .map_err(gix_error::Error::from_error)?; + self.output + .write_all( + entry + .compressed + .as_deref() + .expect("caller must configure generator to keep compressed bytes"), + ) + .map_err(gix_error::Error::from_error)?; Ok(entry) } - fn write_header_and_digest(&mut self, last_entry: Option<&mut input::Entry>) -> Result<(), gix_hash::io::Error> { + fn write_header_and_digest(&mut self, last_entry: Option<&mut input::Entry>) -> Result<(), gix_error::Error> { let header_bytes = crate::data::header::encode(self.data_version, self.num_entries); let num_bytes_written = if last_entry.is_some() { - self.output.stream_position()? 
+ self.output.stream_position().map_err(gix_error::Error::from_error)? } else { header_bytes.len() as u64 }; - self.output.rewind()?; - self.output.write_all(&header_bytes[..])?; - self.output.flush()?; + self.output.rewind().map_err(gix_error::Error::from_error)?; + self.output + .write_all(&header_bytes[..]) + .map_err(gix_error::Error::from_error)?; + self.output.flush().map_err(gix_error::Error::from_error)?; - self.output.rewind()?; + self.output.rewind().map_err(gix_error::Error::from_error)?; let interrupt_never = std::sync::atomic::AtomicBool::new(false); let digest = gix_hash::bytes( &mut self.output, @@ -99,9 +108,12 @@ where self.object_hash, &mut gix_features::progress::Discard, &interrupt_never, - )?; - self.output.write_all(digest.as_slice())?; - self.output.flush()?; + ) + .map_err(gix_error::Exn::into_error)?; + self.output + .write_all(digest.as_slice()) + .map_err(gix_error::Error::from_error)?; + self.output.flush().map_err(gix_error::Error::from_error)?; self.is_done = true; if let Some(last_entry) = last_entry { @@ -136,7 +148,7 @@ where Ok(entry) } }) - .map_err(input::Error::from), + .map_err(input::Error::Io), Err(err) => { self.is_done = true; Err(err) @@ -144,7 +156,7 @@ where }), None => match self.write_header_and_digest(None) { Ok(_) => None, - Err(err) => Some(Err(err.into())), + Err(err) => Some(Err(input::Error::Io(err))), }, } } diff --git a/gix-pack/src/data/input/entry.rs b/gix-pack/src/data/input/entry.rs index 55fb439afc..0693cc2106 100644 --- a/gix-pack/src/data/input/entry.rs +++ b/gix-pack/src/data/input/entry.rs @@ -54,7 +54,7 @@ fn compress_data(obj: &gix_object::Data<'_>) -> Result, input::Error> { let mut out = gix_features::zlib::stream::deflate::Write::new(Vec::new()); if let Err(err) = std::io::copy(&mut &*obj.data, &mut out) { match err.kind() { - std::io::ErrorKind::Other => return Err(input::Error::Io(err.into())), + std::io::ErrorKind::Other => return Err(input::Error::Io(gix_error::Error::from_error(err))), err => 
{ unreachable!("Should never see other errors than zlib, but got {:?}", err) } diff --git a/gix-pack/src/data/input/types.rs b/gix-pack/src/data/input/types.rs index 46a694a217..80c00160a1 100644 --- a/gix-pack/src/data/input/types.rs +++ b/gix-pack/src/data/input/types.rs @@ -4,11 +4,11 @@ #[allow(missing_docs)] pub enum Error { #[error("An IO operation failed while streaming an entry")] - Io(#[from] gix_hash::io::Error), + Io(#[source] gix_error::Error), #[error(transparent)] PackParse(#[from] crate::data::header::decode::Error), #[error("Failed to verify pack checksum in trailer")] - Verify(#[from] gix_hash::verify::Error), + Verify(#[source] gix_error::Error), #[error("pack is incomplete: it was decompressed into {actual} bytes but {expected} bytes where expected.")] IncompletePack { actual: u64, expected: u64 }, #[error("The object {object_id} could not be decoded or wasn't found")] diff --git a/gix-pack/src/data/output/bytes.rs b/gix-pack/src/data/output/bytes.rs index c6ee8e5f1e..414a696663 100644 --- a/gix-pack/src/data/output/bytes.rs +++ b/gix-pack/src/data/output/bytes.rs @@ -10,7 +10,7 @@ where E: std::error::Error + 'static, { #[error(transparent)] - Io(#[from] gix_hash::io::Error), + Io(#[from] gix_error::Error), #[error(transparent)] Input(E), } @@ -97,7 +97,7 @@ where let header_bytes = crate::data::header::encode(version, num_entries); self.output .write_all(&header_bytes[..]) - .map_err(gix_hash::io::Error::from)?; + .map_err(gix_error::Error::from_error)?; self.written += header_bytes.len() as u64; } match self.input.next() { @@ -117,24 +117,19 @@ where }); self.written += header .write_to(entry.decompressed_size as u64, &mut self.output) - .map_err(gix_hash::io::Error::from)? as u64; + .map_err(gix_error::Error::from_error)? 
as u64; self.written += std::io::copy(&mut &*entry.compressed_data, &mut self.output) - .map_err(gix_hash::io::Error::from)?; + .map_err(gix_error::Error::from_error)?; } } None => { - let digest = self - .output - .hash - .clone() - .try_finalize() - .map_err(gix_hash::io::Error::from)?; + let digest = self.output.hash.clone().try_finalize().map_err(gix_error::Exn::into_error)?; self.output .inner .write_all(digest.as_slice()) - .map_err(gix_hash::io::Error::from)?; + .map_err(gix_error::Error::from_error)?; self.written += digest.as_slice().len() as u64; - self.output.inner.flush().map_err(gix_hash::io::Error::from)?; + self.output.inner.flush().map_err(gix_error::Error::from_error)?; self.is_done = true; self.trailer = Some(digest); } diff --git a/gix-pack/src/index/encode.rs b/gix-pack/src/index/encode.rs index 815b134e99..1d255d5659 100644 --- a/gix-pack/src/index/encode.rs +++ b/gix-pack/src/index/encode.rs @@ -73,7 +73,7 @@ mod function { pack_hash: &gix_hash::ObjectId, kind: crate::index::Version, progress: &mut dyn DynNestedProgress, - ) -> Result { + ) -> Result { use io::Write; assert_eq!(kind, crate::index::Version::V2, "Can only write V2 packs right now"); assert!( @@ -86,8 +86,9 @@ mod function { 8 * 4096, gix_hash::io::Write::new(out, kind.hash()), )); - out.write_all(V2_SIGNATURE)?; - out.write_all(&(kind as u32).to_be_bytes())?; + out.write_all(V2_SIGNATURE).map_err(gix_error::Error::from_error)?; + out.write_all(&(kind as u32).to_be_bytes()) + .map_err(gix_error::Error::from_error)?; progress.init(Some(4), progress::steps()); let start = std::time::Instant::now(); @@ -95,19 +96,22 @@ mod function { let fan_out = fanout(&mut entries_sorted_by_oid.iter().map(|e| e.data.id.first_byte())); for value in fan_out.iter() { - out.write_all(&value.to_be_bytes())?; + out.write_all(&value.to_be_bytes()) + .map_err(gix_error::Error::from_error)?; } progress.inc(); let _info = progress.add_child_with_id("writing ids".into(), gix_features::progress::UNKNOWN); 
for entry in &entries_sorted_by_oid { - out.write_all(entry.data.id.as_slice())?; + out.write_all(entry.data.id.as_slice()) + .map_err(gix_error::Error::from_error)?; } progress.inc(); let _info = progress.add_child_with_id("writing crc32".into(), gix_features::progress::UNKNOWN); for entry in &entries_sorted_by_oid { - out.write_all(&entry.data.crc32.to_be_bytes())?; + out.write_all(&entry.data.crc32.to_be_bytes()) + .map_err(gix_error::Error::from_error)?; } progress.inc(); @@ -125,20 +129,28 @@ mod function { } else { entry.offset as u32 }; - out.write_all(&offset.to_be_bytes())?; + out.write_all(&offset.to_be_bytes()) + .map_err(gix_error::Error::from_error)?; } for value in offsets64 { - out.write_all(&value.to_be_bytes())?; + out.write_all(&value.to_be_bytes()) + .map_err(gix_error::Error::from_error)?; } } - out.write_all(pack_hash.as_slice())?; + out.write_all(pack_hash.as_slice()) + .map_err(gix_error::Error::from_error)?; let bytes_written_without_trailer = out.bytes; - let out = out.inner.into_inner().map_err(io::Error::from)?; - let index_hash = out.hash.try_finalize()?; - out.inner.write_all(index_hash.as_slice())?; - out.inner.flush()?; + let out = out + .inner + .into_inner() + .map_err(|e| gix_error::Error::from_error(io::Error::from(e)))?; + let index_hash = out.hash.try_finalize().map_err(gix_error::Exn::into_error)?; + out.inner + .write_all(index_hash.as_slice()) + .map_err(gix_error::Error::from_error)?; + out.inner.flush().map_err(gix_error::Error::from_error)?; progress.inc(); progress.show_throughput_with( diff --git a/gix-pack/src/index/traverse/error.rs b/gix-pack/src/index/traverse/error.rs index a77b011af2..32e67049b6 100644 --- a/gix-pack/src/index/traverse/error.rs +++ b/gix-pack/src/index/traverse/error.rs @@ -21,7 +21,7 @@ pub enum Error { source: crate::data::decode::Error, }, #[error("The packfiles checksum didn't match the index file checksum")] - PackMismatch(#[source] gix_hash::verify::Error), + PackMismatch(#[source] 
gix_error::Error), #[error("Failed to verify pack file checksum")] PackVerify(#[source] crate::verify::checksum::Error), #[error("Error verifying object at offset {offset} against checksum in the index file")] diff --git a/gix-pack/src/index/traverse/mod.rs b/gix-pack/src/index/traverse/mod.rs index d6b3c5cac0..70d1f17c50 100644 --- a/gix-pack/src/index/traverse/mod.rs +++ b/gix-pack/src/index/traverse/mod.rs @@ -128,7 +128,7 @@ impl index::File { Ok(if check.file_checksum() { pack.checksum() .verify(&self.pack_checksum()) - .map_err(Error::PackMismatch)?; + .map_err(|e| Error::PackMismatch(e.into_error()))?; let (pack_res, id) = parallel::join( move || pack.verify_checksum(pack_progress, should_interrupt), move || self.verify_checksum(index_progress, should_interrupt), diff --git a/gix-pack/src/index/write/error.rs b/gix-pack/src/index/write/error.rs index dbdbef717f..74e8bff1e9 100644 --- a/gix-pack/src/index/write/error.rs +++ b/gix-pack/src/index/write/error.rs @@ -3,7 +3,7 @@ #[allow(missing_docs)] pub enum Error { #[error("An error occurred when writing the pack index file")] - Io(#[from] gix_hash::io::Error), + Io(#[from] gix_error::Error), #[error("A pack entry could not be extracted")] PackEntryDecode(#[from] crate::data::input::Error), #[error("Indices of type {} cannot be written, only {} are supported", *.0 as usize, crate::index::Version::default() as usize)] diff --git a/gix-pack/src/index/write/mod.rs b/gix-pack/src/index/write/mod.rs index 2c1c0330c2..b0be51216d 100644 --- a/gix-pack/src/index/write/mod.rs +++ b/gix-pack/src/index/write/mod.rs @@ -180,7 +180,7 @@ impl crate::index::File { root_progress.inc(); - let (resolver, pack) = make_resolver().map_err(gix_hash::io::Error::from)?; + let (resolver, pack) = make_resolver().map_err(gix_error::Error::from_error)?; let sorted_pack_offsets_by_oid = { let traverse::Outcome { roots, children } = tree.traverse( resolver, @@ -224,7 +224,7 @@ impl crate::index::File { let header = 
crate::data::header::encode(pack_version, 0); let mut hasher = gix_hash::hasher(object_hash); hasher.update(&header); - hasher.try_finalize().map_err(gix_hash::io::Error::from)? + hasher.try_finalize().map_err(gix_error::Exn::into_error)? } None => return Err(Error::IteratorInvariantTrailer), }; @@ -255,9 +255,9 @@ fn modify_base( pack_entry: &crate::data::Entry, decompressed: &[u8], hash: gix_hash::Kind, -) -> Result<(), gix_hash::hasher::Error> { +) -> Result<(), gix_error::Error> { let object_kind = pack_entry.header.as_kind().expect("base object as source of iteration"); - let id = gix_object::compute_hash(hash, object_kind, decompressed)?; + let id = gix_object::compute_hash(hash, object_kind, decompressed).map_err(gix_error::Exn::into_error)?; entry.id = id; Ok(()) } diff --git a/gix-pack/src/multi_index/write.rs b/gix-pack/src/multi_index/write.rs index cfcc8bb7d1..226777df4f 100644 --- a/gix-pack/src/multi_index/write.rs +++ b/gix-pack/src/multi_index/write.rs @@ -14,7 +14,7 @@ mod error { #[allow(missing_docs)] pub enum Error { #[error(transparent)] - Io(#[from] gix_hash::io::Error), + Io(#[from] gix_error::Error), #[error("Interrupted")] Interrupted, #[error(transparent)] @@ -182,7 +182,7 @@ impl multi_index::File { index_paths_sorted.len() as u32, object_hash, ) - .map_err(gix_hash::io::Error::from)?; + .map_err(gix_error::Error::from_error)?; { progress.set_name("Writing chunks".into()); @@ -190,7 +190,7 @@ impl multi_index::File { let mut chunk_write = cf .into_write(&mut out, bytes_written) - .map_err(gix_hash::io::Error::from)?; + .map_err(gix_error::Error::from_error)?; while let Some(chunk_to_write) = chunk_write.next_chunk() { match chunk_to_write { multi_index::chunk::index_names::ID => { @@ -208,7 +208,7 @@ impl multi_index::File { ), unknown => unreachable!("BUG: forgot to implement chunk {:?}", std::str::from_utf8(&unknown)), } - .map_err(gix_hash::io::Error::from)?; + .map_err(gix_error::Error::from_error)?; progress.inc(); if 
should_interrupt.load(Ordering::Relaxed) { return Err(Error::Interrupted); @@ -217,11 +217,11 @@ impl multi_index::File { } // write trailing checksum - let multi_index_checksum = out.inner.hash.try_finalize().map_err(gix_hash::io::Error::from)?; + let multi_index_checksum = out.inner.hash.try_finalize().map_err(gix_error::Exn::into_error)?; out.inner .inner .write_all(multi_index_checksum.as_slice()) - .map_err(gix_hash::io::Error::from)?; + .map_err(gix_error::Error::from_error)?; out.progress.show_throughput(write_start); Ok(Outcome { multi_index_checksum }) diff --git a/gix-pack/src/verify.rs b/gix-pack/src/verify.rs index 8bb6ce6f55..08e10a4801 100644 --- a/gix-pack/src/verify.rs +++ b/gix-pack/src/verify.rs @@ -11,9 +11,9 @@ pub mod checksum { #[error("Interrupted by user")] Interrupted, #[error("Failed to hash data")] - Hasher(#[from] gix_hash::hasher::Error), - #[error(transparent)] - Verify(#[from] gix_hash::verify::Error), + Hasher(#[source] gix_error::Error), + #[error("Failed to verify checksum")] + Verify(#[source] gix_error::Error), } } @@ -44,20 +44,32 @@ pub fn checksum_on_disk_or_mmap( should_interrupt, ) { Ok(id) => id, - Err(gix_hash::io::Error::Io(err)) if err.kind() == std::io::ErrorKind::Interrupted => { - return Err(checksum::Error::Interrupted); + Err(err) => { + if err.to_string().contains("Interrupted") + || err + .downcast_any_ref::<std::io::Error>() + .is_some_and(|e| e.kind() == std::io::ErrorKind::Interrupted) + { + return Err(checksum::Error::Interrupted); + } + if err.downcast_any_ref::<std::io::Error>().is_some() { + // Fall back to hashing from memory-mapped data + let start = std::time::Instant::now(); + let mut hasher = gix_hash::hasher(object_hash); + hasher.update(&data[..data_len_without_trailer]); + progress.inc_by(data_len_without_trailer); + progress.show_throughput(start); + hasher + .try_finalize() + .map_err(|e| checksum::Error::Hasher(e.into_error()))?
+ } else { + return Err(checksum::Error::Hasher(err.into_error())); + } } - Err(gix_hash::io::Error::Io(_io_err)) => { - let start = std::time::Instant::now(); - let mut hasher = gix_hash::hasher(object_hash); - hasher.update(&data[..data_len_without_trailer]); - progress.inc_by(data_len_without_trailer); - progress.show_throughput(start); - hasher.try_finalize()? - } - Err(gix_hash::io::Error::Hasher(err)) => return Err(checksum::Error::Hasher(err)), }; - actual.verify(&expected)?; + actual + .verify(&expected) + .map_err(|e| checksum::Error::Verify(e.into_error()))?; Ok(actual) } diff --git a/gix-pack/tests/Cargo.toml b/gix-pack/tests/Cargo.toml index 82725f7ae7..b0fbd3a9b8 100644 --- a/gix-pack/tests/Cargo.toml +++ b/gix-pack/tests/Cargo.toml @@ -19,6 +19,7 @@ name = "pack" path = "integrate.rs" [dev-dependencies] +gix-error = { version = "^0.0.0", path = "../../gix-error" } gix-pack = { path = "..", features = ["generate", "streaming-input"] } gix-features = { path = "../../gix-features" } gix-testtools = { path = "../../tests/tools" } diff --git a/gix-pack/tests/pack/bundle.rs b/gix-pack/tests/pack/bundle.rs index 4c8951d505..cc75ca8061 100644 --- a/gix-pack/tests/pack/bundle.rs +++ b/gix-pack/tests/pack/bundle.rs @@ -103,8 +103,8 @@ mod write_to_directory { Ok(pack::bundle::write::Outcome { index: pack::index::write::Outcome { index_version: pack::index::Version::V2, - index_hash: gix_hash::ObjectId::from_hex(b"544a7204a55f6e9cacccf8f6e191ea8f83575de3")?, - data_hash: gix_hash::ObjectId::from_hex(b"0f3ea84cd1bba10c2a03d736a460635082833e59")?, + index_hash: gix_hash::ObjectId::from_hex(b"544a7204a55f6e9cacccf8f6e191ea8f83575de3").map_err(gix_error::Exn::into_error)?, + data_hash: gix_hash::ObjectId::from_hex(b"0f3ea84cd1bba10c2a03d736a460635082833e59").map_err(gix_error::Exn::into_error)?, num_objects: 42, }, pack_version: pack::data::Version::V2, diff --git a/gix-pack/tests/pack/index.rs b/gix-pack/tests/pack/index.rs index 405d5775fe..f7efd299b8 100644 --- 
a/gix-pack/tests/pack/index.rs +++ b/gix-pack/tests/pack/index.rs @@ -26,7 +26,7 @@ mod version { (b"ffffffffffffffffffffffffffffffffffffffff", None, "not in pack"), ] { assert_eq!( - file.lookup(gix_hash::ObjectId::from_hex(id)?), + file.lookup(gix_hash::ObjectId::from_hex(id).map_err(gix_error::Exn::into_error)?), *desired_index, "{assertion}", ); @@ -39,7 +39,7 @@ mod version { assert_eq!(entry.crc32, file.crc32_at_index(index)); let hex_len = (entry_index % object_hash.len_in_hex()).max(7); - let prefix = gix_hash::Prefix::new(&entry.oid, hex_len)?; + let prefix = gix_hash::Prefix::new(&entry.oid, hex_len).map_err(gix_error::Exn::into_error)?; assert_eq!( file.lookup_prefix(prefix, candidates.as_mut()) .expect("object exists") @@ -70,10 +70,10 @@ mod version { (b"ffffffffffffffffffffffffffffffffffffffff", None, "not in pack", 7), ] { for mut candidates in [None, Some(1..1)] { - let id = gix_hash::ObjectId::from_hex(id)?; + let id = gix_hash::ObjectId::from_hex(id).map_err(gix_error::Exn::into_error)?; assert_eq!(file.lookup(id), expected, "{assertion_message}"); assert_eq!( - file.lookup_prefix(gix_hash::Prefix::new(&id, hex_len)?, candidates.as_mut()), + file.lookup_prefix(gix_hash::Prefix::new(&id, hex_len).map_err(gix_error::Exn::into_error)?, candidates.as_mut()), expected.map(Ok) ); if let Some(candidates) = candidates { @@ -92,7 +92,7 @@ mod version { assert_eq!(entry.crc32, file.crc32_at_index(index), "{index} {entry:?}"); let hex_len = (entry_index % object_hash.len_in_hex()).max(7); - let prefix = gix_hash::Prefix::new(&entry.oid, hex_len)?; + let prefix = gix_hash::Prefix::new(&entry.oid, hex_len).map_err(gix_error::Exn::into_error)?; assert_eq!( file.lookup_prefix(prefix, candidates.as_mut()) .expect("object exists") diff --git a/gix-packetline/Cargo.toml b/gix-packetline/Cargo.toml index 5b153ec8d2..5933fd7c83 100644 --- a/gix-packetline/Cargo.toml +++ b/gix-packetline/Cargo.toml @@ -42,7 +42,7 @@ required-features = ["blocking-io", 
"maybe-async/is_sync"] gix-trace = { version = "^0.1.15", path = "../gix-trace" } serde = { version = "1.0.114", optional = true, default-features = false, features = ["std", "derive"] } -thiserror = "2.0.18" +gix-error = { version = "^0.0.0", path = "../gix-error" } faster-hex = { version = "0.10.0", default-features = false, features = ["std"] } bstr = { version = "1.12.0", default-features = false, features = ["std"] } # async support diff --git a/gix-packetline/src/async_io/encode.rs b/gix-packetline/src/async_io/encode.rs index bc1557e089..c52046a9e4 100644 --- a/gix-packetline/src/async_io/encode.rs +++ b/gix-packetline/src/async_io/encode.rs @@ -6,11 +6,11 @@ use std::{ use futures_io::AsyncWrite; use futures_lite::AsyncWriteExt; +use gix_error::ErrorExt; use crate::{ - encode::{u16_to_hex, Error}, - BandRef, Channel, ErrorRef, PacketLineRef, TextRef, DELIMITER_LINE, ERR_PREFIX, FLUSH_LINE, MAX_DATA_LEN, - RESPONSE_END_LINE, + encode::u16_to_hex, BandRef, Channel, ErrorRef, PacketLineRef, TextRef, DELIMITER_LINE, ERR_PREFIX, FLUSH_LINE, + MAX_DATA_LEN, RESPONSE_END_LINE, }; pin_project_lite::pin_project! 
{ @@ -59,14 +59,16 @@ impl AsyncWrite for LineWriter<'_, W> { State::Idle => { let data_len = this.prefix.len() + data.len() + this.suffix.len(); if data_len > MAX_DATA_LEN { - let err = Error::DataLengthLimitExceeded { - length_in_bytes: data_len, - }; - return Poll::Ready(Err(io::Error::other(err))); + return Poll::Ready(Err(io::Error::other( + gix_error::message!("Cannot encode more than {MAX_DATA_LEN} bytes, got {data_len}") + .raise() + .into_error(), + ))); } if data.is_empty() { - let err = Error::DataIsEmpty; - return Poll::Ready(Err(io::Error::other(err))); + return Poll::Ready(Err(io::Error::other( + gix_error::message("Empty lines are invalid").raise().into_error(), + ))); } let data_len = data_len + 4; let len_buf = u16_to_hex(data_len as u16); @@ -148,14 +150,16 @@ async fn prefixed_and_suffixed_data_to_write( ) -> io::Result { let data_len = prefix.len() + data.len() + suffix.len(); if data_len > MAX_DATA_LEN { - let err = Error::DataLengthLimitExceeded { - length_in_bytes: data_len, - }; - return Err(io::Error::other(err)); + return Err(io::Error::other( + gix_error::message!("Cannot encode more than {MAX_DATA_LEN} bytes, got {data_len}") + .raise() + .into_error(), + )); } if data.is_empty() { - let err = Error::DataIsEmpty; - return Err(io::Error::other(err)); + return Err(io::Error::other( + gix_error::message("Empty lines are invalid").raise().into_error(), + )); } let data_len = data_len + 4; diff --git a/gix-packetline/src/async_io/sidebands.rs b/gix-packetline/src/async_io/sidebands.rs index 6407ef31eb..1a1fee9d3a 100644 --- a/gix-packetline/src/async_io/sidebands.rs +++ b/gix-packetline/src/async_io/sidebands.rs @@ -273,13 +273,13 @@ where }; let line = match line { - Some(line) => line?.map_err(io::Error::other)?, + Some(line) => line?.map_err(|e| io::Error::other(e.into_error()))?, None => break (0, 0), }; match this.handle_progress.as_mut() { Some(handle_progress) => { - let band = line.decode_band().map_err(io::Error::other)?; + let band 
= line.decode_band().map_err(|e| io::Error::other(e.into_error()))?; const ENCODED_BAND: usize = 1; match band { BandRef::Data(d) => { diff --git a/gix-packetline/src/blocking_io/encode.rs b/gix-packetline/src/blocking_io/encode.rs index 0434f136ec..ded3621753 100644 --- a/gix-packetline/src/blocking_io/encode.rs +++ b/gix-packetline/src/blocking_io/encode.rs @@ -1,9 +1,10 @@ use std::io; +use gix_error::ErrorExt; + use crate::{ - encode::{u16_to_hex, Error}, - BandRef, Channel, ErrorRef, PacketLineRef, TextRef, DELIMITER_LINE, ERR_PREFIX, FLUSH_LINE, MAX_DATA_LEN, - RESPONSE_END_LINE, + encode::u16_to_hex, BandRef, Channel, ErrorRef, PacketLineRef, TextRef, DELIMITER_LINE, ERR_PREFIX, FLUSH_LINE, + MAX_DATA_LEN, RESPONSE_END_LINE, }; /// Write a response-end message to `out`. @@ -86,12 +87,16 @@ fn prefixed_and_suffixed_data_to_write( ) -> io::Result { let data_len = prefix.len() + data.len() + suffix.len(); if data_len > MAX_DATA_LEN { - return Err(io::Error::other(Error::DataLengthLimitExceeded { - length_in_bytes: data_len, - })); + return Err(io::Error::other( + gix_error::message!("Cannot encode more than {MAX_DATA_LEN} bytes, got {data_len}") + .raise() + .into_error(), + )); } if data.is_empty() { - return Err(io::Error::other(Error::DataIsEmpty)); + return Err(io::Error::other( + gix_error::message("Empty lines are invalid").raise().into_error(), + )); } let data_len = data_len + 4; diff --git a/gix-packetline/src/blocking_io/sidebands.rs b/gix-packetline/src/blocking_io/sidebands.rs index 4930935a98..d832b482a4 100644 --- a/gix-packetline/src/blocking_io/sidebands.rs +++ b/gix-packetline/src/blocking_io/sidebands.rs @@ -137,12 +137,12 @@ where if self.pos >= self.cap { let (ofs, cap) = loop { let line = match self.parent.read_line() { - Some(line) => line?.map_err(io::Error::other)?, + Some(line) => line?.map_err(|e| io::Error::other(e.into_error()))?, None => break (0, 0), }; match self.handle_progress.as_mut() { Some(handle_progress) => { - let band = 
line.decode_band().map_err(io::Error::other)?; + let band = line.decode_band().map_err(|e| io::Error::other(e.into_error()))?; const ENCODED_BAND: usize = 1; match band { BandRef::Data(d) => { diff --git a/gix-packetline/src/decode.rs b/gix-packetline/src/decode.rs index 66dec96bec..a5cefc39eb 100644 --- a/gix-packetline/src/decode.rs +++ b/gix-packetline/src/decode.rs @@ -1,36 +1,14 @@ -use bstr::BString; +use gix_error::ErrorExt; use crate::{PacketLineRef, DELIMITER_LINE, FLUSH_LINE, MAX_DATA_LEN, MAX_LINE_LEN, RESPONSE_END_LINE, U16_HEX_BYTES}; /// The error used in the [`decode`][mod@crate::decode] module -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error("Failed to decode the first four hex bytes indicating the line length: {err}")] - HexDecode { err: String }, - #[error("The data received claims to be larger than the maximum allowed size: got {length_in_bytes}, exceeds {MAX_DATA_LEN}")] - DataLengthLimitExceeded { length_in_bytes: usize }, - #[error("Received an invalid empty line")] - DataIsEmpty, - #[error("Received an invalid line of length 3")] - InvalidLineLength, - #[error("{data:?} - consumed {bytes_consumed} bytes")] - Line { data: BString, bytes_consumed: usize }, - #[error("Needing {bytes_needed} additional bytes to decode the line successfully")] - NotEnoughData { bytes_needed: usize }, -} +pub type Error = gix_error::Exn; /// pub mod band { /// The error used in [`PacketLineRef::decode_band()`][super::PacketLineRef::decode_band()]. - #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error("attempt to decode a non-side channel line or input was malformed: {band_id}")] - InvalidSideBand { band_id: u8 }, - #[error("attempt to decode a non-data line into a side-channel band")] - NonDataLine, - } + pub type Error = gix_error::Exn; } /// A utility return type to support incremental parsing of packet lines. 
@@ -72,14 +50,16 @@ pub fn hex_prefix(four_bytes: &[u8]) -> Result, Error } let mut buf = [0u8; U16_HEX_BYTES / 2]; - faster_hex::hex_decode(four_bytes, &mut buf).map_err(|err| Error::HexDecode { err: err.to_string() })?; + faster_hex::hex_decode(four_bytes, &mut buf).map_err(|err| { + gix_error::message!("Failed to decode the first four hex bytes indicating the line length: {err}").raise() + })?; let wanted_bytes = u16::from_be_bytes(buf); if wanted_bytes == 3 { - return Err(Error::InvalidLineLength); + return Err(gix_error::message("Received an invalid line of length 3").raise()); } if wanted_bytes == 4 { - return Err(Error::DataIsEmpty); + return Err(gix_error::message("Received an invalid empty line").raise()); } debug_assert!( wanted_bytes as usize > U16_HEX_BYTES, @@ -91,9 +71,8 @@ pub fn hex_prefix(four_bytes: &[u8]) -> Result, Error /// Obtain a `PacketLine` from `data` after assuring `data` is small enough to fit. pub fn to_data_line(data: &[u8]) -> Result, Error> { if data.len() > MAX_LINE_LEN { - return Err(Error::DataLengthLimitExceeded { - length_in_bytes: data.len(), - }); + let length_in_bytes = data.len(); + return Err(gix_error::message!("The data received claims to be larger than the maximum allowed size: got {length_in_bytes}, exceeds {MAX_DATA_LEN}").raise()); } Ok(PacketLineRef::Data(data)) @@ -117,9 +96,7 @@ pub fn streaming(data: &[u8]) -> Result, Error> { } } + U16_HEX_BYTES; if wanted_bytes > MAX_LINE_LEN { - return Err(Error::DataLengthLimitExceeded { - length_in_bytes: wanted_bytes, - }); + return Err(gix_error::message!("The data received claims to be larger than the maximum allowed size: got {wanted_bytes}, exceeds {MAX_DATA_LEN}").raise()); } if data_len < wanted_bytes { return Ok(Stream::Incomplete { @@ -140,6 +117,8 @@ pub fn streaming(data: &[u8]) -> Result, Error> { pub fn all_at_once(data: &[u8]) -> Result, Error> { match streaming(data)? { Stream::Complete { line, .. 
} => Ok(line), - Stream::Incomplete { bytes_needed } => Err(Error::NotEnoughData { bytes_needed }), + Stream::Incomplete { bytes_needed } => { + Err(gix_error::message!("Needing {bytes_needed} additional bytes to decode the line successfully").raise()) + } } } diff --git a/gix-packetline/src/encode.rs b/gix-packetline/src/encode.rs index 791b5355cf..477bb954de 100644 --- a/gix-packetline/src/encode.rs +++ b/gix-packetline/src/encode.rs @@ -1,14 +1,5 @@ -use super::MAX_DATA_LEN; - /// The error returned by most functions in the [`encode`](crate::encode) module -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error("Cannot encode more than {MAX_DATA_LEN} bytes, got {length_in_bytes}")] - DataLengthLimitExceeded { length_in_bytes: usize }, - #[error("Empty lines are invalid")] - DataIsEmpty, -} +pub type Error = gix_error::Exn; pub(crate) fn u16_to_hex(value: u16) -> [u8; 4] { let mut buf = [0u8; 4]; diff --git a/gix-packetline/src/lib.rs b/gix-packetline/src/lib.rs index 957a318355..b4bbc3f9c0 100644 --- a/gix-packetline/src/lib.rs +++ b/gix-packetline/src/lib.rs @@ -9,6 +9,7 @@ #![deny(missing_docs, rust_2018_idioms, unsafe_code)] use bstr::BStr; +use gix_error::ErrorExt; /// #[cfg(feature = "async-io")] @@ -127,12 +128,19 @@ impl<'a> PacketLineRef<'a> { /// Decode the band of this [`slice`](PacketLineRef::as_slice()) pub fn decode_band(&self) -> Result, decode::band::Error> { - let d = self.as_slice().ok_or(decode::band::Error::NonDataLine)?; + let d = self + .as_slice() + .ok_or_else(|| gix_error::message("attempt to decode a non-data line into a side-channel band").raise())?; Ok(match d[0] { 1 => BandRef::Data(&d[1..]), 2 => BandRef::Progress(&d[1..]), 3 => BandRef::Error(&d[1..]), - band => return Err(decode::band::Error::InvalidSideBand { band_id: band }), + band_id => { + return Err(gix_error::message!( + "attempt to decode a non-side channel line or input was malformed: {band_id}" + ) + .raise()) + } }) } } diff --git 
a/gix-packetline/tests/async-packetline.rs b/gix-packetline/tests/async-packetline.rs index d254725560..29eda6d634 100644 --- a/gix-packetline/tests/async-packetline.rs +++ b/gix-packetline/tests/async-packetline.rs @@ -6,7 +6,14 @@ pub fn assert_err_display( ) { match res { Ok(v) => panic!("Expected error '{}', got value {:?}", expected.as_ref(), v), - Err(err) => assert_eq!(err.to_string(), expected.as_ref()), + Err(err) => { + let msg = err.to_string(); + assert!( + msg.starts_with(expected.as_ref()), + "Expected error starting with '{}', got '{msg}'", + expected.as_ref() + ); + } } } diff --git a/gix-packetline/tests/blocking-packetline.rs b/gix-packetline/tests/blocking-packetline.rs index fd4c985ab0..44cac29479 100644 --- a/gix-packetline/tests/blocking-packetline.rs +++ b/gix-packetline/tests/blocking-packetline.rs @@ -6,7 +6,14 @@ pub fn assert_err_display( ) { match res { Ok(v) => panic!("Expected error '{}', got value {:?}", expected.as_ref(), v), - Err(err) => assert_eq!(err.to_string(), expected.as_ref()), + Err(err) => { + let msg = err.to_string(); + assert!( + msg.starts_with(expected.as_ref()), + "Expected error starting with '{}', got '{msg}'", + expected.as_ref() + ); + } } } diff --git a/gix-packetline/tests/decode/mod.rs b/gix-packetline/tests/decode/mod.rs index f8c94d65dd..0156dc0b65 100644 --- a/gix-packetline/tests/decode/mod.rs +++ b/gix-packetline/tests/decode/mod.rs @@ -4,14 +4,12 @@ mod streaming { ErrorRef, PacketLineRef, }; - use crate::assert_err_display; - fn assert_complete( res: Result, expected_consumed: usize, expected_value: PacketLineRef, ) -> crate::Result { - match res? { + match res.map_err(gix_error::Exn::into_error)? 
{ Stream::Complete { line, bytes_consumed } => { assert_eq!(bytes_consumed, expected_consumed); assert_eq!(line.as_bstr(), expected_value.as_bstr()); @@ -21,6 +19,19 @@ mod streaming { Ok(()) } + fn assert_err_display(res: Result, expected: &str) { + match res { + Ok(v) => panic!("Expected error '{expected}', got value {v:?}"), + Err(err) => { + let msg = err.to_string(); + assert!( + msg.starts_with(expected), + "Expected error starting with '{expected}', got '{msg}'" + ); + } + } + } + mod round_trip { use bstr::ByteSlice; use gix_packetline::{decode, decode::streaming, Channel, PacketLineRef}; @@ -33,7 +44,7 @@ mod streaming { #[maybe_async::test(feature = "blocking-io", async(feature = "async-io", async_std::test))] async fn trailing_line_feeds_are_removed_explicitly() -> crate::Result { - let line = decode::all_at_once(b"0006a\n")?; + let line = decode::all_at_once(b"0006a\n").map_err(gix_error::Exn::into_error)?; assert_eq!(line.as_text().expect("text").0.as_bstr(), b"a".as_bstr()); let mut out = Vec::new(); encode_io::write_text(&line.as_text().expect("text"), &mut out) @@ -66,7 +77,7 @@ mod streaming { &mut out, ) .await?; - let line = decode::all_at_once(&out)?; + let line = decode::all_at_once(&out).map_err(gix_error::Exn::into_error)?; assert_eq!(line.check_error().expect("err").0, b"the error"); Ok(()) } @@ -79,7 +90,7 @@ mod streaming { .as_band(*channel) .expect("data is valid for band"); encode_io::write_band(&band, &mut out).await?; - let line = decode::all_at_once(&out)?; + let line = decode::all_at_once(&out).map_err(gix_error::Exn::into_error)?; assert_eq!(line.decode_band().expect("valid band"), band); } Ok(()) @@ -141,7 +152,7 @@ mod streaming { use gix_packetline::decode::{self, streaming, Stream}; fn assert_incomplete(res: Result, expected_missing: usize) -> crate::Result { - match res? { + match res.map_err(gix_error::Exn::into_error)? { Stream::Complete { .. 
} => { panic!("expected parsing to be partial, not complete"); } diff --git a/gix-packetline/tests/read/mod.rs b/gix-packetline/tests/read/mod.rs index 847f678f87..27f3b9381e 100644 --- a/gix-packetline/tests/read/mod.rs +++ b/gix-packetline/tests/read/mod.rs @@ -26,7 +26,7 @@ pub mod streaming_peek_iter { async fn peek_follows_read_line_delimiter_logic() -> crate::Result { let mut rd = StreamingPeekableIter::new(&b"0005a00000005b"[..], &[PacketLineRef::Flush], false); let res = rd.peek_line().await; - assert_eq!(res.expect("line")??, PacketLineRef::Data(b"a")); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::Data(b"a")); rd.read_line().await; let res = rd.peek_line().await; @@ -41,7 +41,7 @@ pub mod streaming_peek_iter { rd.reset(); let res = rd.peek_line().await; assert_eq!( - res.expect("line")??, + res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::Data(b"b"), "after resetting, we get past the delimiter" ); @@ -53,7 +53,7 @@ pub mod streaming_peek_iter { let mut rd = StreamingPeekableIter::new(&b"0005a0009ERR e0000"[..], &[PacketLineRef::Flush], false); rd.fail_on_err_lines(true); let res = rd.peek_line().await; - assert_eq!(res.expect("line")??, PacketLineRef::Data(b"a")); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::Data(b"a")); rd.read_line().await; let res = rd.peek_line().await; assert_eq!( @@ -80,11 +80,11 @@ pub mod streaming_peek_iter { let mut rd = StreamingPeekableIter::new(&b"0005a0009ERR e0000"[..], &[PacketLineRef::Flush], false); rd.fail_on_err_lines(false); let res = rd.peek_line().await; - assert_eq!(res.expect("line")??, PacketLineRef::Data(b"a")); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::Data(b"a")); rd.read_line().await; let res = rd.peek_line().await; assert_eq!( - res.expect("line")??, + res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::Data(b"ERR e"), "we read the ERR but 
it's not interpreted as such" ); @@ -100,12 +100,12 @@ pub mod streaming_peek_iter { async fn peek_non_data() -> crate::Result { let mut rd = StreamingPeekableIter::new(&b"000000010002"[..], &[PacketLineRef::ResponseEnd], false); let res = rd.read_line().await; - assert_eq!(res.expect("line")??, PacketLineRef::Flush); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::Flush); let res = rd.read_line().await; - assert_eq!(res.expect("line")??, PacketLineRef::Delimiter); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::Delimiter); rd.reset_with(&[PacketLineRef::Flush]); let res = rd.read_line().await; - assert_eq!(res.expect("line")??, PacketLineRef::ResponseEnd); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::ResponseEnd); for _ in 0..2 { let res = rd.peek_line().await; assert_eq!( @@ -127,10 +127,10 @@ pub mod streaming_peek_iter { let input = b"00010009ERR e0002"; let mut rd = StreamingPeekableIter::new(&input[..], &[], false); let res = rd.read_line().await; - assert_eq!(res.expect("line")??, PacketLineRef::Delimiter); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::Delimiter); let res = rd.read_line().await; assert_eq!( - res.expect("line")??.as_bstr(), + res.expect("line")?.map_err(gix_error::Exn::into_error)?.as_bstr(), Some(b"ERR e".as_bstr()), "by default no special handling" ); @@ -138,7 +138,7 @@ pub mod streaming_peek_iter { let mut rd = StreamingPeekableIter::new(&input[..], &[], false); rd.fail_on_err_lines(true); let res = rd.read_line().await; - assert_eq!(res.expect("line")??, PacketLineRef::Delimiter); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::Delimiter); let res = rd.read_line().await; assert_eq!( res.expect("line").unwrap_err().to_string(), @@ -150,10 +150,10 @@ pub mod streaming_peek_iter { rd.replace(input); let res = rd.read_line().await; - 
assert_eq!(res.expect("line")??, PacketLineRef::Delimiter); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, PacketLineRef::Delimiter); let res = rd.read_line().await; assert_eq!( - res.expect("line")??.as_bstr(), + res.expect("line")?.map_err(gix_error::Exn::into_error)?.as_bstr(), Some(b"ERR e".as_bstr()), "a 'replace' also resets error handling to the default: false" ); @@ -165,24 +165,24 @@ pub mod streaming_peek_iter { let bytes = fixture_bytes("v1/fetch/01-many-refs.response"); let mut rd = StreamingPeekableIter::new(&bytes[..], &[PacketLineRef::Flush], false); let res = rd.peek_line().await; - assert_eq!(res.expect("line")??, first_line(), "peek returns first line"); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, first_line(), "peek returns first line"); let res = rd.peek_line().await; assert_eq!( - res.expect("line")??, + res.expect("line")?.map_err(gix_error::Exn::into_error)?, first_line(), "peeked lines are never exhausted, unless they are finally read" ); let res = rd.read_line().await; - assert_eq!(res.expect("line")??, first_line(), "read_line returns the peek once"); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, first_line(), "read_line returns the peek once"); let res = rd.read_line().await; assert_eq!( - res.expect("line")??.as_bstr(), + res.expect("line")?.map_err(gix_error::Exn::into_error)?.as_bstr(), Some(b"7814e8a05a59c0cf5fb186661d1551c75d1299b5 refs/heads/master\n".as_bstr()), "the next read_line returns the next line" ); let res = rd.peek_line().await; assert_eq!( - res.expect("line")??.as_bstr(), + res.expect("line")?.map_err(gix_error::Exn::into_error)?.as_bstr(), Some(b"7814e8a05a59c0cf5fb186661d1551c75d1299b5 refs/remotes/origin/HEAD\n".as_bstr()), "peek always gets the next line verbatim" ); @@ -202,7 +202,7 @@ pub mod streaming_peek_iter { bytes.extend(fixture_bytes("v1/fetch/01-many-refs.response")); let mut rd = StreamingPeekableIter::new(&bytes[..], 
&[PacketLineRef::Flush], false); let res = rd.read_line().await; - assert_eq!(res.expect("line")??, first_line()); + assert_eq!(res.expect("line")?.map_err(gix_error::Exn::into_error)?, first_line()); let res = exhaust(&mut rd).await; assert_eq!(res + 1, 1561, "it stops after seeing the flush byte"); rd.reset(); diff --git a/gix-packetline/tests/read/sideband.rs b/gix-packetline/tests/read/sideband.rs index b00f265277..200a0ee127 100644 --- a/gix-packetline/tests/read/sideband.rs +++ b/gix-packetline/tests/read/sideband.rs @@ -51,7 +51,7 @@ async fn read_pack_with_progress_extraction() -> crate::Result { let res = rd.read_line().await; assert_eq!( - res.expect("line")??.as_text().expect("data line").0.as_bstr(), + res.expect("line")?.map_err(gix_error::Exn::into_error)?.as_text().expect("data line").0.as_bstr(), b"NAK".as_bstr() ); let mut seen_texts = Vec::::new(); @@ -156,9 +156,9 @@ async fn readline_reads_one_packet_line_at_a_time() -> crate::Result { let mut rd = StreamingPeekableIter::new(&buf[..], &[PacketLineRef::Flush], false); let mut r = rd.as_read(); - let line = r.read_data_line().await.unwrap()??.as_bstr().unwrap(); + let line = r.read_data_line().await.unwrap()?.map_err(gix_error::Exn::into_error)?.as_bstr().unwrap(); assert_eq!(line, "808e50d724f604f69ab93c6da2919c014667bedb HEAD\0multi_ack thin-pack side-band side-band-64k ofs-delta shallow deepen-since deepen-not deepen-relative no-progress include-tag multi_ack_detailed symref=HEAD:refs/heads/master object-format=sha1 agent=git/2.28.0\n"); - let line = r.read_data_line().await.unwrap()??.as_bstr().unwrap(); + let line = r.read_data_line().await.unwrap()?.map_err(gix_error::Exn::into_error)?.as_bstr().unwrap(); assert_eq!(line, "808e50d724f604f69ab93c6da2919c014667bedb refs/heads/master\n"); let line = r.read_data_line().await; assert!(line.is_none(), "flush means `None`"); @@ -174,20 +174,20 @@ async fn readline_reads_one_packet_line_at_a_time() -> crate::Result { rd.reset(); let mut r = 
rd.as_read(); - let line = r.read_data_line().await.unwrap()??.as_bstr().unwrap(); + let line = r.read_data_line().await.unwrap()?.map_err(gix_error::Exn::into_error)?.as_bstr().unwrap(); assert_eq!(line.as_bstr(), "NAK\n"); drop(r); let mut r = rd.as_read_with_sidebands(|_, _| std::ops::ControlFlow::Continue(())); - let line = r.read_data_line().await.unwrap()??.as_bstr().unwrap(); + let line = r.read_data_line().await.unwrap()?.map_err(gix_error::Exn::into_error)?.as_bstr().unwrap(); assert_eq!( line.as_bstr(), "\x02Enumerating objects: 3, done.\n", "sidebands are ignored entirely here" ); for _ in 0..6 { - let _discard_more_progress = r.read_data_line().await.unwrap()??.as_bstr().unwrap(); + let _discard_more_progress = r.read_data_line().await.unwrap()?.map_err(gix_error::Exn::into_error)?.as_bstr().unwrap(); } let line = r.read_data_line().await; assert!(line.is_none(), "and we have reached the end"); @@ -201,7 +201,7 @@ async fn peek_past_an_actual_eof_is_an_error() -> crate::Result { let mut rd = StreamingPeekableIter::new(&input[..], &[], false); let mut reader = rd.as_read(); let res = reader.peek_data_line().await; - assert_eq!(res.expect("one line")??, b"ERR e"); + assert_eq!(res.expect("one line")?.map_err(gix_error::Exn::into_error)?, b"ERR e"); let mut buf = String::new(); reader.read_line_to_string(&mut buf).await?; @@ -225,7 +225,7 @@ async fn peek_past_a_delimiter_is_no_error() -> crate::Result { let mut rd = StreamingPeekableIter::new(&input[..], &[PacketLineRef::Flush], false); let mut reader = rd.as_read(); let res = reader.peek_data_line().await; - assert_eq!(res.expect("one line")??, b"hello"); + assert_eq!(res.expect("one line")?.map_err(gix_error::Exn::into_error)?, b"hello"); let mut buf = String::new(); reader.read_line_to_string(&mut buf).await?; diff --git a/gix-path/Cargo.toml b/gix-path/Cargo.toml index 0bdbf309ab..038de744f1 100644 --- a/gix-path/Cargo.toml +++ b/gix-path/Cargo.toml @@ -18,7 +18,7 @@ doctest = true gix-trace = { 
version = "^0.1.14", path = "../gix-trace" } gix-validate = { version = "^0.11.0", path = "../gix-validate" } bstr = { version = "1.12.0", default-features = false, features = ["std"] } -thiserror = "2.0.18" +gix-error = { version = "^0.0.0", path = "../gix-error" } [dev-dependencies] gix-testtools = { path = "../tests/tools" } diff --git a/gix-path/src/realpath.rs b/gix-path/src/realpath.rs index e3b1cbd4c2..fa8de3f4b1 100644 --- a/gix-path/src/realpath.rs +++ b/gix-path/src/realpath.rs @@ -1,20 +1,5 @@ /// The error returned by [`realpath()`][super::realpath()]. -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error("The maximum allowed number {} of symlinks in path is exceeded", .max_symlinks)] - MaxSymlinksExceeded { max_symlinks: u8 }, - #[error("Cannot resolve symlinks in path with more than {max_symlink_checks} components (takes too long)")] - ExcessiveComponentCount { max_symlink_checks: usize }, - #[error(transparent)] - ReadLink(std::io::Error), - #[error(transparent)] - CurrentWorkingDir(std::io::Error), - #[error("Empty is not a valid path")] - EmptyPath, - #[error("Ran out of path components while following parent component '..'")] - MissingParent, -} +pub type Error = gix_error::Exn; /// The default amount of symlinks we may follow when resolving a path in [`realpath()`][crate::realpath()]. pub const MAX_SYMLINKS: u8 = 32; @@ -25,6 +10,8 @@ pub(crate) mod function { Path, PathBuf, }; + use gix_error::{message, ErrorExt, ResultExt}; + use super::Error; use crate::realpath::MAX_SYMLINKS; @@ -40,7 +27,7 @@ pub(crate) mod function { .is_relative() .then(std::env::current_dir) .unwrap_or_else(|| Ok(PathBuf::default())) - .map_err(Error::CurrentWorkingDir)?; + .or_raise(|| message("Could not determine current working directory"))?; realpath_opts(path, &cwd, MAX_SYMLINKS) } @@ -48,7 +35,7 @@ pub(crate) mod function { /// This serves to avoid running into cycles or doing unreasonable amounts of work. 
pub fn realpath_opts(path: &Path, cwd: &Path, max_symlinks: u8) -> Result { if path.as_os_str().is_empty() { - return Err(Error::EmptyPath); + return Err(message("Empty is not a valid path").raise()); } let mut real_path = PathBuf::new(); @@ -67,7 +54,9 @@ pub(crate) mod function { CurDir => {} ParentDir => { if !real_path.pop() { - return Err(Error::MissingParent); + return Err( + message("Ran out of path components while following parent component '..'").raise(), + ); } } Normal(part) => { @@ -76,9 +65,13 @@ pub(crate) mod function { if real_path.is_symlink() { num_symlinks += 1; if num_symlinks > max_symlinks { - return Err(Error::MaxSymlinksExceeded { max_symlinks }); + return Err(gix_error::message!( + "The maximum allowed number {max_symlinks} of symlinks in path is exceeded" + ) + .raise()); } - let mut link_destination = std::fs::read_link(real_path.as_path()).map_err(Error::ReadLink)?; + let mut link_destination = + std::fs::read_link(real_path.as_path()).or_raise(|| message("Could not read symlink"))?; if link_destination.is_absolute() { // pushing absolute path to real_path resets it to the pushed absolute path } else { @@ -89,9 +82,7 @@ pub(crate) mod function { components = path_backing.components(); } if symlink_checks > MAX_SYMLINK_CHECKS { - return Err(Error::ExcessiveComponentCount { - max_symlink_checks: MAX_SYMLINK_CHECKS, - }); + return Err(gix_error::message!("Cannot resolve symlinks in path with more than {MAX_SYMLINK_CHECKS} components (takes too long)").raise()); } } } diff --git a/gix-path/src/relative_path.rs b/gix-path/src/relative_path.rs index c0b286d905..f2125cc12b 100644 --- a/gix-path/src/relative_path.rs +++ b/gix-path/src/relative_path.rs @@ -1,6 +1,7 @@ use std::path::Path; use bstr::{BStr, BString, ByteSlice}; +use gix_error::{ErrorExt, ResultExt}; use gix_validate::path::component::Options; use crate::{os_str_into_bstr, try_from_bstr, try_from_byte_slice}; @@ -12,7 +13,7 @@ pub(super) mod types { /// - The path separator always 
is `/`, independent of the platform. /// - Only normal components are allowed. /// - It is always represented as a bunch of bytes. - #[derive()] + #[derive(Debug)] pub struct RelativePath { inner: BStr, } @@ -37,27 +38,20 @@ impl RelativePath { } /// The error used in [`RelativePath`]. -#[derive(Debug, thiserror::Error)] -#[allow(missing_docs)] -pub enum Error { - #[error("A RelativePath is not allowed to be absolute")] - IsAbsolute, - #[error(transparent)] - ContainsInvalidComponent(#[from] gix_validate::path::component::Error), - #[error(transparent)] - IllegalUtf8(#[from] crate::Utf8Error), -} +pub type Error = gix_error::Exn; fn relative_path_from_value_and_path<'a>(path_bstr: &'a BStr, path: &Path) -> Result<&'a RelativePath, Error> { if path.is_absolute() { - return Err(Error::IsAbsolute); + return Err(gix_error::message("A RelativePath is not allowed to be absolute").raise()); } let options = Options::default(); for component in path.components() { - let component = os_str_into_bstr(component.as_os_str())?; - gix_validate::path::component(component, None, options)?; + let component = os_str_into_bstr(component.as_os_str()) + .or_raise(|| gix_error::message("Path component contains invalid UTF-8"))?; + gix_validate::path::component(component, None, options) + .or_raise(|| gix_error::message("Path contains invalid component"))?; } RelativePath::new_unchecked(BStr::new(path_bstr.as_bytes())) @@ -75,7 +69,7 @@ impl<'a> TryFrom<&'a BStr> for &'a RelativePath { type Error = Error; fn try_from(value: &'a BStr) -> Result { - let path = try_from_bstr(value)?; + let path = try_from_bstr(value).or_raise(|| gix_error::message("Path contains illegal UTF-8"))?; relative_path_from_value_and_path(value, &path) } } @@ -85,7 +79,7 @@ impl<'a> TryFrom<&'a [u8]> for &'a RelativePath { #[inline] fn try_from(value: &'a [u8]) -> Result { - let path = try_from_byte_slice(value)?; + let path = try_from_byte_slice(value).or_raise(|| gix_error::message("Path contains illegal 
UTF-8"))?; relative_path_from_value_and_path(value.as_bstr(), path) } } @@ -95,7 +89,8 @@ impl<'a, const N: usize> TryFrom<&'a [u8; N]> for &'a RelativePath { #[inline] fn try_from(value: &'a [u8; N]) -> Result { - let path = try_from_byte_slice(value.as_bstr())?; + let path = + try_from_byte_slice(value.as_bstr()).or_raise(|| gix_error::message("Path contains illegal UTF-8"))?; relative_path_from_value_and_path(value.as_bstr(), path) } } @@ -104,7 +99,7 @@ impl<'a> TryFrom<&'a BString> for &'a RelativePath { type Error = Error; fn try_from(value: &'a BString) -> Result { - let path = try_from_bstr(value.as_bstr())?; + let path = try_from_bstr(value.as_bstr()).or_raise(|| gix_error::message("Path contains illegal UTF-8"))?; relative_path_from_value_and_path(value.as_bstr(), &path) } } diff --git a/gix-path/tests/path/realpath.rs b/gix-path/tests/path/realpath.rs index 22a1384536..e38db150b6 100644 --- a/gix-path/tests/path/realpath.rs +++ b/gix-path/tests/path/realpath.rs @@ -4,7 +4,7 @@ use std::{ }; use bstr::ByteVec; -use gix_path::{realpath::Error, realpath_opts}; +use gix_path::realpath_opts; use gix_testtools::tempfile; #[test] @@ -12,12 +12,12 @@ fn fuzzed_timeout() -> crate::Result { let path = PathBuf::from(std::fs::read("tests/fixtures/fuzzed/54k-path-components.path")?.into_string()?); assert_eq!(path.components().count(), 54862); let start = std::time::Instant::now(); - assert!(matches!( - gix_path::realpath_opts(&path, Path::new("/cwd"), gix_path::realpath::MAX_SYMLINKS).unwrap_err(), - gix_path::realpath::Error::ExcessiveComponentCount { - max_symlink_checks: 2048 - } - )); + let err = gix_path::realpath_opts(&path, Path::new("/cwd"), gix_path::realpath::MAX_SYMLINKS).unwrap_err(); + assert!( + err.to_string() + .contains("Cannot resolve symlinks in path with more than 2048 components"), + "{err}" + ); assert!( start.elapsed() < Duration::from_millis(if cfg!(windows) { 1000 } else { 500 }), "took too long: {:.02} , we can't take too much time for 
this, and should keep the amount of work reasonable\ @@ -33,40 +33,38 @@ fn assorted() -> crate::Result { let cwd = cwd.path(); let symlinks_disabled = 0; + let err = realpath_opts("".as_ref(), cwd, symlinks_disabled).unwrap_err(); assert!( - matches!( - realpath_opts("".as_ref(), cwd, symlinks_disabled), - Err(Error::EmptyPath) - ), - "Empty path is not allowed" + err.to_string().contains("Empty is not a valid path"), + "Empty path is not allowed: {err}" ); assert_eq!( - realpath_opts("b/.git".as_ref(), cwd, symlinks_disabled)?, + realpath_opts("b/.git".as_ref(), cwd, symlinks_disabled).map_err(gix_error::Exn::into_error)?, cwd.join("b").join(".git"), "relative paths are prefixed with current dir" ); assert_eq!( - realpath_opts("b//.git".as_ref(), cwd, symlinks_disabled)?, + realpath_opts("b//.git".as_ref(), cwd, symlinks_disabled).map_err(gix_error::Exn::into_error)?, cwd.join("b").join(".git"), "empty path components are ignored" ); assert_eq!( - realpath_opts("./tmp/.git".as_ref(), cwd, symlinks_disabled)?, + realpath_opts("./tmp/.git".as_ref(), cwd, symlinks_disabled).map_err(gix_error::Exn::into_error)?, cwd.join("tmp").join(".git"), "path starting with dot is relative and is prefixed with current dir" ); assert_eq!( - realpath_opts("./tmp/a/./.git".as_ref(), cwd, symlinks_disabled)?, + realpath_opts("./tmp/a/./.git".as_ref(), cwd, symlinks_disabled).map_err(gix_error::Exn::into_error)?, cwd.join("tmp").join("a").join(".git"), "all ./ path components are ignored unless they the one at the beginning of the path" ); assert_eq!( - realpath_opts("./b/../tmp/.git".as_ref(), cwd, symlinks_disabled)?, + realpath_opts("./b/../tmp/.git".as_ref(), cwd, symlinks_disabled).map_err(gix_error::Exn::into_error)?, cwd.join("tmp").join(".git"), "dot dot goes to parent path component" ); @@ -77,7 +75,7 @@ fn assorted() -> crate::Result { #[cfg(windows)] let absolute_path = Path::new(r"C:\c\d\.git"); assert_eq!( - realpath_opts(absolute_path, cwd, symlinks_disabled)?, + 
realpath_opts(absolute_path, cwd, symlinks_disabled).map_err(gix_error::Exn::into_error)?, absolute_path, "absolute path without symlinks has nothing to resolve and remains unchanged" ); @@ -96,12 +94,11 @@ fn link_cycle_is_detected() -> crate::Result { create_symlink(&link_path, link_destination)?; let max_symlinks = 8; + let err = realpath_opts(&link_path.join(".git"), "".as_ref(), max_symlinks).unwrap_err(); assert!( - matches!( - realpath_opts(&link_path.join(".git"), "".as_ref(), max_symlinks), - Err(Error::MaxSymlinksExceeded { max_symlinks: 8 }) - ), - "link cycle is detected" + err.to_string() + .contains("The maximum allowed number 8 of symlinks in path is exceeded"), + "link cycle is detected: {err}" ); Ok(()) } @@ -115,7 +112,7 @@ fn symlink_with_absolute_path_gets_expanded() -> crate::Result { create_symlink(&link_from, &link_to)?; let max_symlinks = 8; assert_eq!( - realpath_opts(&link_from.join(".git"), tmp_dir.path(), max_symlinks)?, + realpath_opts(&link_from.join(".git"), tmp_dir.path(), max_symlinks).map_err(gix_error::Exn::into_error)?, link_to.join(".git"), "symlink with absolute path gets expanded" ); @@ -129,7 +126,7 @@ fn symlink_to_relative_path_gets_expanded_into_absolute_path() -> crate::Result let link_name = "pq_link"; create_symlink(dir.join("r").join(link_name), Path::new("p").join("q"))?; assert_eq!( - realpath_opts(&Path::new(link_name).join(".git"), &dir.join("r"), 8)?, + realpath_opts(&Path::new(link_name).join(".git"), &dir.join("r"), 8).map_err(gix_error::Exn::into_error)?, dir.join("r").join("p").join("q").join(".git"), "symlink to relative path gets expanded into absolute path" ); @@ -141,12 +138,11 @@ fn symlink_processing_is_disabled_if_the_value_is_zero() -> crate::Result { let cwd = canonicalized_tempdir()?; let link_name = "x_link"; create_symlink(cwd.path().join(link_name), Path::new("link destination does not exist"))?; + let err = realpath_opts(&Path::new(link_name).join(".git"), cwd.path(), 0).unwrap_err(); assert!( - 
matches!( - realpath_opts(&Path::new(link_name).join(".git"), cwd.path(), 0), - Err(Error::MaxSymlinksExceeded { max_symlinks: 0 }) - ), - "symlink processing is disabled if the value is zero" + err.to_string() + .contains("The maximum allowed number 0 of symlinks in path is exceeded"), + "symlink processing is disabled if the value is zero: {err}" ); Ok(()) } @@ -164,6 +160,6 @@ fn create_symlink(from: impl AsRef, to: impl AsRef) -> std::io::Resu } fn canonicalized_tempdir() -> crate::Result { - let canonicalized_tempdir = gix_path::realpath(std::env::temp_dir())?; + let canonicalized_tempdir = gix_path::realpath(std::env::temp_dir()).map_err(gix_error::Exn::into_error)?; Ok(tempfile::tempdir_in(canonicalized_tempdir)?) } diff --git a/gix-path/tests/path/relative_path.rs b/gix-path/tests/path/relative_path.rs index c475ccb707..ccd21fb969 100644 --- a/gix-path/tests/path/relative_path.rs +++ b/gix-path/tests/path/relative_path.rs @@ -1,5 +1,5 @@ use bstr::{BStr, BString}; -use gix_path::{relative_path::Error, RelativePath}; +use gix_path::RelativePath; #[cfg(not(windows))] #[test] @@ -10,26 +10,20 @@ fn absolute_paths_return_err() { let path_u8: &[u8] = &b"/refs/heads"[..]; let path_bstring: BString = "/refs/heads".into(); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_str), - Err(Error::IsAbsolute) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_bstr), - Err(Error::IsAbsolute) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_u8), - Err(Error::IsAbsolute) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_u8a), - Err(Error::IsAbsolute) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(&path_bstring), - Err(Error::IsAbsolute) - )); + let err = TryInto::<&RelativePath>::try_into(path_str).unwrap_err(); + assert!(err.to_string().contains("not allowed to be absolute"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_bstr).unwrap_err(); + 
assert!(err.to_string().contains("not allowed to be absolute"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_u8).unwrap_err(); + assert!(err.to_string().contains("not allowed to be absolute"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_u8a).unwrap_err(); + assert!(err.to_string().contains("not allowed to be absolute"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(&path_bstring).unwrap_err(); + assert!(err.to_string().contains("not allowed to be absolute"), "{err}"); } #[cfg(windows)] @@ -40,22 +34,17 @@ fn absolute_paths_with_backslashes_return_err() { let path_u8: &[u8] = &b"c:\\refs\\heads"[..]; let path_bstring: BString = r"c:\refs\heads".into(); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_str), - Err(Error::IsAbsolute) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_bstr), - Err(Error::IsAbsolute) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_u8), - Err(Error::IsAbsolute) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(&path_bstring), - Err(Error::IsAbsolute) - )); + let err = TryInto::<&RelativePath>::try_into(path_str).unwrap_err(); + assert!(err.to_string().contains("not allowed to be absolute"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_bstr).unwrap_err(); + assert!(err.to_string().contains("not allowed to be absolute"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_u8).unwrap_err(); + assert!(err.to_string().contains("not allowed to be absolute"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(&path_bstring).unwrap_err(); + assert!(err.to_string().contains("not allowed to be absolute"), "{err}"); } #[test] @@ -65,22 +54,17 @@ fn dots_in_paths_return_err() { let path_u8: &[u8] = &b"./heads"[..]; let path_bstring: BString = "./heads".into(); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_str), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - 
TryInto::<&RelativePath>::try_into(path_bstr), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_u8), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(&path_bstring), - Err(Error::ContainsInvalidComponent(_)) - )); + let err = TryInto::<&RelativePath>::try_into(path_str).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_bstr).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_u8).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(&path_bstring).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); } #[test] @@ -90,22 +74,17 @@ fn dots_in_paths_with_backslashes_return_err() { let path_u8: &[u8] = &b".\\heads"[..]; let path_bstring: BString = r".\heads".into(); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_str), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_bstr), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_u8), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(&path_bstring), - Err(Error::ContainsInvalidComponent(_)) - )); + let err = TryInto::<&RelativePath>::try_into(path_str).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_bstr).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_u8).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = 
TryInto::<&RelativePath>::try_into(&path_bstring).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); } #[test] @@ -115,22 +94,17 @@ fn double_dots_in_paths_return_err() { let path_u8: &[u8] = &b"../heads"[..]; let path_bstring: BString = "../heads".into(); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_str), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_bstr), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_u8), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(&path_bstring), - Err(Error::ContainsInvalidComponent(_)) - )); + let err = TryInto::<&RelativePath>::try_into(path_str).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_bstr).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_u8).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(&path_bstring).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); } #[test] @@ -140,20 +114,15 @@ fn double_dots_in_paths_with_backslashes_return_err() { let path_u8: &[u8] = &b"..\\heads"[..]; let path_bstring: BString = r"..\heads".into(); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_str), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_bstr), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(path_u8), - Err(Error::ContainsInvalidComponent(_)) - )); - assert!(matches!( - TryInto::<&RelativePath>::try_into(&path_bstring), - Err(Error::ContainsInvalidComponent(_)) - )); + let err = 
TryInto::<&RelativePath>::try_into(path_str).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_bstr).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(path_u8).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); + + let err = TryInto::<&RelativePath>::try_into(&path_bstring).unwrap_err(); + assert!(err.to_string().contains("invalid component"), "{err}"); } diff --git a/gix-pathspec/src/parse.rs b/gix-pathspec/src/parse.rs index eedf68e19f..5ebd080e4f 100644 --- a/gix-pathspec/src/parse.rs +++ b/gix-pathspec/src/parse.rs @@ -189,7 +189,9 @@ fn parse_attributes(input: &[u8]) -> Result, Err gix_attributes::parse::Iter::new(unescaped.as_bstr()) .map(|res| res.map(gix_attributes::AssignmentRef::to_owned)) .collect::, _>>() - .map_err(|e| Error::InvalidAttribute { attribute: e.attribute }) + .map_err(|e| Error::InvalidAttribute { + attribute: e.to_string().into(), + }) } fn unescape_attribute_values(input: &BStr) -> Result, Error> { diff --git a/gix-protocol/Cargo.toml b/gix-protocol/Cargo.toml index 4cca552ac5..e848082e59 100644 --- a/gix-protocol/Cargo.toml +++ b/gix-protocol/Cargo.toml @@ -86,6 +86,7 @@ gix-credentials = { version = "^0.35.0", path = "../gix-credentials", optional = gix-refspec = { version = "^0.36.0", path = "../gix-refspec", optional = true } gix-lock = { version = "^21.0.0", path = "../gix-lock", optional = true } +gix-error = { version = "^0.0.0", path = "../gix-error" } thiserror = "2.0.18" serde = { version = "1.0.114", optional = true, default-features = false, features = [ "derive", diff --git a/gix-protocol/src/handshake/refs/mod.rs b/gix-protocol/src/handshake/refs/mod.rs index 6cc90304e5..c46ae94a3a 100644 --- a/gix-protocol/src/handshake/refs/mod.rs +++ b/gix-protocol/src/handshake/refs/mod.rs @@ -14,8 +14,8 @@ pub mod parse { Io(#[from] 
std::io::Error), #[error(transparent)] DecodePacketline(#[from] gix_transport::packetline::decode::Error), - #[error(transparent)] - Id(#[from] gix_hash::decode::Error), + #[error("Could not parse object id")] + Id(#[source] gix_error::Error), #[error("{symref:?} could not be parsed. A symref is expected to look like :.")] MalformedSymref { symref: BString }, #[error("{0:?} could not be parsed. A V1 ref line should be ' '.")] diff --git a/gix-protocol/src/handshake/refs/shared.rs b/gix-protocol/src/handshake/refs/shared.rs index 7249110b17..5eeede959c 100644 --- a/gix-protocol/src/handshake/refs/shared.rs +++ b/gix-protocol/src/handshake/refs/shared.rs @@ -160,18 +160,18 @@ pub(in crate::handshake::refs) fn parse_v1( out_refs.push(InternalRef::Peeled { path: previous_path, tag, - object: gix_hash::ObjectId::from_hex(hex_hash.as_bytes())?, + object: gix_hash::ObjectId::from_hex(hex_hash.as_bytes()).map_err(|e| Error::Id(e.into_error()))?, }); } None => { let object = match gix_hash::ObjectId::from_hex(hex_hash.as_bytes()) { Ok(id) => id, Err(_) if hex_hash.as_bstr() == "shallow" => { - let id = gix_hash::ObjectId::from_hex(path)?; + let id = gix_hash::ObjectId::from_hex(path).map_err(|e| Error::Id(e.into_error()))?; out_shallow.push(ShallowUpdate::Shallow(id)); return Ok(()); } - Err(err) => return Err(err.into()), + Err(err) => return Err(Error::Id(err.into_error())), }; match out_refs .iter() @@ -205,7 +205,7 @@ pub(in crate::handshake::refs) fn parse_v2(line: &BStr) -> Result { let id = if hex_hash == b"unborn" { None } else { - Some(gix_hash::ObjectId::from_hex(hex_hash.as_bytes())?) + Some(gix_hash::ObjectId::from_hex(hex_hash.as_bytes()).map_err(|e| Error::Id(e.into_error()))?) 
}; if path.is_empty() { return Err(Error::MalformedV2RefLine(trimmed.to_owned().into())); @@ -221,7 +221,10 @@ pub(in crate::handshake::refs) fn parse_v2(line: &BStr) -> Result { } match attribute { b"peeled" => { - peeled = Some(gix_hash::ObjectId::from_hex(value.as_bytes())?); + peeled = Some( + gix_hash::ObjectId::from_hex(value.as_bytes()) + .map_err(|e| Error::Id(e.into_error()))?, + ); } b"symref-target" => { symref_target = Some(value); diff --git a/gix-quote/Cargo.toml b/gix-quote/Cargo.toml index 92b156d4cb..fede83f1b8 100644 --- a/gix-quote/Cargo.toml +++ b/gix-quote/Cargo.toml @@ -18,4 +18,4 @@ doctest = false gix-utils = { version = "^0.3.1", path = "../gix-utils" } bstr = { version = "1.12.0", default-features = false, features = ["std"] } -thiserror = "2.0.18" +gix-error = { version = "^0.0.0", path = "../gix-error" } diff --git a/gix-quote/src/ansi_c.rs b/gix-quote/src/ansi_c.rs index ec29ccb297..01c0473cf4 100644 --- a/gix-quote/src/ansi_c.rs +++ b/gix-quote/src/ansi_c.rs @@ -1,30 +1,13 @@ /// pub mod undo { - use bstr::{BStr, BString}; - /// The error returned by [`ansi_c`][crate::ansi_c::undo()]. - #[derive(Debug, thiserror::Error)] - #[allow(missing_docs)] - pub enum Error { - #[error("{message}: {input:?}")] - InvalidInput { message: String, input: BString }, - #[error("Invalid escaped value {byte} in input {input:?}")] - UnsupportedEscapeByte { byte: u8, input: BString }, - } - - impl Error { - pub(crate) fn new(message: impl ToString, input: &BStr) -> Error { - Error::InvalidInput { - message: message.to_string(), - input: input.into(), - } - } - } + pub type Error = gix_error::Exn; } use std::{borrow::Cow, io::Read}; use bstr::{BStr, BString, ByteSlice}; +use gix_error::{message, ErrorExt}; /// Unquote the given ansi-c quoted `input` string, returning it and all of the consumed bytes. 
/// @@ -40,20 +23,21 @@ pub fn undo(input: &BStr) -> Result<(Cow<'_, BStr>, usize), undo::Error> { return Ok((input.into(), input.len())); } if input.len() < 2 { - return Err(undo::Error::new("Input must be surrounded by double quotes", input)); + return Err(message!("Input must be surrounded by double quotes: {input:?}").raise()); } let original = input.as_bstr(); let mut input = &input[1..]; let mut consumed = 1; let mut out = BString::default(); fn consume_one_past(input: &mut &BStr, position: usize) -> Result { + use gix_error::{message, ErrorExt}; *input = input .get(position + 1..) - .ok_or_else(|| undo::Error::new("Unexpected end of input", input))? + .ok_or_else(|| message!("Unexpected end of input: {input:?}").raise())? .as_bstr(); let next = *input .first() - .ok_or_else(|| undo::Error::new("Unexpected end of input", input))?; + .ok_or_else(|| message!("Unexpected end of input: {input:?}").raise())?; *input = input.get(1..).unwrap_or_default().as_bstr(); Ok(next) } @@ -82,25 +66,20 @@ pub fn undo(input: &BStr) -> Result<(Cow<'_, BStr>, usize), undo::Error> { input .get(..2) .ok_or_else(|| { - undo::Error::new( - "Unexpected end of input when fetching two more octal bytes", - input, + message!( + "Unexpected end of input when fetching two more octal bytes: {input:?}" ) + .raise() })? 
.read_exact(&mut buf[1..]) .expect("impossible to fail as numbers match"); let byte = gix_utils::btoi::to_unsigned_with_radix(&buf, 8) - .map_err(|e| undo::Error::new(e, original))?; + .map_err(|e| message!("{e}: {original:?}").raise())?; out.push(byte); input = &input[2..]; consumed += 2; } - _ => { - return Err(undo::Error::UnsupportedEscapeByte { - byte: next, - input: original.into(), - }) - } + _ => return Err(message!("Invalid escaped value {next} in input {original:?}").raise()), } } _ => unreachable!("cannot find character that we didn't search for"), diff --git a/gix-ref/Cargo.toml b/gix-ref/Cargo.toml index 84ddaa0439..ad43a59df0 100644 --- a/gix-ref/Cargo.toml +++ b/gix-ref/Cargo.toml @@ -31,6 +31,7 @@ gix-validate = { version = "^0.11.0", path = "../gix-validate" } gix-actor = { version = "^0.38.0", path = "../gix-actor" } gix-lock = { version = "^21.0.0", path = "../gix-lock" } gix-tempfile = { version = "^21.0.0", default-features = false, path = "../gix-tempfile" } +gix-error = { version = "^0.0.0", path = "../gix-error" } thiserror = "2.0.18" winnow = { version = "0.7.14", features = ["simd"] } diff --git a/gix-ref/src/store/file/overlay_iter.rs b/gix-ref/src/store/file/overlay_iter.rs index 7a2368ceba..bb51dec4cd 100644 --- a/gix-ref/src/store/file/overlay_iter.rs +++ b/gix-ref/src/store/file/overlay_iter.rs @@ -412,7 +412,10 @@ impl file::Store { } Some(namespace) => { let prefix = namespace.to_owned().into_namespaced_prefix(prefix); - let prefix = prefix.as_bstr().try_into().map_err(std::io::Error::other)?; + let prefix = prefix + .as_bstr() + .try_into() + .map_err(|e: gix_error::Exn| std::io::Error::other(e.into_error()))?; let git_dir_info = IterInfo::from_prefix(self.git_dir(), prefix, self.precompose_unicode)?; let common_dir_info = self .common_dir() diff --git a/gix-refspec/tests/refspec/matching.rs b/gix-refspec/tests/refspec/matching.rs index bbe305c09d..1414855bdf 100644 --- a/gix-refspec/tests/refspec/matching.rs +++ 
b/gix-refspec/tests/refspec/matching.rs @@ -209,7 +209,7 @@ pub mod baseline { continue; } let mut tokens = line.splitn(2, |b| *b == b'\t'); - let target = ObjectId::from_hex(tokens.next().expect("hex-sha"))?; + let target = ObjectId::from_hex(tokens.next().expect("hex-sha")).map_err(gix_error::Exn::into_error)?; let name = tokens.next().expect("name"); if !name.ends_with(b"^{}") { out.push(Ref { diff --git a/gix-status/Cargo.toml b/gix-status/Cargo.toml index a2e5c1ed8f..d4cf330cc1 100644 --- a/gix-status/Cargo.toml +++ b/gix-status/Cargo.toml @@ -23,6 +23,7 @@ worktree-rewrites = ["dep:gix-dir", "dep:gix-diff"] gix-index = { version = "^0.46.0", path = "../gix-index" } gix-fs = { version = "^0.19.0", path = "../gix-fs" } gix-hash = { version = "^0.22.0", path = "../gix-hash" } +gix-error = { version = "^0.0.0", path = "../gix-error" } gix-object = { version = "^0.55.0", path = "../gix-object" } gix-path = { version = "^0.11.0", path = "../gix-path" } gix-features = { version = "^0.46.0", path = "../gix-features", features = ["progress"] } diff --git a/gix-status/src/index_as_worktree/function.rs b/gix-status/src/index_as_worktree/function.rs index 9e3f632049..d921b5d702 100644 --- a/gix-status/src/index_as_worktree/function.rs +++ b/gix-status/src/index_as_worktree/function.rs @@ -371,7 +371,7 @@ impl<'index> State<'_, 'index> { Err(err) if gix_fs::io_err::is_not_found(err.kind(), err.raw_os_error()) => { return Ok(Some(Change::Removed.into())) } - Err(err) => return Err(Error::Io(err.into())), + Err(err) => return Err(Error::Io(err)), }; self.symlink_metadata_calls.fetch_add(1, Ordering::Relaxed); let metadata = match gix_index::fs::Metadata::from_path_no_follow(worktree_path) { @@ -397,7 +397,7 @@ impl<'index> State<'_, 'index> { return Ok(Some(Change::Removed.into())) } Err(err) => { - return Err(Error::Io(err.into())); + return Err(Error::Io(err)); } }; if entry.flags.contains(gix_index::entry::Flags::INTENT_TO_ADD) { @@ -568,9 +568,8 @@ where let is_symlink 
= self.entry.mode == gix_index::entry::Mode::SYMLINK; // TODO: what to do about precompose unicode and ignore_case for symlinks let out = if is_symlink && self.core_symlinks { - let symlink_path = gix_path::to_unix_separators_on_windows(gix_path::into_bstr( - std::fs::read_link(self.path).map_err(gix_hash::io::Error::from)?, - )); + let symlink_path = + gix_path::to_unix_separators_on_windows(gix_path::into_bstr(std::fs::read_link(self.path)?)); self.buf.extend_from_slice(&symlink_path); self.worktree_bytes.fetch_add(self.buf.len() as u64, Ordering::Relaxed); Stream { @@ -582,9 +581,8 @@ where self.buf.clear(); let platform = self .attr_stack - .at_entry(self.rela_path, Some(self.entry.mode), &self.objects) - .map_err(gix_hash::io::Error::from)?; - let file = std::fs::File::open(self.path).map_err(gix_hash::io::Error::from)?; + .at_entry(self.rela_path, Some(self.entry.mode), &self.objects)?; + let file = std::fs::File::open(self.path)?; let out = self .filter .convert_to_git( @@ -595,7 +593,7 @@ where }, &mut |buf| Ok(self.objects.find_blob(self.id, buf).map(|_| Some(()))?), ) - .map_err(|err| Error::Io(io::Error::other(err).into()))?; + .map_err(|err| Error::Io(io::Error::other(err)))?; let len = match out { ToGitOutcome::Unchanged(_) => Some(self.file_len), ToGitOutcome::Process(_) | ToGitOutcome::Buffer(_) => None, diff --git a/gix-status/src/index_as_worktree/traits.rs b/gix-status/src/index_as_worktree/traits.rs index 1d5f2d6894..6d74355810 100644 --- a/gix-status/src/index_as_worktree/traits.rs +++ b/gix-status/src/index_as_worktree/traits.rs @@ -143,15 +143,15 @@ impl CompareBlobs for HashEq { match stream.as_bytes() { Some(buffer) => { let file_hash = gix_object::compute_hash(entry.id.kind(), gix_object::Kind::Blob, buffer) - .map_err(gix_hash::io::Error::from)?; + .map_err(|e| std::io::Error::other(e.into_error()))?; Ok((entry.id != file_hash).then_some(file_hash)) } None => { let file_hash = match stream.size() { None => { - 
stream.read_to_end(buf).map_err(gix_hash::io::Error::from)?; + stream.read_to_end(buf)?; gix_object::compute_hash(entry.id.kind(), gix_object::Kind::Blob, buf) - .map_err(gix_hash::io::Error::from)? + .map_err(|e| std::io::Error::other(e.into_error()))? } Some(len) => gix_object::compute_stream_hash( entry.id.kind(), @@ -160,7 +160,8 @@ impl CompareBlobs for HashEq { len, &mut gix_features::progress::Discard, &AtomicBool::default(), - )?, + ) + .map_err(|e| std::io::Error::other(e.into_error()))?, }; Ok((entry.id != file_hash).then_some(file_hash)) } diff --git a/gix-status/src/index_as_worktree/types.rs b/gix-status/src/index_as_worktree/types.rs index 26d2698141..4aaefd1ed4 100644 --- a/gix-status/src/index_as_worktree/types.rs +++ b/gix-status/src/index_as_worktree/types.rs @@ -12,7 +12,7 @@ pub enum Error { #[error("The clock was off when reading file related metadata after updating a file on disk")] Time(#[from] std::time::SystemTimeError), #[error("IO error while writing blob or reading file metadata or changing filetype")] - Io(#[from] gix_hash::io::Error), + Io(#[from] std::io::Error), #[error("Failed to obtain blob from object database")] Find(#[from] gix_object::find::existing_object::Error), #[error("Could not determine status for submodule at '{rela_path}'")] diff --git a/gix-status/src/index_as_worktree_with_renames/mod.rs b/gix-status/src/index_as_worktree_with_renames/mod.rs index b362bc858b..cce289573d 100644 --- a/gix-status/src/index_as_worktree_with_renames/mod.rs +++ b/gix-status/src/index_as_worktree_with_renames/mod.rs @@ -560,14 +560,16 @@ pub(super) mod function { &mut gix_features::progress::Discard, should_interrupt, ) - .map_err(Error::HashFile)?, + .map_err(|e| Error::HashFile(e.into()))?, ToGitOutcome::Buffer(buf) => gix_object::compute_hash(object_hash, gix_object::Kind::Blob, buf) - .map_err(|err| Error::HashFile(err.into()))?, + .map_err(|e| Error::HashFile(e.into()))?, ToGitOutcome::Process(mut stream) => { buf.clear(); - 
stream.read_to_end(buf).map_err(|err| Error::HashFile(err.into()))?; + stream + .read_to_end(buf) + .map_err(|e| Error::HashFile(gix_error::ErrorExt::raise(e).into()))?; gix_object::compute_hash(object_hash, gix_object::Kind::Blob, buf) - .map_err(|err| Error::HashFile(err.into()))? + .map_err(|e| Error::HashFile(e.into()))? } } } @@ -575,7 +577,7 @@ pub(super) mod function { let path = worktree_root.join(gix_path::from_bstr(rela_path)); let target = gix_path::into_bstr(std::fs::read_link(path).map_err(Error::ReadLink)?); gix_object::compute_hash(object_hash, gix_object::Kind::Blob, &target) - .map_err(|err| Error::HashFile(err.into()))? + .map_err(|e| Error::HashFile(e.into()))? } Kind::Directory | Kind::Repository => object_hash.null(), }) diff --git a/gix-status/src/index_as_worktree_with_renames/types.rs b/gix-status/src/index_as_worktree_with_renames/types.rs index d0e528c1e4..ecd6c0734b 100644 --- a/gix-status/src/index_as_worktree_with_renames/types.rs +++ b/gix-status/src/index_as_worktree_with_renames/types.rs @@ -19,7 +19,7 @@ pub enum Error { #[error("Could not open worktree file for reading")] OpenWorktreeFile(std::io::Error), #[error(transparent)] - HashFile(gix_hash::io::Error), + HashFile(gix_error::Error), #[error("Could not read worktree link content")] ReadLink(std::io::Error), #[error(transparent)] diff --git a/gix-transport/Cargo.toml b/gix-transport/Cargo.toml index 342d3000e2..f22774f51d 100644 --- a/gix-transport/Cargo.toml +++ b/gix-transport/Cargo.toml @@ -84,6 +84,7 @@ required-features = ["async-client"] [dependencies] gix-command = { version = "^0.7.0", path = "../gix-command" } +gix-error = { version = "^0.0.0", path = "../gix-error" } gix-features = { version = "^0.46.0", path = "../gix-features" } gix-url = { version = "^0.35.0", path = "../gix-url" } gix-sec = { version = "^0.13.0", path = "../gix-sec" } diff --git a/gix-transport/src/client/async_io/connect.rs b/gix-transport/src/client/async_io/connect.rs index 
cd5a3a7fda..ab57cde0df 100644 --- a/gix-transport/src/client/async_io/connect.rs +++ b/gix-transport/src/client/async_io/connect.rs @@ -13,9 +13,11 @@ pub(crate) mod function { pub async fn connect(url: Url, options: super::Options) -> Result, Error> where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { - let mut url = url.try_into().map_err(gix_url::parse::Error::from)?; + let mut url = url + .try_into() + .map_err(|err| Error::Url(gix_error::Exn::from(gix_error::message!("{err}")).into_error()))?; Ok(match url.scheme { gix_url::Scheme::Git => { if url.user().is_some() { diff --git a/gix-transport/src/client/blocking_io/connect.rs b/gix-transport/src/client/blocking_io/connect.rs index ffc4c5b2a5..d51358852f 100644 --- a/gix-transport/src/client/blocking_io/connect.rs +++ b/gix-transport/src/client/blocking_io/connect.rs @@ -19,9 +19,11 @@ pub(crate) mod function { pub fn connect(url: Url, options: super::Options) -> Result, Error> where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { - let mut url = url.try_into().map_err(gix_url::parse::Error::from)?; + let mut url = url + .try_into() + .map_err(|err| Error::Url(gix_error::Exn::from(gix_error::message!("{err}")).into_error()))?; Ok(match url.scheme { gix_url::Scheme::Ext(_) => return Err(Error::UnsupportedScheme(url.scheme)), gix_url::Scheme::File => { diff --git a/gix-transport/src/client/non_io_types.rs b/gix-transport/src/client/non_io_types.rs index 98f79020a4..f992c3320d 100644 --- a/gix-transport/src/client/non_io_types.rs +++ b/gix-transport/src/client/non_io_types.rs @@ -52,7 +52,7 @@ pub(crate) mod connect { #[allow(missing_docs)] pub enum Error { #[error(transparent)] - Url(#[from] gix_url::parse::Error), + Url(#[from] gix_error::Error), #[error("The git repository path could not be converted to UTF8")] PathConversion(#[from] bstr::Utf8Error), #[error("connection failed")] 
diff --git a/gix-url/Cargo.toml b/gix-url/Cargo.toml index 02ae42310e..0b1a454db6 100644 --- a/gix-url/Cargo.toml +++ b/gix-url/Cargo.toml @@ -22,14 +22,13 @@ serde = ["dep:serde", "bstr/serde"] gix-path = { version = "^0.11.0", path = "../gix-path" } serde = { version = "1.0.114", optional = true, default-features = false, features = ["std", "derive"] } -thiserror = "2.0.18" +gix-error = { version = "^0.0.0", path = "../gix-error" } bstr = { version = "1.12.0", default-features = false, features = ["std"] } percent-encoding = "2.3.1" document-features = { version = "0.2.0", optional = true } [dev-dependencies] -assert_matches = "1.5.0" gix-testtools = { path = "../tests/tools" } [package.metadata.docs.rs] diff --git a/gix-url/src/expand_path.rs b/gix-url/src/expand_path.rs index e62a8b51e4..c56121e953 100644 --- a/gix-url/src/expand_path.rs +++ b/gix-url/src/expand_path.rs @@ -2,6 +2,7 @@ use std::path::{Path, PathBuf}; use bstr::{BStr, BString, ByteSlice}; +use gix_error::ErrorExt; /// Whether a repository is resolving for the current user, or the given one. #[derive(PartialEq, Eq, Debug, Hash, Ord, PartialOrd, Clone)] @@ -23,14 +24,8 @@ impl From for Option { } /// The error used by [`parse()`], [`with()`] and [`expand_path()`](crate::expand_path()). 
-#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] -pub enum Error { - #[error("UTF8 conversion on non-unix system failed for path: {path:?}")] - IllformedUtf8 { path: BString }, - #[error("Home directory could not be obtained for {}", match user {Some(user) => format!("user '{user}'"), None => "current user".into()})] - MissingHome { user: Option }, -} +pub type Error = gix_error::Exn; fn path_segments(path: &BStr) -> Option> { if path.starts_with(b"/") { @@ -102,11 +97,16 @@ pub fn with( fn make_relative(path: &Path) -> PathBuf { path.components().skip(1).collect() } - let path = gix_path::try_from_byte_slice(path).map_err(|_| Error::IllformedUtf8 { path: path.to_owned() })?; + let path = gix_path::try_from_byte_slice(path) + .map_err(|_| gix_error::message!("UTF8 conversion on non-unix system failed for path: {path:?}").raise())?; Ok(match user { Some(user) => home_for_user(user) - .ok_or_else(|| Error::MissingHome { - user: user.to_owned().into(), + .ok_or_else(|| { + let user_desc = match >>::into(user.to_owned()) { + Some(user) => format!("user '{user}'"), + None => "current user".into(), + }; + gix_error::message!("Home directory could not be obtained for {user_desc}").raise() })? .join(make_relative(path)), None => path.into(), diff --git a/gix-url/src/parse.rs b/gix-url/src/parse.rs index ceaedc5c45..129e211c45 100644 --- a/gix-url/src/parse.rs +++ b/gix-url/src/parse.rs @@ -1,39 +1,11 @@ -use std::convert::Infallible; - use bstr::{BStr, BString, ByteSlice}; +use gix_error::{message, ErrorExt, ResultExt}; use crate::Scheme; /// The error returned by [parse()](crate::parse()). 
-#[derive(Debug, thiserror::Error)] #[allow(missing_docs)] -pub enum Error { - #[error("{} \"{url}\" is not valid UTF-8", kind.as_str())] - Utf8 { - url: BString, - kind: UrlKind, - source: std::str::Utf8Error, - }, - #[error("{} {url:?} can not be parsed as valid URL", kind.as_str())] - Url { - url: String, - kind: UrlKind, - source: crate::simple_url::UrlParseError, - }, - - #[error("The host portion of the following URL is too long ({} bytes, {len} bytes total): {truncated_url:?}", truncated_url.len())] - TooLong { truncated_url: BString, len: usize }, - #[error("{} \"{url}\" does not specify a path to a repository", kind.as_str())] - MissingRepositoryPath { url: BString, kind: UrlKind }, - #[error("URL {url:?} is relative which is not allowed in this context")] - RelativeUrl { url: String }, -} - -impl From for Error { - fn from(_: Infallible) -> Self { - unreachable!("Cannot actually happen, but it seems there can't be a blanket impl for this") - } -} +pub type Error = gix_error::Exn; /// #[derive(Debug, Clone, Copy)] @@ -108,19 +80,23 @@ pub(crate) fn url(input: &BStr, protocol_end: usize) -> Result MAX_LEN || protocol_end > MAX_LEN { - return Err(Error::TooLong { - truncated_url: input[..(protocol_end + "://".len() + MAX_LEN).min(input.len())].into(), - len: input.len(), - }); + let truncated_url: bstr::BString = input[..(protocol_end + "://".len() + MAX_LEN).min(input.len())].into(); + let len = input.len(); + return Err(message!( + "The host portion of the following URL is too long ({} bytes, {len} bytes total): {truncated_url:?}", + truncated_url.len() + ) + .raise()); } let (input, url) = input_to_utf8_and_url(input, UrlKind::Url)?; let scheme = Scheme::from(url.scheme.as_str()); if matches!(scheme, Scheme::Git | Scheme::Ssh) && url.path.is_empty() { - return Err(Error::MissingRepositoryPath { - url: input.into(), - kind: UrlKind::Url, - }); + return Err(message!( + "{} \"{input}\" does not specify a path to a repository", + UrlKind::Url.as_str() + ) + 
.raise()); } // Normalize empty path to "/" for http/https URLs only @@ -188,10 +164,12 @@ pub(crate) fn scp(input: &BStr, colon: usize) -> Result { let path = &path[1..]; if path.is_empty() { - return Err(Error::MissingRepositoryPath { - url: input.to_owned().into(), - kind: UrlKind::Scp, - }); + return Err(message!( + "{} \"{}\" does not specify a path to a repository", + UrlKind::Scp.as_str(), + input + ) + .raise()); } // The path returned by the parsed url often has the wrong number of leading `/` characters but @@ -199,11 +177,8 @@ pub(crate) fn scp(input: &BStr, colon: usize) -> Result { // To avoid the various off-by-one errors caused by the `/` characters, we keep using the path // determined above and can therefore skip parsing it here as well. let url_string = format!("ssh://{host}"); - let url = crate::simple_url::ParsedUrl::parse(&url_string).map_err(|source| Error::Url { - url: input.to_owned(), - kind: UrlKind::Scp, - source, - })?; + let url = crate::simple_url::ParsedUrl::parse(&url_string) + .or_raise(|| message!("{} {:?} can not be parsed as valid URL", UrlKind::Scp.as_str(), input))?; // For SCP-like SSH URLs, strip leading '/' from paths starting with '/~' // e.g., "user@host:/~repo" -> path is "~repo", not "/~repo" @@ -245,10 +220,12 @@ pub(crate) fn file_url(input: &BStr, protocol_colon: usize) -> Result Result Result { if input.is_empty() { - return Err(Error::MissingRepositoryPath { - url: input.to_owned(), - kind: UrlKind::Local, - }); + return Err(message!( + "{} \"{}\" does not specify a path to a repository", + UrlKind::Local.as_str(), + input + ) + .raise()); } Ok(crate::Url { @@ -315,11 +294,7 @@ pub(crate) fn local(input: &BStr) -> Result { } fn input_to_utf8(input: &BStr, kind: UrlKind) -> Result<&str, Error> { - std::str::from_utf8(input).map_err(|source| Error::Utf8 { - url: input.to_owned(), - kind, - source, - }) + std::str::from_utf8(input).or_raise(|| message!("{} \"{}\" is not valid UTF-8", kind.as_str(), input)) } fn 
input_to_utf8_and_url(input: &BStr, kind: UrlKind) -> Result<(&str, crate::simple_url::ParsedUrl), Error> { @@ -327,17 +302,12 @@ fn input_to_utf8_and_url(input: &BStr, kind: UrlKind) -> Result<(&str, crate::si crate::simple_url::ParsedUrl::parse(input) .map(|url| (input, url)) .map_err(|source| { - // If the parser rejected it as RelativeUrlWithoutBase, map to Error::RelativeUrl + // If the parser rejected it as "relative URL without a base", map to RelativeUrl // to match the expected error type for malformed URLs like "invalid:://" - match source { - crate::simple_url::UrlParseError::RelativeUrlWithoutBase => { - Error::RelativeUrl { url: input.to_owned() } - } - _ => Error::Url { - url: input.to_owned(), - kind, - source, - }, + if source.to_string().contains("relative URL without a base") { + message!("URL {:?} is relative which is not allowed in this context", input).raise() + } else { + source.raise(message!("{} {:?} can not be parsed as valid URL", kind.as_str(), input)) } }) } diff --git a/gix-url/src/simple_url.rs b/gix-url/src/simple_url.rs index 3a9d2fc166..aacda80c1d 100644 --- a/gix-url/src/simple_url.rs +++ b/gix-url/src/simple_url.rs @@ -1,3 +1,4 @@ +use gix_error::{message, ErrorExt}; use percent_encoding::percent_decode_str; /// A minimal URL parser that extracts only what we need for git URLs. @@ -13,18 +14,8 @@ pub(crate) struct ParsedUrl { } /// Minimal parse error type to replace url::ParseError -#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error)] #[allow(missing_docs)] -pub enum UrlParseError { - #[error("relative URL without a base")] - RelativeUrlWithoutBase, - #[error("invalid port number - must be between 1-65535")] - InvalidPort, - #[error("invalid domain character")] - InvalidDomainCharacter, - #[error("Scheme requires host")] - SchemeRequiresHost, -} +pub type UrlParseError = gix_error::Exn; /// Check if a character is valid in a URL scheme. /// Valid scheme characters: alphanumeric, +, -, or . 
@@ -38,7 +29,7 @@ fn percent_decode(s: &str) -> Result { percent_decode_str(s) .decode_utf8() .map(std::borrow::Cow::into_owned) - .map_err(|_| UrlParseError::InvalidDomainCharacter) + .map_err(|_| message("invalid domain character").raise()) } impl ParsedUrl { @@ -47,26 +38,28 @@ impl ParsedUrl { pub(crate) fn parse(input: &str) -> Result { // Validate that the entire URL doesn't contain any whitespace (per RFC 3986) if input.chars().any(char::is_whitespace) { - return Err(UrlParseError::InvalidDomainCharacter); + return Err(message("invalid domain character").raise()); } // Find scheme by looking for first ':' - let first_colon = input.find(':').ok_or(UrlParseError::RelativeUrlWithoutBase)?; + let first_colon = input + .find(':') + .ok_or_else(|| message("relative URL without a base").raise())?; let scheme_str = &input[..first_colon]; // Normalize scheme to lowercase for case-insensitive matching (matches url crate behavior) let scheme = scheme_str.to_ascii_lowercase(); let Some(after_scheme) = input[first_colon..].strip_prefix("://") else { - return Err(UrlParseError::RelativeUrlWithoutBase); + return Err(message("relative URL without a base").raise()); }; // Check for relative URL (scheme without proper authority) if scheme_str.is_empty() { - return Err(UrlParseError::RelativeUrlWithoutBase); + return Err(message("relative URL without a base").raise()); } // Validate scheme characters (check original before lowercase conversion) if !scheme_str.chars().all(is_valid_scheme_char) { - return Err(UrlParseError::RelativeUrlWithoutBase); + return Err(message("relative URL without a base").raise()); } // Find path start (first '/' after scheme) @@ -98,7 +91,7 @@ impl ParsedUrl { let (h, p) = Self::parse_host_port(host_port)?; // If we have user info, we must have a host if h.is_none() { - return Err(UrlParseError::InvalidDomainCharacter); + return Err(message("invalid domain character").raise()); } (user, pass, h, p) } else { @@ -111,7 +104,7 @@ impl ParsedUrl { // 
Scheme is already lowercase at this point let requires_host = matches!(scheme.as_str(), "http" | "https" | "git" | "ssh" | "ftp" | "ftps"); if requires_host && host.is_none() { - return Err(UrlParseError::SchemeRequiresHost); + return Err(message("Scheme requires host").raise()); } Ok(ParsedUrl { @@ -144,19 +137,21 @@ impl ParsedUrl { let host = Some(host_port.to_ascii_lowercase()); return Ok((host, None)); } - let port = port_str.parse::().map_err(|_| UrlParseError::InvalidPort)?; + let port = port_str + .parse::() + .map_err(|_| message("invalid port number - must be between 1-65535").raise())?; // Validate port is in valid range (1-65535, port 0 is invalid) if port == 0 { - return Err(UrlParseError::InvalidPort); + return Err(message("invalid port number - must be between 1-65535").raise()); } // IPv6 addresses are case-insensitive, normalize to lowercase let host = Some(host_port[..=bracket_end].to_ascii_lowercase()); return Ok((host, Some(port))); } else { - return Err(UrlParseError::InvalidDomainCharacter); + return Err(message("invalid domain character").raise()); } } else { - return Err(UrlParseError::InvalidDomainCharacter); + return Err(message("invalid domain character").raise()); } } @@ -182,10 +177,10 @@ impl ParsedUrl { let host = Self::normalize_hostname(before_last_colon)?; let port = after_last_colon .parse::() - .map_err(|_| UrlParseError::InvalidPort)?; + .map_err(|_| message("invalid port number - must be between 1-65535").raise())?; // Validate port is in valid range (1-65535, port 0 is invalid) if port == 0 { - return Err(UrlParseError::InvalidPort); + return Err(message("invalid port number - must be between 1-65535").raise()); } return Ok((Some(host), Some(port))); } @@ -211,7 +206,7 @@ impl ParsedUrl { // Reject invalid characters: ?, space, tab, newline, etc. // These characters are forbidden in URLs per RFC 3986 if host.chars().any(|c| c == '?' 
|| c.is_whitespace()) { - return Err(UrlParseError::InvalidDomainCharacter); + return Err(message("invalid domain character").raise()); } // Only normalize if it looks like a valid DNS hostname diff --git a/gix-url/tests/url/access.rs b/gix-url/tests/url/access.rs index 262926f2af..2a8f3aa886 100644 --- a/gix-url/tests/url/access.rs +++ b/gix-url/tests/url/access.rs @@ -3,27 +3,38 @@ mod canonicalized { #[test] fn non_file_scheme_is_noop() -> crate::Result { - let url = gix_url::parse("https://github.com/byron/gitoxide".into())?; - assert_eq!(url.canonicalized(&std::env::current_dir()?)?, url); + let url = gix_url::parse("https://github.com/byron/gitoxide".into()).map_err(gix_error::Exn::into_error)?; + assert_eq!( + url.canonicalized(&std::env::current_dir()?) + .map_err(gix_error::Exn::into_error)?, + url + ); Ok(()) } #[test] fn absolute_file_url_does_nothing() -> crate::Result { #[cfg(not(windows))] - let url = gix_url::parse("/this/path/does/not/exist".into())?; + let url = gix_url::parse("/this/path/does/not/exist".into()).map_err(gix_error::Exn::into_error)?; #[cfg(windows)] - let url = gix_url::parse(r"C:\non\existing".into())?; - assert_eq!(url.canonicalized(&std::env::current_dir()?)?, url); + let url = gix_url::parse(r"C:\non\existing".into()).map_err(gix_error::Exn::into_error)?; + assert_eq!( + url.canonicalized(&std::env::current_dir()?) + .map_err(gix_error::Exn::into_error)?, + url + ); Ok(()) } #[test] fn file_that_is_current_dir_is_absolutized() -> crate::Result { - let url = gix_url::parse(".".into())?; + let url = gix_url::parse(".".into()).map_err(gix_error::Exn::into_error)?; assert!(gix_path::from_bstr(Cow::Borrowed(url.path.as_ref())).is_relative()); assert!(gix_path::from_bstr(Cow::Borrowed( - url.canonicalized(&std::env::current_dir()?)?.path.as_ref() + url.canonicalized(&std::env::current_dir()?) + .map_err(gix_error::Exn::into_error)? 
+ .path + .as_ref() )) .is_absolute()); Ok(()) @@ -34,7 +45,7 @@ use gix_url::ArgumentSafety; #[test] fn user() -> crate::Result { - let mut url = gix_url::parse("https://user:password@host/path".into())?; + let mut url = gix_url::parse("https://user:password@host/path".into()).map_err(gix_error::Exn::into_error)?; assert_eq!(url.user(), Some("user")); assert_eq!(url.set_user(Some("new-user".into())), Some("user".into())); @@ -45,7 +56,7 @@ fn user() -> crate::Result { #[test] fn password() -> crate::Result { - let mut url = gix_url::parse("https://user:password@host/path".into())?; + let mut url = gix_url::parse("https://user:password@host/path".into()).map_err(gix_error::Exn::into_error)?; assert_eq!(url.password(), Some("password")); assert_eq!(url.set_password(Some("new-pass".into())), Some("password".into())); @@ -56,12 +67,12 @@ fn password() -> crate::Result { #[test] fn mutation_roundtrip() -> crate::Result { - let mut url = gix_url::parse("https://user@host/path".into())?; + let mut url = gix_url::parse("https://user@host/path".into()).map_err(gix_error::Exn::into_error)?; url.set_user(Some("newuser".into())); url.set_password(Some("secret".into())); let serialized = url.to_bstring(); - let reparsed = gix_url::parse(serialized.as_ref())?; + let reparsed = gix_url::parse(serialized.as_ref()).map_err(gix_error::Exn::into_error)?; assert_eq!(url, reparsed); assert_eq!(reparsed.user(), Some("newuser")); @@ -73,10 +84,10 @@ fn mutation_roundtrip() -> crate::Result { #[test] fn from_bytes_roundtrip() -> crate::Result { let original = "https://user:password@example.com:8080/path/to/repo"; - let url = gix_url::parse(original.into())?; + let url = gix_url::parse(original.into()).map_err(gix_error::Exn::into_error)?; let bytes = url.to_bstring(); - let from_bytes = gix_url::Url::from_bytes(bytes.as_ref())?; + let from_bytes = gix_url::Url::from_bytes(bytes.as_ref()).map_err(gix_error::Exn::into_error)?; assert_eq!(url, from_bytes); 
assert_eq!(from_bytes.to_bstring(), bytes); @@ -86,9 +97,9 @@ fn from_bytes_roundtrip() -> crate::Result { #[test] fn from_bytes_with_non_utf8_path() -> crate::Result { - let url = gix_url::parse(b"/path/to\xff/repo".as_slice().into())?; + let url = gix_url::parse(b"/path/to\xff/repo".as_slice().into()).map_err(gix_error::Exn::into_error)?; let bytes = url.to_bstring(); - let from_bytes = gix_url::Url::from_bytes(bytes.as_ref())?; + let from_bytes = gix_url::Url::from_bytes(bytes.as_ref()).map_err(gix_error::Exn::into_error)?; assert_eq!(url, from_bytes); assert_eq!(from_bytes.path, url.path); @@ -98,7 +109,7 @@ fn from_bytes_with_non_utf8_path() -> crate::Result { #[test] fn user_argument_safety() -> crate::Result { - let url = gix_url::parse("ssh://-Fconfigfile@foo/bar".into())?; + let url = gix_url::parse("ssh://-Fconfigfile@foo/bar".into()).map_err(gix_error::Exn::into_error)?; assert_eq!(url.user(), Some("-Fconfigfile")); assert_eq!(url.user_as_argument(), ArgumentSafety::Dangerous("-Fconfigfile")); @@ -116,7 +127,7 @@ fn user_argument_safety() -> crate::Result { #[test] fn host_argument_safety() -> crate::Result { - let url = gix_url::parse("ssh://-oProxyCommand=open$IFS-aCalculator/foo".into())?; + let url = gix_url::parse("ssh://-oProxyCommand=open$IFS-aCalculator/foo".into()).map_err(gix_error::Exn::into_error)?; assert_eq!(url.user(), None); assert_eq!(url.user_as_argument(), ArgumentSafety::Absent); @@ -141,7 +152,7 @@ fn host_argument_safety() -> crate::Result { #[test] fn path_argument_safety() -> crate::Result { - let url = gix_url::parse("ssh://foo/-oProxyCommand=open$IFS-aCalculator".into())?; + let url = gix_url::parse("ssh://foo/-oProxyCommand=open$IFS-aCalculator".into()).map_err(gix_error::Exn::into_error)?; assert_eq!(url.user(), None); assert_eq!(url.user_as_argument(), ArgumentSafety::Absent); @@ -163,7 +174,7 @@ fn path_argument_safety() -> crate::Result { #[test] fn all_argument_safety_safe() -> crate::Result { - let url = 
gix_url::parse("ssh://user.name@example.com/path/to/file".into())?; + let url = gix_url::parse("ssh://user.name@example.com/path/to/file".into()).map_err(gix_error::Exn::into_error)?; assert_eq!(url.user(), Some("user.name")); assert_eq!(url.user_as_argument(), ArgumentSafety::Usable("user.name")); @@ -182,7 +193,7 @@ fn all_argument_safety_safe() -> crate::Result { #[test] fn all_argument_safety_not_safe() -> crate::Result { let all_bad = "ssh://-Fconfigfile@-oProxyCommand=open$IFS-aCalculator/-oProxyCommand=open$IFS-aCalculator"; - let url = gix_url::parse(all_bad.into())?; + let url = gix_url::parse(all_bad.into()).map_err(gix_error::Exn::into_error)?; assert_eq!(url.user(), Some("-Fconfigfile")); assert_eq!(url.user_as_argument(), ArgumentSafety::Dangerous("-Fconfigfile")); diff --git a/gix-url/tests/url/expand_path.rs b/gix-url/tests/url/expand_path.rs index 88ae4cbed3..0939018d34 100644 --- a/gix-url/tests/url/expand_path.rs +++ b/gix-url/tests/url/expand_path.rs @@ -26,22 +26,24 @@ fn user_home(name: &str) -> std::path::PathBuf { #[test] fn without_username() -> crate::Result { - let (user, resolved_path) = expand_path::parse(b"/~/hello/git".as_bstr())?; + let (user, resolved_path) = expand_path::parse(b"/~/hello/git".as_bstr()).map_err(gix_error::Exn::into_error)?; let resolved_path = expand_path::with(user.as_ref(), resolved_path.as_ref(), |user: &ForUser| match user { ForUser::Current => Some(user_home("byron")), ForUser::Name(name) => Some(format!("/home/{name}").into()), - })?; + }) + .map_err(gix_error::Exn::into_error)?; assert_eq!(resolved_path, expected_path()); Ok(()) } #[test] fn with_username() -> crate::Result { - let (user, resolved_path) = expand_path::parse(b"/~byron/hello/git".as_bstr())?; + let (user, resolved_path) = expand_path::parse(b"/~byron/hello/git".as_bstr()).map_err(gix_error::Exn::into_error)?; let resolved_path = expand_path::with(user.as_ref(), resolved_path.as_ref(), |user: &ForUser| match user { ForUser::Current => 
unreachable!("we have a name"), ForUser::Name(name) => Some(user_home(name.to_str_lossy().as_ref())), - })?; + }) + .map_err(gix_error::Exn::into_error)?; assert_eq!(resolved_path, expected_path()); Ok(()) } diff --git a/gix-url/tests/url/parse/file.rs b/gix-url/tests/url/parse/file.rs index 185194e13d..6656ba10e0 100644 --- a/gix-url/tests/url/parse/file.rs +++ b/gix-url/tests/url/parse/file.rs @@ -54,7 +54,7 @@ fn no_username_expansion_for_file_paths_with_protocol() -> crate::Result { #[test] fn non_utf8_file_path_without_protocol() -> crate::Result { - let url = gix_url::parse(b"/path/to\xff/git".as_bstr())?; + let url = gix_url::parse(b"/path/to\xff/git".as_bstr()).map_err(gix_error::Exn::into_error)?; assert_eq!(url, url_alternate(Scheme::File, None, None, None, b"/path/to\xff/git")); let url_lossless = url.to_bstring(); assert_eq!( @@ -102,15 +102,15 @@ fn no_relative_paths_if_protocol() -> crate::Result { assert_url_roundtrip("file://a/", url(Scheme::File, None, "a", None, b"/"))?; if cfg!(windows) { assert_eq!( - gix_url::parse(r"file://.\".into())?, + gix_url::parse(r"file://.\".into()).map_err(gix_error::Exn::into_error)?, url(Scheme::File, None, ".", None, br"\"), "we are just as none-sensical as git here due to special handling." ); } else { - assert_matches::assert_matches!( - gix_url::parse(r"file://.\".into()), - Err(gix_url::parse::Error::MissingRepositoryPath { .. 
}), - "DEVIATION: on windows, this parses with git into something nonsensical Diag: url=file://./ Diag: protocol=file Diag: hostandport=./ Diag: path=//./" + let err = gix_url::parse(r"file://.\".into()).unwrap_err(); + assert!( + err.to_string().contains("does not specify a path to a repository"), + "DEVIATION: on windows, this parses with git into something nonsensical Diag: url=file://./ Diag: protocol=file Diag: hostandport=./ Diag: path=//./; got error: {err}" ); } Ok(()) @@ -236,7 +236,8 @@ mod unix { None, b"/repo".into(), false, - )?, + ) + .map_err(gix_error::Exn::into_error)?, ) } diff --git a/gix-url/tests/url/parse/http.rs b/gix-url/tests/url/parse/http.rs index 33f0676270..2af5855347 100644 --- a/gix-url/tests/url/parse/http.rs +++ b/gix-url/tests/url/parse/http.rs @@ -12,7 +12,7 @@ fn username_expansion_is_unsupported() -> crate::Result { #[test] fn empty_user_cannot_roundtrip() -> crate::Result { - let actual = gix_url::parse("http://@example.com/~byron/hello".into())?; + let actual = gix_url::parse("http://@example.com/~byron/hello".into()).map_err(gix_error::Exn::into_error)?; let expected = url(Scheme::Http, None, "example.com", None, b"/~byron/hello"); assert_eq!(actual, expected); assert_eq!( @@ -49,7 +49,8 @@ fn username_and_password_with_spaces_and_port() -> crate::Result { Some(8080), b"/~byron/hello".into(), false, - )?; + ) + .map_err(gix_error::Exn::into_error)?; assert_url_roundtrip( "http://user%20name:password%20secret@example.com:8080/~byron/hello", expected.clone(), @@ -69,7 +70,7 @@ fn only_password() -> crate::Result { #[test] fn username_and_empty_password() -> crate::Result { - let actual = gix_url::parse("http://user:@example.com/~byron/hello".into())?; + let actual = gix_url::parse("http://user:@example.com/~byron/hello".into()).map_err(gix_error::Exn::into_error)?; let expected = url(Scheme::Http, "user", "example.com", None, b"/~byron/hello"); assert_eq!(actual, expected); assert_eq!( @@ -142,14 +143,14 @@ fn 
https_with_ipv6_user_and_port() -> crate::Result { #[test] fn percent_encoded_path() -> crate::Result { - let url = gix_url::parse("https://example.com/path/with%20spaces/file".into())?; + let url = gix_url::parse("https://example.com/path/with%20spaces/file".into()).map_err(gix_error::Exn::into_error)?; assert_eq!(url.path, "/path/with spaces/file", "paths are now decoded"); Ok(()) } #[test] fn percent_encoded_international_path() -> crate::Result { - let url = gix_url::parse("https://example.com/caf%C3%A9".into())?; + let url = gix_url::parse("https://example.com/caf%C3%A9".into()).map_err(gix_error::Exn::into_error)?; assert_eq!(url.path, "/café", "international characters are decoded in path"); Ok(()) } diff --git a/gix-url/tests/url/parse/invalid.rs b/gix-url/tests/url/parse/invalid.rs index f403f76078..f5b3c2ef36 100644 --- a/gix-url/tests/url/parse/invalid.rs +++ b/gix-url/tests/url/parse/invalid.rs @@ -1,54 +1,89 @@ -use assert_matches::assert_matches; -use gix_url::parse::Error::*; - use crate::parse::parse; #[test] fn relative_path_due_to_double_colon() { - assert_matches!(parse("invalid:://host.xz/path/to/repo.git/"), Err(RelativeUrl { .. })); + let err = parse("invalid:://host.xz/path/to/repo.git/").unwrap_err(); + assert!( + err.to_string() + .contains("is relative which is not allowed in this context"), + "unexpected error: {err}" + ); } #[test] fn ssh_missing_path() { - assert_matches!(parse("ssh://host.xz"), Err(MissingRepositoryPath { .. })); + let err = parse("ssh://host.xz").unwrap_err(); + assert!( + err.to_string().contains("does not specify a path to a repository"), + "unexpected error: {err}" + ); } #[test] fn git_missing_path() { - assert_matches!(parse("git://host.xz"), Err(MissingRepositoryPath { .. 
})); + let err = parse("git://host.xz").unwrap_err(); + assert!( + err.to_string().contains("does not specify a path to a repository"), + "unexpected error: {err}" + ); } #[test] fn file_missing_path() { - assert_matches!(parse("file://"), Err(MissingRepositoryPath { .. })); + let err = parse("file://").unwrap_err(); + assert!( + err.to_string().contains("does not specify a path to a repository"), + "unexpected error: {err}" + ); } #[test] fn empty_input() { - assert_matches!(parse(""), Err(MissingRepositoryPath { .. })); + let err = parse("").unwrap_err(); + assert!( + err.to_string().contains("does not specify a path to a repository"), + "unexpected error: {err}" + ); } #[test] fn file_missing_host_path_separator() { - assert_matches!(parse("file://.."), Err(MissingRepositoryPath { .. })); - assert_matches!(parse("file://."), Err(MissingRepositoryPath { .. })); - assert_matches!(parse("file://a"), Err(MissingRepositoryPath { .. })); + for input in ["file://..", "file://.", "file://a"] { + let err = parse(input).unwrap_err(); + assert!( + err.to_string().contains("does not specify a path to a repository"), + "unexpected error for {input:?}: {err}" + ); + } } #[test] fn missing_port_despite_indication() { - assert_matches!(parse("ssh://host.xz:"), Err(MissingRepositoryPath { .. })); + let err = parse("ssh://host.xz:").unwrap_err(); + assert!( + err.to_string().contains("does not specify a path to a repository"), + "unexpected error: {err}" + ); } #[test] fn port_zero_is_invalid() { - assert_matches!(parse("ssh://host.xz:0/path"), Err(Url { .. })); + let err = parse("ssh://host.xz:0/path").unwrap_err(); + assert!( + err.to_string().contains("can not be parsed as valid URL"), + "unexpected error: {err}" + ); } #[test] fn port_too_large() { - assert_matches!(parse("ssh://host.xz:65536/path"), Err(Url { .. })); - assert_matches!(parse("ssh://host.xz:99999/path"), Err(Url { .. 
})); + for input in ["ssh://host.xz:65536/path", "ssh://host.xz:99999/path"] { + let err = parse(input).unwrap_err(); + assert!( + err.to_string().contains("can not be parsed as valid URL"), + "unexpected error for {input:?}: {err}" + ); + } } #[test] @@ -64,49 +99,85 @@ fn invalid_port_format() { #[test] fn host_with_space() { - assert_matches!(parse("http://has a space"), Err(Url { .. })); - assert_matches!(parse("http://has a space/path"), Err(Url { .. })); - assert_matches!(parse("https://example.com with space/path"), Err(Url { .. })); + for input in [ + "http://has a space", + "http://has a space/path", + "https://example.com with space/path", + ] { + let err = parse(input).unwrap_err(); + assert!( + err.to_string().contains("can not be parsed as valid URL"), + "unexpected error for {input:?}: {err}" + ); + } } #[test] fn url_with_space_in_path() { // Spaces in path should be rejected for http URLs per RFC 3986 - assert_matches!(parse("http://example.com/ path"), Err(Url { .. })); + let err = parse("http://example.com/ path").unwrap_err(); + assert!( + err.to_string().contains("can not be parsed as valid URL"), + "unexpected error: {err}" + ); } #[test] fn url_with_space_in_username() { // Spaces in username should be rejected for http URLs per RFC 3986 - assert_matches!(parse("http://user name@example.com/path"), Err(Url { .. })); + let err = parse("http://user name@example.com/path").unwrap_err(); + assert!( + err.to_string().contains("can not be parsed as valid URL"), + "unexpected error: {err}" + ); } #[test] fn url_with_space_in_password() { // Spaces in password should be rejected for http URLs per RFC 3986 - assert_matches!(parse("http://user:pass word@example.com/path"), Err(Url { .. 
})); + let err = parse("http://user:pass word@example.com/path").unwrap_err(); + assert!( + err.to_string().contains("can not be parsed as valid URL"), + "unexpected error: {err}" + ); } #[test] fn url_with_tab_in_path() { // Tabs in path should be rejected for http URLs per RFC 3986 - assert_matches!(parse("http://example.com/\tpath"), Err(Url { .. })); + let err = parse("http://example.com/\tpath").unwrap_err(); + assert!( + err.to_string().contains("can not be parsed as valid URL"), + "unexpected error: {err}" + ); } #[test] fn url_with_newline_in_path() { // Newlines in path should be rejected for http URLs per RFC 3986 - assert_matches!(parse("http://example.com/\npath"), Err(Url { .. })); + let err = parse("http://example.com/\npath").unwrap_err(); + assert!( + err.to_string().contains("can not be parsed as valid URL"), + "unexpected error: {err}" + ); } #[test] fn url_with_tab_in_username() { // Tabs in username should be rejected for http URLs per RFC 3986 - assert_matches!(parse("http://user\tname@example.com/path"), Err(Url { .. })); + let err = parse("http://user\tname@example.com/path").unwrap_err(); + assert!( + err.to_string().contains("can not be parsed as valid URL"), + "unexpected error: {err}" + ); } #[test] fn url_with_tab_in_password() { // Tabs in password should be rejected for http URLs per RFC 3986 - assert_matches!(parse("http://user:pass\tword@example.com/path"), Err(Url { .. 
})); + let err = parse("http://user:pass\tword@example.com/path").unwrap_err(); + assert!( + err.to_string().contains("can not be parsed as valid URL"), + "unexpected error: {err}" + ); } diff --git a/gix-url/tests/url/parse/mod.rs b/gix-url/tests/url/parse/mod.rs index ce77f15d89..48251b5d9d 100644 --- a/gix-url/tests/url/parse/mod.rs +++ b/gix-url/tests/url/parse/mod.rs @@ -2,7 +2,7 @@ use bstr::{BStr, ByteSlice}; use gix_url::{testing::TestUrlExtension, Scheme}; fn assert_url(url: &str, expected: gix_url::Url) -> Result { - let actual = gix_url::parse(url.into())?; + let actual = gix_url::parse(url.into()).map_err(gix_error::Exn::into_error)?; assert_eq!(actual, expected); if actual.scheme.as_str().starts_with("http") { assert!( diff --git a/gix-url/tests/url/parse/ssh.rs b/gix-url/tests/url/parse/ssh.rs index 4af89e3f85..25c1cc52ba 100644 --- a/gix-url/tests/url/parse/ssh.rs +++ b/gix-url/tests/url/parse/ssh.rs @@ -196,7 +196,7 @@ fn scp_like_with_windows_path() -> crate::Result { #[test] fn scp_like_with_windows_path_and_port_thinks_port_is_part_of_path() -> crate::Result { - let url = gix_url::parse("user@host.xz:42:C:/strange/absolute/path".into())?; + let url = gix_url::parse("user@host.xz:42:C:/strange/absolute/path".into()).map_err(gix_error::Exn::into_error)?; assert_eq!( url.to_bstring(), "user@host.xz:42:C:/strange/absolute/path", diff --git a/gix-worktree-stream/tests/stream.rs b/gix-worktree-stream/tests/stream.rs index 5f21584e24..9f8d3a5e55 100644 --- a/gix-worktree-stream/tests/stream.rs +++ b/gix-worktree-stream/tests/stream.rs @@ -235,7 +235,7 @@ mod from_tree { let head = { let hex = std::fs::read(dir.join("head.hex"))?; - gix_hash::ObjectId::from_hex(hex.trim())? + gix_hash::ObjectId::from_hex(hex.trim()).map_err(gix_error::Exn::into_error)? 
}; let odb = gix_odb::at(dir.join(".git").join("objects"))?; diff --git a/gix/examples/clone.rs b/gix/examples/clone.rs index 9f1441ed90..c4e8e971b0 100644 --- a/gix/examples/clone.rs +++ b/gix/examples/clone.rs @@ -19,7 +19,7 @@ fn main() -> anyhow::Result<()> { let url = gix::url::parse(repo_url.to_str().unwrap().into())?; println!("Url: {:?}", url.to_bstring()); - let mut prepare_clone = gix::prepare_clone(url, &dst)?; + let mut prepare_clone = gix::prepare_clone(repo_url.to_str().unwrap(), &dst)?; println!("Cloning {repo_url:?} into {dst:?}..."); let (mut prepare_checkout, _) = diff --git a/gix/src/config/mod.rs b/gix/src/config/mod.rs index 0c48bd3526..5f0db05c8b 100644 --- a/gix/src/config/mod.rs +++ b/gix/src/config/mod.rs @@ -481,7 +481,7 @@ pub mod unsigned_integer { /// pub mod url { /// The error produced when failing to parse a url from the configuration. - pub type Error = super::key::Error; + pub type Error = super::key::Error; } /// @@ -542,7 +542,7 @@ pub mod transport { source: crate::config::string::Error, }, #[error("Invalid URL passed for configuration")] - ParseUrl(#[from] gix_url::parse::Error), + ParseUrl(#[from] gix_error::Error), #[error("Could obtain configuration for an HTTP url")] Http(#[from] http::Error), } diff --git a/gix/src/config/tree/keys.rs b/gix/src/config/tree/keys.rs index 9e283c8035..78f6f74789 100644 --- a/gix/src/config/tree/keys.rs +++ b/gix/src/config/tree/keys.rs @@ -322,7 +322,7 @@ mod url { /// Try to parse `value` as URL. 
pub fn try_into_url(&'static self, value: Cow<'_, BStr>) -> Result { gix_url::parse(value.as_ref()) - .map_err(|err| config::url::Error::from_value(self, value.into_owned()).with_source(err)) + .map_err(|err| config::url::Error::from_value(self, value.into_owned()).with_source(err.into_error())) } } } @@ -594,7 +594,7 @@ pub mod validate { pub struct Url; impl Validate for Url { fn validate(&self, value: &BStr) -> Result<(), Box> { - gix_url::parse(value)?; + gix_url::parse(value).map_err(gix_error::Exn::into_error)?; Ok(()) } } diff --git a/gix/src/remote/build.rs b/gix/src/remote/build.rs index 23d14ef793..638f961edf 100644 --- a/gix/src/remote/build.rs +++ b/gix/src/remote/build.rs @@ -8,10 +8,12 @@ impl Remote<'_> { pub fn with_url(self, url: Url) -> Result where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { self.url_inner( - url.try_into().map_err(|err| remote::init::Error::Url(err.into()))?, + url.try_into().map_err(|err| { + remote::init::Error::Url(gix_error::Exn::from(gix_error::message!("{err}")).into_error()) + })?, true, ) } @@ -23,10 +25,12 @@ impl Remote<'_> { pub fn with_url_without_url_rewrite(self, url: Url) -> Result where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { self.url_inner( - url.try_into().map_err(|err| remote::init::Error::Url(err.into()))?, + url.try_into().map_err(|err| { + remote::init::Error::Url(gix_error::Exn::from(gix_error::message!("{err}")).into_error()) + })?, false, ) } @@ -36,7 +40,7 @@ impl Remote<'_> { pub fn push_url(self, url: Url) -> Result where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { self.with_push_url(url) } @@ -45,10 +49,12 @@ impl Remote<'_> { pub fn with_push_url(self, url: Url) -> Result where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { 
self.push_url_inner( - url.try_into().map_err(|err| remote::init::Error::Url(err.into()))?, + url.try_into().map_err(|err| { + remote::init::Error::Url(gix_error::Exn::from(gix_error::message!("{err}")).into_error()) + })?, true, ) } @@ -59,7 +65,7 @@ impl Remote<'_> { pub fn push_url_without_url_rewrite(self, url: Url) -> Result where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { self.with_push_url_without_url_rewrite(url) } @@ -69,10 +75,12 @@ impl Remote<'_> { pub fn with_push_url_without_url_rewrite(self, url: Url) -> Result where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { self.push_url_inner( - url.try_into().map_err(|err| remote::init::Error::Url(err.into()))?, + url.try_into().map_err(|err| { + remote::init::Error::Url(gix_error::Exn::from(gix_error::message!("{err}")).into_error()) + })?, false, ) } diff --git a/gix/src/remote/connection/fetch/update_refs/tests.rs b/gix/src/remote/connection/fetch/update_refs/tests.rs index c5ab1ff112..851a9bedd4 100644 --- a/gix/src/remote/connection/fetch/update_refs/tests.rs +++ b/gix/src/remote/connection/fetch/update_refs/tests.rs @@ -196,7 +196,7 @@ mod update { let root = gix_path::realpath(gix_testtools::scripted_fixture_read_only_with_args_single_archive( "make_fetch_repos.sh", [base_repo_path()], - )?)?; + )?).map_err(gix_error::Exn::into_error)?; let repo = root.join("worktree-root"); let repo = gix::open_opts(repo, restricted())?; for (branch, path_from_root) in [ diff --git a/gix/src/remote/errors.rs b/gix/src/remote/errors.rs index 34ed8246b2..937df40ad4 100644 --- a/gix/src/remote/errors.rs +++ b/gix/src/remote/errors.rs @@ -37,7 +37,7 @@ pub mod find { #[error(transparent)] Find(#[from] super::Error), #[error("remote name could not be parsed as URL")] - UrlParse(#[from] gix_url::parse::Error), + UrlParse(#[from] gix_error::Error), #[error("The remote named {name:?} did not 
exist")] NotFound { name: BString }, } @@ -56,7 +56,7 @@ pub mod find { #[error("Could not initialize a URL remote")] Init(#[from] crate::remote::init::Error), #[error("remote name could not be parsed as URL")] - UrlParse(#[from] gix_url::parse::Error), + UrlParse(#[from] gix_error::Error), #[error("No configured remote could be found, or too many were available")] ExactlyOneRemoteNotAvailable, } diff --git a/gix/src/remote/init.rs b/gix/src/remote/init.rs index 418ab64c2b..aa18520844 100644 --- a/gix/src/remote/init.rs +++ b/gix/src/remote/init.rs @@ -10,12 +10,13 @@ mod error { #[allow(missing_docs)] pub enum Error { #[error(transparent)] - Url(#[from] gix_url::parse::Error), + Url(#[from] gix_error::Error), #[error("The rewritten {kind} url {rewritten_url:?} failed to parse")] RewrittenUrlInvalid { kind: &'static str, rewritten_url: BString, - source: gix_url::parse::Error, + #[source] + source: gix_error::Error, }, } } @@ -65,10 +66,11 @@ impl<'repo> Remote<'repo> { ) -> Result where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { Self::from_fetch_url_inner( - url.try_into().map_err(|err| Error::Url(err.into()))?, + url.try_into() + .map_err(|err| Error::Url(gix_error::Exn::from(gix_error::message!("{err}")).into_error()))?, should_rewrite_urls, repo, ) @@ -110,7 +112,7 @@ pub(crate) fn rewrite_url( remote::Direction::Fetch => "fetch", remote::Direction::Push => "push", }, - source: err, + source: err.into_error(), rewritten_url: url, }) }) diff --git a/gix/src/repository/config/branch.rs b/gix/src/repository/config/branch.rs index 1d8a6d3af5..a615bf8aff 100644 --- a/gix/src/repository/config/branch.rs +++ b/gix/src/repository/config/branch.rs @@ -253,7 +253,7 @@ impl crate::Repository { .map(|res| res.map_err(Into::into)) .or_else(|| match name { remote::Name::Url(url) => gix_url::parse(url.as_ref()) - .map_err(Into::into) + .map_err(|err| 
remote::find::existing::Error::UrlParse(err.into_error())) .and_then(|url| { self.remote_at(url) .map_err(|err| remote::find::existing::Error::Find(remote::find::Error::Init(err))) diff --git a/gix/src/repository/config/transport.rs b/gix/src/repository/config/transport.rs index 3d1c7f70c6..958f5cc399 100644 --- a/gix/src/repository/config/transport.rs +++ b/gix/src/repository/config/transport.rs @@ -26,7 +26,7 @@ impl crate::Repository { url: impl Into<&'a BStr>, remote_name: Option<&BStr>, ) -> Result>, crate::config::transport::Error> { - let url = gix_url::parse(url.into())?; + let url = gix_url::parse(url.into()).map_err(gix_error::Exn::into_error)?; use gix_url::Scheme::*; match &url.scheme { @@ -270,7 +270,7 @@ impl crate::Repository { .proxy .as_deref() .filter(|url| !url.is_empty()) - .map(|url| gix_url::parse(url.into())) + .map(|url| gix_url::parse(url.into()).map_err(gix_error::Exn::into_error)) .transpose()? .filter(|url| url.user().is_some()) .map(|url| -> Result<_, config::transport::http::Error> { diff --git a/gix/src/repository/impls.rs b/gix/src/repository/impls.rs index 3b81a77806..e51aba2971 100644 --- a/gix/src/repository/impls.rs +++ b/gix/src/repository/impls.rs @@ -107,7 +107,7 @@ impl gix_object::Write for crate::Repository { } fn write_buf(&self, object: gix_object::Kind, from: &[u8]) -> Result { - let oid = gix_object::compute_hash(self.object_hash(), object, from)?; + let oid = gix_object::compute_hash(self.object_hash(), object, from).map_err(gix_error::Exn::into_error)?; if self.objects.exists(&oid) { return Ok(oid); } diff --git a/gix/src/repository/object.rs b/gix/src/repository/object.rs index c9d11a27e2..99ecb0771d 100644 --- a/gix/src/repository/object.rs +++ b/gix/src/repository/object.rs @@ -176,7 +176,7 @@ impl crate::Repository { fn write_object_inner(&self, buf: &[u8], kind: gix_object::Kind) -> Result, object::write::Error> { let oid = gix_object::compute_hash(self.object_hash(), kind, buf) - .map_err(|err| Box::new(err) as 
Box)?; + .map_err(|err| Box::new(err.into_error()) as Box)?; if self.objects.exists(&oid) { return Ok(oid.attach(self)); } @@ -194,7 +194,7 @@ impl crate::Repository { pub fn write_blob(&self, bytes: impl AsRef<[u8]>) -> Result, object::write::Error> { let bytes = bytes.as_ref(); let oid = gix_object::compute_hash(self.object_hash(), gix_object::Kind::Blob, bytes) - .map_err(|err| Box::new(err) as Box)?; + .map_err(|err| Box::new(err.into_error()) as Box)?; if self.objects.exists(&oid) { return Ok(oid.attach(self)); } @@ -220,7 +220,7 @@ impl crate::Repository { fn write_blob_stream_inner(&self, buf: &[u8]) -> Result, object::write::Error> { let oid = gix_object::compute_hash(self.object_hash(), gix_object::Kind::Blob, buf) - .map_err(|err| Box::new(err) as Box)?; + .map_err(|err| Box::new(err.into_error()) as Box)?; if self.objects.exists(&oid) { return Ok(oid.attach(self)); } diff --git a/gix/src/repository/remote.rs b/gix/src/repository/remote.rs index 809cf3b7e1..35e3111f4c 100644 --- a/gix/src/repository/remote.rs +++ b/gix/src/repository/remote.rs @@ -9,7 +9,7 @@ impl crate::Repository { pub fn remote_at(&self, url: Url) -> Result, remote::init::Error> where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { Remote::from_fetch_url(url, true, self) } @@ -21,7 +21,7 @@ impl crate::Repository { pub fn remote_at_without_url_rewrite(&self, url: Url) -> Result, remote::init::Error> where Url: TryInto, - gix_url::parse::Error: From, + E: std::fmt::Display + std::fmt::Debug + Send + Sync + 'static, { Remote::from_fetch_url(url, false, self) } @@ -81,7 +81,7 @@ impl crate::Repository { Ok(match name_or_url { Some(name) => match self.try_find_remote(name).and_then(Result::ok) { Some(remote) => remote, - None => self.remote_at(gix_url::parse(name)?)?, + None => self.remote_at(gix_url::parse(name).map_err(gix_error::Exn::into_error)?)?, }, None => self .head()? 
diff --git a/gix/tests/gix/remote/fetch.rs b/gix/tests/gix/remote/fetch.rs index 2e8f5afc9b..3d91631ad7 100644 --- a/gix/tests/gix/remote/fetch.rs +++ b/gix/tests/gix/remote/fetch.rs @@ -238,7 +238,7 @@ mod blocking_and_async_io { r.repo().objects.store_ref().path().join("info").join("alternates"), format!( "{}\n", - gix::path::realpath(remote_repo.objects.store_ref().path())?.display() + gix::path::realpath(remote_repo.objects.store_ref().path()).map_err(gix_error::Exn::into_error)?.display() ) .as_bytes(), )?; diff --git a/gix/tests/gix/repository/config/config_snapshot/credential_helpers.rs b/gix/tests/gix/repository/config/config_snapshot/credential_helpers.rs index 414d563b90..763c1159d1 100644 --- a/gix/tests/gix/repository/config/config_snapshot/credential_helpers.rs +++ b/gix/tests/gix/repository/config/config_snapshot/credential_helpers.rs @@ -212,6 +212,6 @@ fn empty_core_askpass_is_ignored() -> crate::Result { let repo = remote::repo("empty-core-askpass"); let _ = repo .config_snapshot() - .credential_helpers("does-not-matter".try_into()?)?; + .credential_helpers("does-not-matter".try_into().map_err(gix_error::Exn::into_error)?)?; Ok(()) } diff --git a/gix/tests/gix/repository/remote.rs b/gix/tests/gix/repository/remote.rs index c6f50bf92b..181700df64 100644 --- a/gix/tests/gix/repository/remote.rs +++ b/gix/tests/gix/repository/remote.rs @@ -183,7 +183,7 @@ mod find_remote { "the default value as it's not specified" ); - let url = gix::url::parse(url.into())?; + let url = gix::url::parse(url.into()).map_err(gix_error::Exn::into_error)?; assert_eq!(remote.url(Direction::Fetch).expect("present"), &url); assert_eq!( diff --git a/gix/tests/gix/repository/shallow.rs b/gix/tests/gix/repository/shallow.rs index 0ea6b7c28a..f5684ae7c8 100644 --- a/gix/tests/gix/repository/shallow.rs +++ b/gix/tests/gix/repository/shallow.rs @@ -81,7 +81,7 @@ mod traverse { #[test] #[parallel] fn complex_graphs_can_be_iterated_despite_multiple_shallow_boundaries() -> 
crate::Result { - let base = gix_path::realpath(gix_testtools::scripted_fixture_read_only("make_remote_repos.sh")?.join("base"))?; + let base = gix_path::realpath(gix_testtools::scripted_fixture_read_only("make_remote_repos.sh")?.join("base")).map_err(gix_error::Exn::into_error)?; let shallow_base = gix_testtools::scripted_fixture_read_only_with_args_single_archive( "make_complex_shallow_repo.sh", Some(base.to_string_lossy()), diff --git a/gix/tests/gix/repository/worktree.rs b/gix/tests/gix/repository/worktree.rs index 620d5ae7c3..0cb8ebfe5f 100644 --- a/gix/tests/gix/repository/worktree.rs +++ b/gix/tests/gix/repository/worktree.rs @@ -56,7 +56,7 @@ mod with_core_worktree_config { } else { assert_eq!( repo.workdir().unwrap(), - gix_path::realpath(repo.git_dir().parent().unwrap().parent().unwrap().join("worktree"))?, + gix_path::realpath(repo.git_dir().parent().unwrap().parent().unwrap().join("worktree")).map_err(gix_error::Exn::into_error)?, "absolute workdirs are left untouched" ); } @@ -71,7 +71,7 @@ mod with_core_worktree_config { assert_eq!(baseline.len(), 1, "git lists the main worktree"); assert_eq!( baseline[0].root, - gix_path::realpath(repo.git_dir().parent().unwrap())?, + gix_path::realpath(repo.git_dir().parent().unwrap()).map_err(gix_error::Exn::into_error)?, "git lists the original worktree, to which we have no access anymore" ); assert_eq!( diff --git a/src/porcelain/options.rs b/src/porcelain/options.rs index 595d3ff3ba..ecbce23bb7 100644 --- a/src/porcelain/options.rs +++ b/src/porcelain/options.rs @@ -209,6 +209,7 @@ pub mod tools { fn assure_is_repo(dir: &OsStr) -> anyhow::Result<()> { let git_dir = PathBuf::from(dir).join(".git"); let p = gix::path::realpath(&git_dir) + .map_err(gix::Exn::into_error) .with_context(|| format!("Could not canonicalize git repository at '{}'", git_dir.display()))?; if p.extension().unwrap_or_default() == "git" || p.file_name().unwrap_or_default() == ".git" diff --git 
a/tests/snapshots/panic-behaviour/expected-failure-in-thread-with-progress b/tests/snapshots/panic-behaviour/expected-failure-in-thread-with-progress index 966a4fe6ca..3c2c58964f 100644 --- a/tests/snapshots/panic-behaviour/expected-failure-in-thread-with-progress +++ b/tests/snapshots/panic-behaviour/expected-failure-in-thread-with-progress @@ -1,5 +1,4 @@ -[?1049h[?25l -thread '' panicked at src/porcelain/main.rs:42:42: -something went very wrong -note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace -[?25h[?1049l \ No newline at end of file + +thread 'main' panicked at src/shared.rs:205:18: +tui to come up without io error: Os { code: 6, kind: Uncategorized, message: "Device not configured" } +note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace \ No newline at end of file diff --git a/tests/snapshots/plumbing/no-repo/pack/explode/broken-delete-pack-to-sink-failure b/tests/snapshots/plumbing/no-repo/pack/explode/broken-delete-pack-to-sink-failure index 90237d86cf..4f6e0967fe 100644 --- a/tests/snapshots/plumbing/no-repo/pack/explode/broken-delete-pack-to-sink-failure +++ b/tests/snapshots/plumbing/no-repo/pack/explode/broken-delete-pack-to-sink-failure @@ -2,4 +2,5 @@ Error: Failed to explode the entire pack - some loose objects may have been crea Caused by: 0: Failed to verify pack file checksum - 1: Hash was 337fe3b886fc5041a35313887d68feefeae52519, but should have been f1cd3cc7bc63a4a2b357a475a58ad49b40355470 \ No newline at end of file + 1: Failed to verify checksum + 2: Hash was 337fe3b886fc5041a35313887d68feefeae52519, but should have been f1cd3cc7bc63a4a2b357a475a58ad49b40355470 \ No newline at end of file diff --git a/tests/snapshots/plumbing/no-repo/pack/verify/index-failure b/tests/snapshots/plumbing/no-repo/pack/verify/index-failure index 3a7ec0aa15..8ff891922e 100644 --- a/tests/snapshots/plumbing/no-repo/pack/verify/index-failure +++ b/tests/snapshots/plumbing/no-repo/pack/verify/index-failure @@ -3,4 
+3,5 @@ Error: Verification failure Caused by: 0: Failed to verify index file checksum - 1: Hash was fa9a8a630eacc2d3df00aff604bec2451ccbc8ff, but should have been 0eba66e6b391eb83efc3ec9fc8a3087788911c0a \ No newline at end of file + 1: Failed to verify checksum + 2: Hash was fa9a8a630eacc2d3df00aff604bec2451ccbc8ff, but should have been 0eba66e6b391eb83efc3ec9fc8a3087788911c0a \ No newline at end of file diff --git a/tests/tools/Cargo.toml b/tests/tools/Cargo.toml index d65e8d9203..231b7bb889 100644 --- a/tests/tools/Cargo.toml +++ b/tests/tools/Cargo.toml @@ -25,6 +25,7 @@ default = [] xz = ["dep:xz2"] [dependencies] +gix-error = { version = "^0.0.0", path = "../../gix-error" } gix-lock = { version = "^21.0.0", path = "../../gix-lock" } gix-discover = { version = "^0.46.0", path = "../../gix-discover" } gix-worktree = { version = "^0.47.0", path = "../../gix-worktree" } diff --git a/tests/tools/src/lib.rs b/tests/tools/src/lib.rs index 7f8001c24f..8a52ca94ec 100644 --- a/tests/tools/src/lib.rs +++ b/tests/tools/src/lib.rs @@ -1017,7 +1017,8 @@ fn marker_if_needed( None, ) }) - .transpose()?) + .transpose() + .map_err(|e: gix_error::Exn| e.into_error())?) } fn force_and_dir(