feat(rust/catalyst-types): Add a new Rust crate catalyst-types (#138)
* feat: initial commit

* ci: register crate

* feat(rust): Add uniform problem report type (#140)

* feat(rust): Add uniform problem report type

* fix(docs): spelling

* fix(rust): catalyst-types ci build

* fix(rust): doc tests

* feat: hash and conversion

* feat: uuid

* refactor: rename

* chore: fmtfix

* chore: fmtfix again

* test: integration

* fix: uuid type

* feat(rust/catalyst-types): Add duplicate data error report (#141)

* feat(catalyst-types): add duplicate data error report

Signed-off-by: bkioshn <[email protected]>

* fix(catalyst-types): format

Signed-off-by: bkioshn <[email protected]>

* fix(catalyst-types): add duplicate field description and change name

Signed-off-by: bkioshn <[email protected]>

* Update rust/catalyst-types/src/problem_report.rs

---------

Signed-off-by: bkioshn <[email protected]>
Co-authored-by: Steven Johnson <[email protected]>

* chore: fmtfix

* chore: rename module fixing lint

* fix: module name

* chore: lintfix rename

* feat: kid

* chore: fmtfix

* feat: improve type errors

* chore: fmtfix

* fix: cspell

* fix: error coverage

* fix: lint

* fix: test

* fix: minor

* feat: lintfix

* fix: comment

* fix: comment

* chore: fmtfix

* Update rust/catalyst-types/src/conversion.rs

Co-authored-by: bkioshn <[email protected]>

* docs: update

* chore: kiduri

* chore: displaydoc

* fix: add displaydoc to .dic

---------

Signed-off-by: bkioshn <[email protected]>
Co-authored-by: Steven Johnson <[email protected]>
Co-authored-by: Steven Johnson <[email protected]>
Co-authored-by: bkioshn <[email protected]>
4 people authored Jan 7, 2025
1 parent 3c5d97a commit 768ecd5
Showing 18 changed files with 1,452 additions and 2 deletions.
2 changes: 2 additions & 0 deletions .config/dictionaries/project.dic
@@ -58,6 +58,7 @@ dbsync
dcbor
decompressor
delegators
displaydoc
dleq
dlog
dockerhub
@@ -254,6 +255,7 @@ Traceback
txmonitor
txns
typenum
uncategorized
unfinalized
unixfs
unlinkat
1 change: 1 addition & 0 deletions .github/workflows/semantic_pull_request.yml
@@ -18,6 +18,7 @@ jobs:
rust
rust/c509-certificate
rust/cardano-chain-follower
rust/catalyst-types
rust/catalyst-voting
rust/immutable-ledger
rust/vote-tx-v1
2 changes: 1 addition & 1 deletion rust/Cargo.toml
@@ -10,7 +10,7 @@ members = [
"cbork-cddl-parser",
"cbork-utils",
"catalyst-voting",
"catalyst-voting",
"catalyst-types",
"immutable-ledger",
"vote-tx-v1",
"vote-tx-v2",
3 changes: 2 additions & 1 deletion rust/Earthfile
@@ -9,6 +9,7 @@ COPY_SRC:
Cargo.toml clippy.toml deny.toml rustfmt.toml \
.cargo .config \
c509-certificate \
catalyst-types \
cardano-blockchain-types \
cardano-chain-follower \
catalyst-voting vote-tx-v1 vote-tx-v2 \
@@ -55,7 +56,7 @@ build:
DO rust-ci+EXECUTE \
--cmd="/scripts/std_build.py" \
--args1="--libs=c509-certificate --libs=cardano-blockchain-types --libs=cardano-chain-follower --libs=hermes-ipfs" \
--args2="--libs=cbork-cddl-parser --libs=cbork-abnf-parser --libs=cbork-utils" \
--args2="--libs=cbork-cddl-parser --libs=cbork-abnf-parser --libs=cbork-utils --libs=catalyst-types" \
--args3="--libs=catalyst-voting --libs=immutable-ledger --libs=vote-tx-v1 --libs=vote-tx-v2" \
--args4="--bins=cbork/cbork --libs=rbac-registration --libs=signed_doc" \
--args5="--cov_report=$HOME/build/coverage-report.info" \
36 changes: 36 additions & 0 deletions rust/catalyst-types/Cargo.toml
@@ -0,0 +1,36 @@
[package]
name = "catalyst-types"
version = "0.0.1"
edition.workspace = true
license.workspace = true
authors.workspace = true
homepage.workspace = true
repository.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lints]
workspace = true

[lib]
name = "catalyst_types"

[dependencies]
blake2b_simd = "1.0.2"
coset = "0.3.8"
displaydoc = "0.2.5"
ed25519-dalek = "2.1.1"
fluent-uri = "0.3.2"
hex = "0.4.3"
minicbor = { version = "0.25.1", features = ["std"] }
num-traits = "0.2.19"
orx-concurrent-vec = "3.1.0"
pallas-crypto = { version = "0.30.1", git = "https://github.com/input-output-hk/catalyst-pallas.git", rev = "9b5183c8b90b90fe2cc319d986e933e9518957b3" }
serde = { version = "1.0.217", features = ["derive"] }
thiserror = "2.0.9"
base64-url = "3.0.0"
uuid = { version = "1.11.0", features = ["v4", "v7", "serde"] }

[dev-dependencies]
ed25519-dalek = { version = "2.1.1", features = ["rand_core"] }
rand = "0.8.5"
9 changes: 9 additions & 0 deletions rust/catalyst-types/README.md
@@ -0,0 +1,9 @@
# Catalyst Types

This library is designed to streamline the organization and sharing of types across multiple crates.
It provides a centralized location for reusable, enhanced types that are not specific to a particular domain, such as Cardano.

## Purpose

* To enhance types that can be utilized across different libraries or projects.
* To provide utility functions related to types and conversion between types.
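
A rough usage sketch of the crate follows. It assumes the conversion and hashes modules are exported from the crate root as `catalyst_types::conversion` and `catalyst_types::hashes`; the crate root is not among the files shown here, so the exact paths are an assumption.

// Hypothetical consumer of catalyst-types; module paths are assumed, not confirmed by this diff.
use catalyst_types::conversion::from_saturating;
use catalyst_types::hashes::Blake2b256Hash;

fn main() {
    // Hash arbitrary bytes with Blake2b-256.
    let hash = Blake2b256Hash::new(b"example data");
    println!("{}", hash);

    // Saturating numeric conversion: out-of-range values clamp to the target type's bounds.
    let clamped: u8 = from_saturating(300_i32);
    assert_eq!(clamped, 255);
}
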
70 changes: 70 additions & 0 deletions rust/catalyst-types/src/conversion.rs
@@ -0,0 +1,70 @@
//! Conversion functions
use displaydoc::Display;
use thiserror::Error;

/// Errors that can occur when converting bytes to an Ed25519 verifying key.
#[derive(Display, Debug, Error)]
pub enum VKeyFromBytesError {
/// Invalid byte length: expected {expected} bytes, got {actual}
InvalidLength {
/// The expected number of bytes (must be 32).
expected: usize,
/// The actual number of bytes in the provided input.
actual: usize,
},
/// Failed to parse Ed25519 public key: {source}
ParseError {
/// The underlying error from `ed25519_dalek`.
#[from]
source: ed25519_dalek::SignatureError,
},
}

/// Convert a `T` to an `R`, saturating if the value is out of range.
/// Note: this can also convert any integer type to a float, or `f32` to `f64`,
/// but it cannot convert from a float to an integer, or from `f64` to `f32`.
pub fn from_saturating<
R: Copy + num_traits::identities::Zero + num_traits::Bounded,
T: Copy
+ TryInto<R>
+ std::ops::Sub<Output = T>
+ std::cmp::PartialOrd<T>
+ num_traits::identities::Zero,
>(
value: T,
) -> R {
match value.try_into() {
Ok(value) => value,
Err(_) => {
// If we couldn't convert, it's out of range for the destination type.
if value > T::zero() {
// If the number is positive, it's out of range in the positive direction.
R::max_value()
} else {
// Otherwise it's out of range in the negative direction.
R::min_value()
}
},
}
}

/// Try and convert a byte array into an Ed25519 verifying key.
///
/// # Errors
///
Fails if the bytes are not a valid Ed25519 public key.
pub fn vkey_from_bytes(bytes: &[u8]) -> Result<ed25519_dalek::VerifyingKey, VKeyFromBytesError> {
if bytes.len() != ed25519_dalek::PUBLIC_KEY_LENGTH {
return Err(VKeyFromBytesError::InvalidLength {
expected: ed25519_dalek::PUBLIC_KEY_LENGTH,
actual: bytes.len(),
});
}

let mut ed25519 = [0u8; ed25519_dalek::PUBLIC_KEY_LENGTH];
ed25519.copy_from_slice(bytes); // Can't panic because we already validated its size.

ed25519_dalek::VerifyingKey::from_bytes(&ed25519)
.map_err(|source| VKeyFromBytesError::ParseError { source })
}
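
A short sketch of how a caller might handle the `VKeyFromBytesError` variants defined above (the module path `catalyst_types::conversion` is assumed; the crate root is not shown in this diff):

// Hypothetical caller; module path assumed.
use catalyst_types::conversion::{vkey_from_bytes, VKeyFromBytesError};

fn main() {
    // A 4-byte input is rejected with InvalidLength rather than panicking.
    match vkey_from_bytes(&[0u8; 4]) {
        Err(VKeyFromBytesError::InvalidLength { expected, actual }) => {
            eprintln!("wrong length: expected {expected} bytes, got {actual}");
        },
        Err(VKeyFromBytesError::ParseError { source }) => {
            eprintln!("bytes are not a valid Ed25519 key: {source}");
        },
        Ok(_) => println!("parsed a verifying key"),
    }
}
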
210 changes: 210 additions & 0 deletions rust/catalyst-types/src/hashes.rs
@@ -0,0 +1,210 @@
//! Cardano hashing functions
use std::{fmt, str::FromStr};

use blake2b_simd::Params;
use displaydoc::Display;
use pallas_crypto::hash::Hash;
use thiserror::Error;

/// Number of bytes in a blake2b 224 hash.
pub const BLAKE_2B224_SIZE: usize = 224 / 8;

/// `Blake2B` 224-bit hash.
pub type Blake2b224Hash = Blake2bHash<BLAKE_2B224_SIZE>;

/// Number of bytes in a blake2b 256 hash.
pub const BLAKE_2B256_SIZE: usize = 256 / 8;

/// `Blake2B` 256-bit hash.
pub type Blake2b256Hash = Blake2bHash<BLAKE_2B256_SIZE>;

/// Number of bytes in a blake2b 128 hash.
pub const BLAKE_2B128_SIZE: usize = 128 / 8;

/// `Blake2B` 128-bit hash.
pub type Blake2b128Hash = Blake2bHash<BLAKE_2B128_SIZE>;

/// Errors that can occur when converting to a `Blake2bHash`.
#[derive(Display, Debug, Error)]
pub enum Blake2bHashError {
/// Invalid length: expected {expected} bytes, got {actual}
InvalidLength {
/// The expected number of bytes (the `BYTES` hash length, e.g. 16, 28 or 32).
expected: usize,
/// The actual number of bytes in the provided input.
actual: usize,
},
/// Invalid hex string: {source}
InvalidHex {
/// The underlying error from `hex`.
#[from]
source: hex::FromHexError,
},
}

/// Data that is a Blake2b [`struct@Hash`] of `BYTES` length.
///
/// Hash sizes used by Cardano are 32 bytes (block hash or transaction
/// hash) and 28 bytes (as used in addresses).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Blake2bHash<const BYTES: usize>(Hash<BYTES>);

impl<const BYTES: usize> Blake2bHash<BYTES> {
/// Create a new `Blake2bHash` from a slice of bytes by hashing them.
#[must_use]
pub fn new(input_bytes: &[u8]) -> Self {
let mut bytes: [u8; BYTES] = [0u8; BYTES];

// Generate a unique hash of the data.
let mut hasher = Params::new().hash_length(BYTES).to_state();

hasher.update(input_bytes);
let hash = hasher.finalize();

// Copy the BYTES-long digest into the fixed-size array.
bytes.copy_from_slice(hash.as_bytes());

bytes.into()
}
}

impl<const BYTES: usize> From<[u8; BYTES]> for Blake2bHash<BYTES> {
#[inline]
fn from(bytes: [u8; BYTES]) -> Self {
let hash: Hash<BYTES> = bytes.into();
hash.into()
}
}

impl<const BYTES: usize> From<Hash<BYTES>> for Blake2bHash<BYTES> {
#[inline]
fn from(bytes: Hash<BYTES>) -> Self {
Self(bytes)
}
}

impl<const BYTES: usize> From<Blake2bHash<BYTES>> for Vec<u8> {
fn from(val: Blake2bHash<BYTES>) -> Self {
val.0.to_vec()
}
}

/// Convert a hash, in the form of a byte slice, into the `Blake2bHash` type.
impl<const BYTES: usize> TryFrom<&[u8]> for Blake2bHash<BYTES> {
type Error = Blake2bHashError;

fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
// `copy_from_slice` below requires exactly BYTES bytes.
if value.len() != BYTES {
return Err(Blake2bHashError::InvalidLength {
expected: BYTES,
actual: value.len(),
});
}

let mut hash = [0; BYTES];
hash.copy_from_slice(value);
let hash: Hash<BYTES> = hash.into();
Ok(hash.into())
}
}

impl<const BYTES: usize> TryFrom<&Vec<u8>> for Blake2bHash<BYTES> {
type Error = Blake2bHashError;

fn try_from(value: &Vec<u8>) -> Result<Self, Self::Error> {
value.as_slice().try_into()
}
}

impl<const BYTES: usize> TryFrom<Vec<u8>> for Blake2bHash<BYTES> {
type Error = Blake2bHashError;

fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
value.as_slice().try_into()
}
}

impl<const BYTES: usize> fmt::Debug for Blake2bHash<BYTES> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&format!("{:?}", self.0))
}
}

impl<const BYTES: usize> fmt::Display for Blake2bHash<BYTES> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&format!("{}", self.0))
}
}

impl<const BYTES: usize> FromStr for Blake2bHash<BYTES> {
type Err = Blake2bHashError;

fn from_str(s: &str) -> Result<Self, Self::Err> {
let hash: Hash<BYTES> = s.parse().map_err(Blake2bHashError::from)?;
Ok(hash.into())
}
}

impl<C, const BYTES: usize> minicbor::Encode<C> for Blake2bHash<BYTES> {
fn encode<W: minicbor::encode::Write>(
&self, e: &mut minicbor::Encoder<W>, _ctx: &mut C,
) -> Result<(), minicbor::encode::Error<W::Error>> {
e.bytes(self.0.as_ref())?.ok()
}
}

impl<'a, C, const BYTES: usize> minicbor::Decode<'a, C> for Blake2bHash<BYTES> {
fn decode(
d: &mut minicbor::Decoder<'a>, _ctx: &mut C,
) -> Result<Self, minicbor::decode::Error> {
let bytes = d.bytes()?;
bytes.try_into().map_err(|_| {
minicbor::decode::Error::message("Invalid hash size for Blake2bHash cbor decode")
})
}
}

#[cfg(test)]
mod tests {
use super::*;

#[test]
fn test_blake2b_hash_init() {
let data = b"Cardano";
let hash_224 = Blake2b224Hash::new(data);
let hash_256 = Blake2b256Hash::new(data);
let hash_128 = Blake2b128Hash::new(data);

assert_eq!(hash_224.0.as_ref().len(), BLAKE_2B224_SIZE);
assert_eq!(hash_256.0.as_ref().len(), BLAKE_2B256_SIZE);
assert_eq!(hash_128.0.as_ref().len(), BLAKE_2B128_SIZE);
}

#[test]
fn test_blake2b_hash_conversion() {
let data = b"Cardano";
let hash = Blake2b224Hash::new(data);

let as_vec: Vec<u8> = hash.into();
let from_vec = Blake2b224Hash::try_from(&as_vec).unwrap();
assert_eq!(hash, from_vec);

let from_slice = Blake2b224Hash::try_from(as_vec.as_slice()).unwrap();
assert_eq!(hash, from_slice);
}

#[test]
fn test_blake2b_hash_invalid_length() {
let invalid_data = vec![0u8; 10];
let result = Blake2b224Hash::try_from(&invalid_data);
assert!(result.is_err());

if let Err(Blake2bHashError::InvalidLength { expected, actual }) = result {
assert_eq!(expected, BLAKE_2B224_SIZE);
assert_eq!(actual, invalid_data.len());
} else {
panic!("Expected InvalidLength error");
}
}
}
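
A minimal round-trip sketch for the minicbor `Encode`/`Decode` impls above. It assumes the module is exported as `catalyst_types::hashes` and uses minicbor's `to_vec`/`decode` helpers, available via the crate's `std` feature:

// Hypothetical round-trip; module path assumed.
use catalyst_types::hashes::Blake2b256Hash;

fn main() {
    let hash = Blake2b256Hash::new(b"Cardano");

    // Encode the hash as a single CBOR byte string, then decode it back.
    let cbor: Vec<u8> = minicbor::to_vec(hash).expect("encoding a hash should not fail");
    let decoded: Blake2b256Hash = minicbor::decode(&cbor).expect("decoding the bytes we just produced");
    assert_eq!(hash, decoded);
}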