diff --git a/Cargo.lock b/Cargo.lock
index a572305baf..93bec81617 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2580,6 +2580,20 @@ dependencies = [
  "syn 2.0.96",
 ]
 
+[[package]]
+name = "idb"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3afe8830d5802f769dc0be20a87f9f116798c896650cb6266eb5c19a3c109eed"
+dependencies = [
+ "js-sys",
+ "num-traits",
+ "thiserror 1.0.69",
+ "tokio",
+ "wasm-bindgen",
+ "web-sys",
+]
+
 [[package]]
 name = "ident_case"
 version = "1.0.1"
@@ -5157,9 +5171,11 @@ dependencies = [
 name = "nimiq-web-client"
 version = "1.0.6"
 dependencies = [
+ "ark-serialize",
  "futures-util",
  "gloo-timers 0.3.0",
  "hex",
+ "idb",
  "js-sys",
  "nimiq-account",
  "nimiq-block",
@@ -5185,6 +5201,8 @@ dependencies = [
  "rand_core",
  "serde",
  "serde-wasm-bindgen 0.6.5",
+ "serde_bytes",
+ "serde_json",
  "tokio",
  "tracing",
  "tsify",
diff --git a/bls/src/lazy.rs b/bls/src/lazy.rs
index 952e6f27b6..06f3775a68 100644
--- a/bls/src/lazy.rs
+++ b/bls/src/lazy.rs
@@ -97,7 +97,12 @@ impl LazyPublicKey {
 
 impl From<PublicKey> for LazyPublicKey {
     fn from(key: PublicKey) -> LazyPublicKey {
-        LazyPublicKey(cache().intern(&key.compress(), OnceLock::from(Some(key))))
+        let result = LazyPublicKey(cache().intern(&key.compress(), OnceLock::from(Some(key))));
+        // TODO: This might block while another thread computes the decompression. :/
+        //
+        // Needs an addition in the Rust standard library to fix.
+        let _ = result.0.value().set(Some(key));
+        result
     }
 }
diff --git a/bls/src/types/compressed_public_key.rs b/bls/src/types/compressed_public_key.rs
index d56c7fa734..93c3439495 100644
--- a/bls/src/types/compressed_public_key.rs
+++ b/bls/src/types/compressed_public_key.rs
@@ -35,6 +35,7 @@ impl CompressedPublicKey {
 
     /// Transforms the compressed form back into the projective form.
     pub fn uncompress(&self) -> Result<PublicKey, Error> {
+        log::info!("decompressing {}", &self.to_hex()[..16]);
         let affine_point: G2Affine =
             CanonicalDeserialize::deserialize_compressed(&mut &self.public_key[..])
                 .map_err(|e| Error::new(ErrorKind::Other, e))?;
diff --git a/bls/src/types/public_key.rs b/bls/src/types/public_key.rs
index 20c55c30af..9be31d818c 100644
--- a/bls/src/types/public_key.rs
+++ b/bls/src/types/public_key.rs
@@ -4,7 +4,7 @@ use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup, Group};
 use ark_ff::Zero;
 pub use ark_mnt6_753::G2Projective;
 use ark_mnt6_753::{G1Projective, MNT6_753};
-use ark_serialize::CanonicalSerialize;
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use log::error;
 use nimiq_hash::Hash;
@@ -19,6 +19,8 @@ pub struct PublicKey {
 }
 
 impl PublicKey {
+    pub const TRUSTED_SERIALIZATION_SIZE: usize = 570;
+
     /// Generates a public key from a given point in G2. This function will produce an error if it is given the point at infinity.
     pub fn new(public_key: G2Projective) -> Self {
         if public_key.is_zero() {
@@ -60,8 +62,9 @@ impl PublicKey {
     /// and one bit indicating if it is the "point-at-infinity".
     pub fn compress(&self) -> CompressedPublicKey {
         let mut buffer = [0u8; CompressedPublicKey::SIZE];
-        CanonicalSerialize::serialize_compressed(&self.public_key.into_affine(), &mut buffer[..])
-            .unwrap();
+        let affine = self.public_key.into_affine();
+        assert_eq!(affine.compressed_size(), buffer.len());
+        affine.serialize_compressed(&mut buffer[..]).unwrap();
         CompressedPublicKey { public_key: buffer }
     }
 
@@ -72,6 +75,24 @@ impl PublicKey {
         let public_key = self.public_key.mul_bigint([x as u64]);
         PublicKey { public_key }
     }
+
+    pub fn trusted_serialize(&self) -> [u8; PublicKey::TRUSTED_SERIALIZATION_SIZE] {
+        let mut result = [0u8; PublicKey::TRUSTED_SERIALIZATION_SIZE];
+        assert_eq!(self.public_key.uncompressed_size(), result.len());
+        self.public_key
+            .serialize_uncompressed(&mut result[..])
+            .unwrap();
+        result
+    }
+
+    pub fn trusted_deserialize(
+        serialized: &[u8; PublicKey::TRUSTED_SERIALIZATION_SIZE],
+    ) -> PublicKey {
+        PublicKey {
+            public_key: CanonicalDeserialize::deserialize_uncompressed_unchecked(&serialized[..])
+                .unwrap(),
+        }
+    }
 }
 
 impl Eq for PublicKey {}
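Note on the new API pair: `trusted_deserialize` goes through `deserialize_uncompressed_unchecked`, i.e. it skips the curve-equation and subgroup-membership checks that make `uncompress` expensive, so it must only ever see bytes produced by `trusted_serialize`. A minimal round-trip sketch (assuming `key` is a `PublicKey` from a validated source; the function name is illustrative only):

    use nimiq_bls::PublicKey;

    fn roundtrip(key: PublicKey) {
        // 570 bytes: both uncompressed G2 coordinates, no validity checks encoded.
        let bytes = key.trusted_serialize();
        // Cheap because the expensive checks are skipped; safe only because
        // `bytes` came from `trusted_serialize` on an already-valid key.
        let restored = PublicKey::trusted_deserialize(&bytes);
        assert_eq!(restored.public_key, key.public_key);
    }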
diff --git a/bls/tests/tests.rs b/bls/tests/tests.rs
index dd27d648f9..056e5b7a67 100644
--- a/bls/tests/tests.rs
+++ b/bls/tests/tests.rs
@@ -188,3 +188,17 @@ fn aggregate_signatures_serialization() {
         &AggregateSignature::deserialize_from_vec(&ser_agg_sig).unwrap()
     ));
 }
+
+#[test]
+fn trusted_serialization() {
+    let hex_public_key = "ae4cc2e31e04add9a6d379b4379b02f302971503cbac8d02fdc5d2dc8204d24ec8d095627d037de747f1a8ea7bf3c1693262d947f78e0cc73c18ecc2f2ec5b2249d551e1680fe0c973a7951bd78d4fbe0326be71286ed34004d2443eb3b00167a02edffcfd2b8539448fa116c5454da2d181dc03ea8cfe3fedb58b9b945d5e506c794deb3ba73983005b3ff799212bf59030a8dd17ff48fd5d015695195a022fed8ba4fab28a4c3e2d6f41be0e6315e41824df161219c02be5a281c215011c13131184187e9100d2d6a5321fd9b154806ecc78e93b91331a5334b8876fd1b8ea62b17ce6045fc9e1af60b7705b0cf86dba79f5bcb8320c99a45f3b7c7178f8f87ba953de2755c61af882059c1de1d7a35357f06cd4a7d954e4bb211900";
+
+    let raw_public_key: Vec<u8> = hex::decode(hex_public_key).unwrap();
+    let compressed_public_key = CompressedPublicKey::deserialize_from_vec(&raw_public_key).unwrap();
+
+    let public_key = compressed_public_key.uncompress().unwrap();
+    assert_eq!(
+        PublicKey::trusted_deserialize(&public_key.trusted_serialize()).public_key,
+        public_key.public_key,
+    );
+}
diff --git a/genesis-builder/src/lib.rs b/genesis-builder/src/lib.rs
index 7dff82774c..7a4e94e163 100644
--- a/genesis-builder/src/lib.rs
+++ b/genesis-builder/src/lib.rs
@@ -1,11 +1,7 @@
 #[macro_use]
 extern crate log;
 
-use std::{
-    fs::{read_to_string, OpenOptions},
-    io::Error as IoError,
-    path::Path,
-};
+use std::{fs, io::Error as IoError, path::Path};
 
 use nimiq_account::{
     Account, Accounts, BasicAccount, HashedTimeLockedContract, StakingContract,
@@ -217,7 +213,7 @@ impl GenesisBuilder {
     ///
     /// See `genesis/src/genesis/unit-albatross.toml` for an example.
     pub fn from_config_file<P: AsRef<Path>>(path: P) -> Result<Self, GenesisBuilderError> {
-        Self::from_config(toml::from_str(&read_to_string(path)?)?)
+        Self::from_config(toml::from_str(&fs::read_to_string(path)?)?)
     }
 
     pub fn from_config(config: config::GenesisConfig) -> Result<Self, GenesisBuilderError> {
@@ -647,6 +643,8 @@ impl GenesisBuilder {
         db: MdbxDatabase,
         directory: P,
     ) -> Result<(Blake2bHash, bool), GenesisBuilderError> {
+        let directory = directory.as_ref();
+
         let GenesisInfo {
             block,
             hash,
@@ -658,25 +656,34 @@ impl GenesisBuilder {
         debug!("Accounts:");
         debug!(?accounts);
 
-        let block_path = directory.as_ref().join("block.dat");
+        let block_path = directory.join("block.dat");
         info!(path = %block_path.display(), "Writing block to");
-        let mut file = OpenOptions::new()
-            .create(true)
-            .truncate(true)
-            .write(true)
-            .open(&block_path)?;
-        block.serialize_to_writer(&mut file)?;
+        fs::write(block_path, block.serialize_to_vec())?;
+
+        let decompressed_path = directory.join("decompressed_keys.dat");
+        if let Block::Macro(MacroBlock {
+            header:
+                MacroHeader {
+                    validators: Some(validators),
+                    ..
+                },
+            ..
+        }) = &block
+        {
+            let mut decompressed = Vec::new();
+            for key in validators.voting_keys() {
+                decompressed.extend_from_slice(&key.trusted_serialize());
+            }
+            fs::write(decompressed_path, decompressed)?;
+        } else {
+            unreachable!("genesis block must be an election block");
+        }
 
         let have_accounts = accounts.is_some();
         if let Some(accounts) = accounts {
-            let accounts_path = directory.as_ref().join("accounts.dat");
+            let accounts_path = directory.join("accounts.dat");
             info!(path = %accounts_path.display(), "Writing accounts to");
-            let mut file = OpenOptions::new()
-                .create(true)
-                .truncate(true)
-                .write(true)
-                .open(&accounts_path)?;
-            accounts.serialize_to_writer(&mut file)?;
+            fs::write(accounts_path, accounts.serialize_to_vec())?;
         }
 
         Ok((hash, have_accounts))
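The `decompressed_keys.dat` written above is a bare concatenation of fixed-width `TRUSTED_SERIALIZATION_SIZE` records, with no header or length prefix. A sketch of a matching reader (`read_decompressed_keys` is a hypothetical helper, not part of the patch; the chunking mirrors `genesis/src/networks.rs` below):

    use nimiq_bls::PublicKey;

    fn read_decompressed_keys(bytes: &[u8]) -> Vec<PublicKey> {
        // The file is only valid if it holds a whole number of records.
        assert_eq!(bytes.len() % PublicKey::TRUSTED_SERIALIZATION_SIZE, 0);
        bytes
            .chunks(PublicKey::TRUSTED_SERIALIZATION_SIZE)
            .map(|chunk| PublicKey::trusted_deserialize(chunk.try_into().unwrap()))
            .collect()
    }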
diff --git a/genesis/build.rs b/genesis/build.rs
index 3f7320399f..3e6f169c57 100644
--- a/genesis/build.rs
+++ b/genesis/build.rs
@@ -23,6 +23,7 @@ fn write_genesis_rs(directory: &Path, name: &str, genesis_hash: &Blake2bHash, ha
     let genesis_rs = format!(
         r#"GenesisData {{
             block: include_bytes!(concat!(env!("OUT_DIR"), "/genesis/{name}/block.dat")),
+            decompressed_keys: include_bytes!(concat!(env!("OUT_DIR"), "/genesis/{name}/decompressed_keys.dat")),
             hash: Blake2bHash([{hash}]),
             accounts: {accounts_expr},
         }}"#,
diff --git a/genesis/src/networks.rs b/genesis/src/networks.rs
index 80a88bb047..60696de64b 100644
--- a/genesis/src/networks.rs
+++ b/genesis/src/networks.rs
@@ -1,8 +1,9 @@
-use std::env;
 #[cfg(feature = "genesis-override")]
 use std::path::Path;
+use std::{env, sync::OnceLock};
 
 use nimiq_block::Block;
+use nimiq_bls::{LazyPublicKey as BlsLazyPublicKey, PublicKey as BlsPublicKey};
 #[cfg(feature = "genesis-override")]
 use nimiq_database::mdbx::MdbxDatabase;
 #[cfg(feature = "genesis-override")]
@@ -17,6 +18,7 @@ use nimiq_serde::Serialize;
 #[derive(Clone, Debug)]
 struct GenesisData {
     block: &'static [u8],
+    decompressed_keys: &'static [u8],
     hash: Blake2bHash,
     accounts: Option<&'static [u8]>,
 }
@@ -81,16 +83,40 @@ fn read_genesis_config(config: &Path) -> Result<GenesisData, GenesisBuilderError> {
 
+static KEYS_DEV: OnceLock<Vec<BlsLazyPublicKey>> = OnceLock::new();
+static KEYS_TEST: OnceLock<Vec<BlsLazyPublicKey>> = OnceLock::new();
+static KEYS_UNIT: OnceLock<Vec<BlsLazyPublicKey>> = OnceLock::new();
+static KEYS_MAIN: OnceLock<Vec<BlsLazyPublicKey>> = OnceLock::new();
+
 fn network(network_id: NetworkId) -> Option<&'static NetworkInfo> {
     let result = network_impl(network_id);
 
     if let Some(info) = result {
         assert_eq!(network_id, info.network_id);
         assert_eq!(network_id, info.genesis_block().network());
+        let keys = match network_id {
+            NetworkId::DevAlbatross => &KEYS_DEV,
+            NetworkId::TestAlbatross => &KEYS_TEST,
+            NetworkId::UnitAlbatross => &KEYS_UNIT,
+            NetworkId::MainAlbatross => &KEYS_MAIN,
+            _ => unreachable!(),
+        };
+        keys.get_or_init(|| {
+            info.genesis
+                .decompressed_keys
+                .chunks(BlsPublicKey::TRUSTED_SERIALIZATION_SIZE)
+                .map(|chunk| {
+                    BlsLazyPublicKey::from(BlsPublicKey::trusted_deserialize(
+                        &chunk.try_into().unwrap(),
+                    ))
+                })
+                .collect()
+        });
     }
 
     result
 }
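For context on the 570-byte constant: an MNT6-753 G2 point has coordinates in a cubic extension field, and the uncompressed form stores both coordinates where the compressed form stores one plus packed flag bits. Assuming `CompressedPublicKey::SIZE` is 285 (the buffer size used in `compress()` above), the relation could be sanity-checked like this:

    use nimiq_bls::{CompressedPublicKey, PublicKey};

    // Uncompressed = x and y coordinates; compressed = x plus flag bits.
    assert_eq!(
        PublicKey::TRUSTED_SERIALIZATION_SIZE,
        2 * CompressedPublicKey::SIZE
    );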
diff --git a/web-client/Cargo.toml b/web-client/Cargo.toml
index 7751a91398..b8770d344c 100644
--- a/web-client/Cargo.toml
+++ b/web-client/Cargo.toml
@@ -22,13 +22,17 @@ workspace = true
 crate-type = ["rlib", "cdylib"]
 
 [dependencies]
+ark-serialize = "0.4"
 futures = { workspace = true }
 gloo-timers = { version = "0.3", features = ["futures"] }
 hex = "0.4"
+idb = "0.6.4"
 js-sys = "0.3"
 log = { workspace = true }
 rand_core = "0.6.4"
 serde = "1.0"
+serde_bytes = "0.11"
+serde_json = "1.0"
 serde-wasm-bindgen = "0.6"
 tokio = { version = "1.43", features = ["sync"] }
 tsify = { git = "https://github.com/sisou/tsify", branch = "sisou/comments", default-features = false, features = ["js"] }
diff --git a/web-client/src/client/bls_cache.rs b/web-client/src/client/bls_cache.rs
new file mode 100644
index 0000000000..144cfcd025
--- /dev/null
+++ b/web-client/src/client/bls_cache.rs
@@ -0,0 +1,88 @@
+use idb::{Database, Error, KeyPath, ObjectStore, TransactionMode};
+use nimiq_bls::{LazyPublicKey, PublicKey};
+use nimiq_serde::{Deserialize, Serialize};
+
+/// Caches decompressed BlsPublicKeys in an IndexedDB
+pub(crate) struct BlsCache {
+    db: Option<Database>,
+    keys: Vec<LazyPublicKey>,
+}
+
+#[derive(Deserialize, Serialize)]
+struct BlsKeyEntry {
+    #[serde(with = "serde_bytes")]
+    public_key: [u8; PublicKey::TRUSTED_SERIALIZATION_SIZE],
+}
+
+const BLS_KEYS: &str = "bls_keys";
+const PUBLIC_KEY: &str = "public_key";
+
+impl BlsCache {
+    pub async fn new() -> Self {
+        let db = match Database::builder("nimiq_client_cache")
+            .version(1)
+            .add_object_store(
+                ObjectStore::builder(BLS_KEYS).key_path(Some(KeyPath::new_single(PUBLIC_KEY))),
+            )
+            .build()
+            .await
+        {
+            Ok(db) => Some(db),
+            Err(err) => {
+                log::warn!("idb: Couldn't create database {}", err);
+                None
+            }
+        };
+
+        BlsCache { db, keys: vec![] }
+    }
+
+    /// Add the given keys into IndexedDB.
+    ///
+    /// The given keys must be correctly decompressed already, otherwise this
+    /// function will panic.
+    pub async fn add_keys(&self, keys: Vec<LazyPublicKey>) -> Result<(), Error> {
+        if let Some(db) = &self.db {
+            log::info!("storing {} keys in idb", keys.len());
+            let transaction = db.transaction(&[BLS_KEYS], TransactionMode::ReadWrite)?;
+            let bls_keys_store = transaction.object_store(BLS_KEYS)?;
+
+            for key in keys {
+                assert!(key.has_uncompressed());
+                let entry = BlsKeyEntry {
+                    public_key: key
+                        .uncompress()
+                        .expect("must not pass invalid keys to `BlsCache::add_keys`")
+                        .trusted_serialize(),
+                };
+                let entry_js_value = serde_wasm_bindgen::to_value(&entry).unwrap();
+                bls_keys_store.put(&entry_js_value, None)?.await?;
+            }
+        } else {
+            log::error!("can't store {} keys in idb", keys.len());
+        }
+        Ok(())
+    }
+
+    /// Fetches all bls keys from the IndexedDB and stores them, which makes
+    /// the decompressed keys available in other places.
+    pub async fn init(&mut self) -> Result<(), Error> {
+        if let Some(db) = &self.db {
+            let transaction = db.transaction(&[BLS_KEYS], TransactionMode::ReadOnly)?;
+            let bls_keys_store = transaction.object_store(BLS_KEYS)?;
+
+            let js_keys = bls_keys_store.get_all(None, None)?.await?;
+            log::info!("loaded {} keys from idb", js_keys.len());
+
+            for js_key in &js_keys {
+                let value: BlsKeyEntry = serde_wasm_bindgen::from_value(js_key.clone()).unwrap();
+                let public_key = PublicKey::trusted_deserialize(&value.public_key);
+                self.keys.push(LazyPublicKey::from(public_key));
+            }
+            transaction.await?;
+        } else {
+            log::error!("couldn't load keys from idb");
+        }
+        Ok(())
+    }
+}
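Taken together, the intended flow inside the web client is: open the database once, replay all persisted keys into the global `LazyPublicKey` intern cache, and write keys back whenever freshly decompressed ones come by. A sketch under those assumptions (`warm_and_persist` is a hypothetical wrapper; `BlsCache` is crate-private, so this can only live inside `web-client/src/client`):

    use idb::Error;
    use nimiq_bls::LazyPublicKey;

    async fn warm_and_persist(keys: Vec<LazyPublicKey>) -> Result<(), Error> {
        // Open (or, on failure, skip) the IndexedDB, then warm the global
        // LazyPublicKey intern cache with keys persisted by earlier sessions.
        let mut cache = BlsCache::new().await;
        cache.init().await?;

        // Persist freshly decompressed keys for the next startup.
        cache.add_keys(keys).await?;
        Ok(())
    }

The `#[serde(with = "serde_bytes")]` attribute matters here: with `serde_wasm_bindgen`, the 570-byte array is stored as a single `Uint8Array` value rather than a JS array of 570 numbers.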
diff --git a/web-client/src/client/lib.rs b/web-client/src/client/lib.rs
index 458ac924e2..2ce9067e0f 100644
--- a/web-client/src/client/lib.rs
+++ b/web-client/src/client/lib.rs
@@ -20,7 +20,9 @@ pub use nimiq::{
     },
     extras::{panic::initialize_panic_reporting, web_logging::initialize_web_logging},
 };
+use nimiq_block::Block;
 use nimiq_blockchain_interface::{AbstractBlockchain, BlockchainEvent};
+use nimiq_bls::LazyPublicKey;
 use nimiq_consensus::ConsensusEvent;
 use nimiq_hash::Blake2bHash;
 use nimiq_network_interface::{
@@ -43,6 +45,7 @@ use crate::{
             PlainValidatorType,
         },
         block::{PlainBlock, PlainBlockType},
+        bls_cache::BlsCache,
         peer_info::{PlainPeerInfo, PlainPeerInfoArrayType},
     },
     common::{
@@ -113,6 +116,8 @@ pub struct Client {
     /// Map from transaction hash as hex string to oneshot sender.
     /// Used to await transaction events in `send_transaction`.
     transaction_oneshots: Rc<RefCell<HashMap<String, oneshot::Sender<PlainTransactionDetails>>>>,
+
+    bls_cache: Rc<RefCell<BlsCache>>,
 }
 
 #[wasm_bindgen]
@@ -182,6 +187,8 @@ impl Client {
         let zkp_component = client.take_zkp_component().unwrap();
         spawn_local(zkp_component);
 
+        let bls_cache = BlsCache::new().await;
+
         let client = Client {
             inner: client,
             network_id: from_network_id(web_config.network_id),
@@ -192,6 +199,7 @@ impl Client {
             peer_changed_listeners: Rc::new(RefCell::new(HashMap::with_capacity(1))),
             transaction_listeners: Rc::new(RefCell::new(HashMap::new())),
             transaction_oneshots: Rc::new(RefCell::new(HashMap::new())),
+            bls_cache: Rc::new(RefCell::new(bls_cache)),
         };
 
         client.setup_offline_online_event_handlers();
@@ -200,6 +208,10 @@ impl Client {
         client.setup_network_events();
         client.setup_transaction_events().await;
 
+        if let Err(err) = client.bls_cache.borrow_mut().init().await {
+            log::warn!("Failed loading bls cache {}", err);
+        }
+
         Ok(client)
     }
@@ -1039,34 +1051,23 @@ impl Client {
         let mut blockchain_events = blockchain.read().notifier_as_stream();
 
         let block_listeners = Rc::clone(&self.head_changed_listeners);
+        let bls_cache = Rc::clone(&self.bls_cache);
 
         spawn_local(async move {
             loop {
-                let (hash, reason, reverted_blocks, adopted_blocks) =
+                let (hash, reason, reverted_blocks, adopted_block_hashes) =
                     match blockchain_events.next().await {
                         Some(BlockchainEvent::Extended(hash)) => {
-                            let adopted_blocks = Array::new();
-                            adopted_blocks.push(&hash.to_hex().into());
-
-                            (hash, "extended", Array::new(), adopted_blocks)
+                            (hash.clone(), "extended", Array::new(), vec![hash])
                         }
                         Some(BlockchainEvent::HistoryAdopted(hash)) => {
-                            let adopted_blocks = Array::new();
-                            adopted_blocks.push(&hash.to_hex().into());
-
-                            (hash, "history-adopted", Array::new(), adopted_blocks)
+                            (hash.clone(), "history-adopted", Array::new(), vec![hash])
                         }
                         Some(BlockchainEvent::EpochFinalized(hash)) => {
-                            let adopted_blocks = Array::new();
-                            adopted_blocks.push(&hash.to_hex().into());
-
-                            (hash, "epoch-finalized", Array::new(), adopted_blocks)
+                            (hash.clone(), "epoch-finalized", Array::new(), vec![hash])
                         }
                         Some(BlockchainEvent::Finalized(hash)) => {
-                            let adopted_blocks = Array::new();
-                            adopted_blocks.push(&hash.to_hex().into());
-
-                            (hash, "finalized", Array::new(), adopted_blocks)
+                            (hash.clone(), "finalized", Array::new(), vec![hash])
                         }
                         Some(BlockchainEvent::Rebranched(old_chain, new_chain)) => {
                             let hash = &new_chain.last().unwrap().0.clone();
@@ -1076,9 +1077,9 @@ impl Client {
                                 reverted_blocks.push(&h.to_hex().into());
                             }
 
-                            let adopted_blocks = Array::new();
+                            let mut adopted_blocks = Vec::new();
                             for (h, _) in new_chain {
-                                adopted_blocks.push(&h.to_hex().into());
+                                adopted_blocks.push(h);
                             }
 
                             (
@@ -1089,13 +1090,18 @@ impl Client {
                                 "rebranched",
                                 reverted_blocks,
                                 adopted_blocks,
                             )
                         }
                         Some(BlockchainEvent::Stored(block)) => {
-                            (block.hash(), "stored", Array::new(), Array::new())
+                            (block.hash(), "stored", Array::new(), Vec::new())
                         }
                         None => {
                             break;
                         }
                     };
 
+                let adopted_blocks = Array::new();
+                for hash in &adopted_block_hashes {
+                    adopted_blocks.push(&hash.to_hex().into());
+                }
+
                 let args = Array::new();
                 args.push(&hash.to_hex().into());
                 args.push(&reason.into());
@@ -1106,6 +1112,23 @@ impl Client {
                 for listener in block_listeners.borrow().values() {
                     let _ = listener.apply(&this, &args);
                 }
+
+                // Cache decompressed validator bls keys
+                for hash in &adopted_block_hashes {
+                    if let Ok(Block::Macro(macro_block)) = blockchain.read().get_block(hash, false)
+                    {
+                        if let Some(validators) = macro_block.header.validators {
+                            let bls_keys = validators
+                                .validators
+                                .iter()
+                                .map(|validator| validator.voting_key.clone())
+                                .collect::<Vec<_>>();
+                            if let Err(err) = bls_cache.borrow_mut().add_keys(bls_keys).await {
+                                log::warn!("Failed caching bls keys {}", err);
+                            }
+                        }
+                    }
+                }
             }
         });
     }
diff --git a/web-client/src/client/mod.rs b/web-client/src/client/mod.rs
index d8ac841c7f..7368593ad8 100644
--- a/web-client/src/client/mod.rs
+++ b/web-client/src/client/mod.rs
@@ -1,4 +1,5 @@
 pub mod account;
 pub mod block;
+mod bls_cache;
 pub mod lib;
 pub mod peer_info;