Draft: Store decompressed BlsPublicKeys in IndexedDB #3174
base: albatross
New file (the `bls_cache` module):
@@ -0,0 +1,89 @@
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress};
use idb::{Database, Error, KeyPath, ObjectStore, TransactionMode};
use nimiq_bls::{G2Projective, LazyPublicKey, PublicKey};
use nimiq_serde::{Deserialize, Serialize};

/// Caches decompressed BlsPublicKeys in an IndexedDB
pub(crate) struct BlsCache {
    db: Option<Database>,
    keys: Vec<LazyPublicKey>,
}

#[derive(Serialize, Deserialize, Debug)]
struct BlsKeyEntry {
    compressed_key: String,
    public_key: String,
Review comment: We talked about adding a "last used" field so we can apply some sort of cache eviction in the future. (A sketch of such an entry follows after the struct.)
}
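Regarding the review comment above, here is a minimal sketch of what such an entry could look like. The struct name, the `last_used_ms` field name, and the `f64` millisecond-timestamp type are assumptions for illustration, not part of this PR:

// Sketch only: cache entry extended with a "last used" marker, so a future
// eviction pass can drop keys that have not been touched for a while.
#[derive(Serialize, Deserialize, Debug)]
struct BlsKeyEntryWithLastUsed {
    compressed_key: String,
    public_key: String,
    // Milliseconds since the Unix epoch (e.g. from js_sys::Date::now()),
    // refreshed whenever the entry is written or read.
    last_used_ms: f64,
}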
impl BlsCache {
    pub async fn new() -> Self {
        let db = match Database::builder("nimiq_client_cache")
            .version(1)
            .add_object_store(
                ObjectStore::builder("bls_keys")
                    .key_path(Some(KeyPath::new_single("compressed_key"))),
            )
            .build()
            .await
        {
            Ok(db) => Some(db),
            Err(err) => {
                log::warn!("idb: Couldn't create database {}", err);
                None
            }
        };

        BlsCache { db, keys: vec![] }
    }

    /// Add the given keys into IndexedDB
    pub async fn add_keys(&self, keys: Vec<LazyPublicKey>) -> Result<(), Error> {
        if let Some(db) = &self.db {
            let transaction = db.transaction(&["bls_keys"], TransactionMode::ReadWrite)?;
            let bls_keys_store = transaction.object_store("bls_keys")?;

            for key in keys {
                let mut result = Vec::new();
                key.uncompress()
Review comment: This is a potentially expensive operation. We should probably tell the caller that the keys should have been uncompressed before already. (A sketch addressing this and the next comment follows after `add_keys`.)
                    .unwrap()
Review comment: Is it the function's contract that you may only pass valid keys? If so, we should specify that in the documentation. If not, we should not unwrap here.
                    .public_key
                    .serialize_with_mode(&mut result, Compress::No)
                    .unwrap();
                let public_key = hex::encode(&result);
                let compressed_key = hex::encode(key.compressed().serialize_to_vec());
Review comment: Can the data also be stored without the hex encoding, i.e. directly as bytes in the IndexedDB? (A sketch follows after this impl block.)
                let entry = BlsKeyEntry {
                    compressed_key,
                    public_key,
                };
                let entry_js_value = serde_wasm_bindgen::to_value(&entry).unwrap();
                bls_keys_store.put(&entry_js_value, None)?.await?;
            }
        }
        Ok(())
    }
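One possible revision of `add_keys` that addresses the two review comments above. This is a sketch, not the PR's code, and it assumes `uncompress()` returns an `Option`, as the `.unwrap()` in the original suggests:

    /// Add the given keys to IndexedDB.
    ///
    /// Decompressing a key is expensive, so callers should pass keys that they
    /// have already decompressed (or are about to use anyway). Keys that fail
    /// to decompress are skipped instead of panicking.
    pub async fn add_keys(&self, keys: Vec<LazyPublicKey>) -> Result<(), Error> {
        if let Some(db) = &self.db {
            let transaction = db.transaction(&["bls_keys"], TransactionMode::ReadWrite)?;
            let bls_keys_store = transaction.object_store("bls_keys")?;

            for key in keys {
                // Sketch: skip keys that cannot be decompressed instead of unwrapping.
                let uncompressed = match key.uncompress() {
                    Some(uncompressed) => uncompressed,
                    None => {
                        log::warn!("Skipping invalid BLS key");
                        continue;
                    }
                };

                let mut result = Vec::new();
                uncompressed
                    .public_key
                    .serialize_with_mode(&mut result, Compress::No)
                    .unwrap();
                let public_key = hex::encode(&result);
                let compressed_key = hex::encode(key.compressed().serialize_to_vec());

                let entry = BlsKeyEntry {
                    compressed_key,
                    public_key,
                };
                let entry_js_value = serde_wasm_bindgen::to_value(&entry).unwrap();
                bls_keys_store.put(&entry_js_value, None)?.await?;
            }
        }
        Ok(())
    }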
    /// Fetches all BLS keys from IndexedDB into this cache, which makes the
    /// decompressed keys available in other places.
    pub async fn init(&mut self) -> Result<(), Error> {
        if let Some(db) = &self.db {
            let transaction = db.transaction(&["bls_keys"], TransactionMode::ReadOnly)?;
            let bls_keys_store = transaction.object_store("bls_keys")?;

            let js_keys = bls_keys_store.get_all(None, None)?.await?;

            for js_key in &js_keys {
                let value: BlsKeyEntry = serde_wasm_bindgen::from_value(js_key.clone()).unwrap();
                let public_key = PublicKey::new(
                    G2Projective::deserialize_uncompressed_unchecked(
                        &*hex::decode(value.public_key.clone()).unwrap(),
                    )
                    .unwrap(),
                );
                self.keys.push(LazyPublicKey::from(public_key));
            }
            transaction.await?;
        }
        Ok(())
    }
}
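On the question of storing raw bytes instead of hex strings: serde_wasm_bindgen can serialize byte buffers directly, so the entry could hold `Vec<u8>` fields. A sketch, assuming the `serde_bytes` crate is added as a dependency, that the `Serialize`/`Deserialize` derives used here accept serde's field attributes, and relying on IndexedDB's support for binary values and (since IndexedDB 2) binary keys:

// Sketch only: store the key material as bytes instead of hex strings.
// With `serde_bytes`, serde_wasm_bindgen can serialize these fields as a
// Uint8Array, avoiding the hex::encode / hex::decode round trips in
// `add_keys` and `init`.
#[derive(Serialize, Deserialize, Debug)]
struct BlsKeyEntryBytes {
    #[serde(with = "serde_bytes")]
    compressed_key: Vec<u8>,
    #[serde(with = "serde_bytes")]
    public_key: Vec<u8>,
}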
web-client/src/client/lib.rs:
@@ -20,7 +20,9 @@
    },
    extras::{panic::initialize_panic_reporting, web_logging::initialize_web_logging},
};
+use nimiq_block::Block;
use nimiq_blockchain_interface::{AbstractBlockchain, BlockchainEvent};
+use nimiq_bls::LazyPublicKey;
use nimiq_consensus::ConsensusEvent;
use nimiq_hash::Blake2bHash;
use nimiq_network_interface::{
@@ -43,6 +45,7 @@
            PlainValidatorType,
        },
        block::{PlainBlock, PlainBlockType},
+        bls_cache::BlsCache,
        peer_info::{PlainPeerInfo, PlainPeerInfoArrayType},
    },
    common::{
@@ -113,6 +116,8 @@
    /// Map from transaction hash as hex string to oneshot sender.
    /// Used to await transaction events in `send_transaction`.
    transaction_oneshots: Rc<RefCell<HashMap<String, oneshot::Sender<PlainTransactionDetails>>>>,
+
+    bls_cache: Rc<RefCell<BlsCache>>,
}

#[wasm_bindgen]
@@ -182,6 +187,8 @@
        let zkp_component = client.take_zkp_component().unwrap();
        spawn_local(zkp_component);

+        let bls_cache = BlsCache::new().await;
+
        let client = Client {
            inner: client,
            network_id: from_network_id(web_config.network_id),
@@ -192,6 +199,7 @@
            peer_changed_listeners: Rc::new(RefCell::new(HashMap::with_capacity(1))),
            transaction_listeners: Rc::new(RefCell::new(HashMap::new())),
            transaction_oneshots: Rc::new(RefCell::new(HashMap::new())),
+            bls_cache: Rc::new(RefCell::new(bls_cache)),
        };

        client.setup_offline_online_event_handlers();
@@ -200,6 +208,10 @@
        client.setup_network_events();
        client.setup_transaction_events().await;

+        if let Err(err) = client.bls_cache.borrow_mut().init().await {
Clippy warning (GitHub Actions) on line 211 of web-client/src/client/lib.rs: this `RefCell` reference is held across an await point. (See the sketch after this hunk.)
+            log::warn!("Failed loading bls cache {}", err);
+        }
+
        Ok(client)
    }
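One way to avoid this warning (Clippy's await_holding_refcell_ref lint) in the constructor, shown as a sketch and assuming nothing needs the cache between its creation and this point: initialize the cache while it is still uniquely owned, before it is wrapped in `Rc<RefCell<_>>`, so no RefCell borrow has to live across an await.

        // Sketch only: run `init` before the cache is shared.
        let mut bls_cache = BlsCache::new().await;
        if let Err(err) = bls_cache.init().await {
            log::warn!("Failed loading bls cache {}", err);
        }
        let bls_cache = Rc::new(RefCell::new(bls_cache));

The `Client` literal would then store this `Rc` directly (`bls_cache: Rc::clone(&bls_cache)` or by move), and the `borrow_mut().init()` call after the setup functions disappears.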
@@ -1039,34 +1051,23 @@
        let mut blockchain_events = blockchain.read().notifier_as_stream();

        let block_listeners = Rc::clone(&self.head_changed_listeners);
+        let bls_cache = Rc::clone(&self.bls_cache);

        spawn_local(async move {
            loop {
-                let (hash, reason, reverted_blocks, adopted_blocks) =
+                let (hash, reason, reverted_blocks, adopted_blocks_hashs) =
Review comment with a suggested change (the suggestion is not shown).
                    match blockchain_events.next().await {
                        Some(BlockchainEvent::Extended(hash)) => {
-                            let adopted_blocks = Array::new();
-                            adopted_blocks.push(&hash.to_hex().into());
-
-                            (hash, "extended", Array::new(), adopted_blocks)
+                            (hash.clone(), "extended", Array::new(), vec![hash])
                        }
                        Some(BlockchainEvent::HistoryAdopted(hash)) => {
-                            let adopted_blocks = Array::new();
-                            adopted_blocks.push(&hash.to_hex().into());
-
-                            (hash, "history-adopted", Array::new(), adopted_blocks)
+                            (hash.clone(), "history-adopted", Array::new(), vec![hash])
                        }
                        Some(BlockchainEvent::EpochFinalized(hash)) => {
-                            let adopted_blocks = Array::new();
-                            adopted_blocks.push(&hash.to_hex().into());
-
-                            (hash, "epoch-finalized", Array::new(), adopted_blocks)
+                            (hash.clone(), "epoch-finalized", Array::new(), vec![hash])
                        }
                        Some(BlockchainEvent::Finalized(hash)) => {
-                            let adopted_blocks = Array::new();
-                            adopted_blocks.push(&hash.to_hex().into());
-
-                            (hash, "finalized", Array::new(), adopted_blocks)
+                            (hash.clone(), "finalized", Array::new(), vec![hash])
                        }
                        Some(BlockchainEvent::Rebranched(old_chain, new_chain)) => {
                            let hash = &new_chain.last().unwrap().0.clone();
@@ -1076,9 +1077,9 @@
                                reverted_blocks.push(&h.to_hex().into());
                            }

-                            let adopted_blocks = Array::new();
+                            let mut adopted_blocks = Vec::new();
                            for (h, _) in new_chain {
-                                adopted_blocks.push(&h.to_hex().into());
+                                adopted_blocks.push(h);
                            }

                            (
@@ -1089,23 +1090,45 @@
                            )
                        }
                        Some(BlockchainEvent::Stored(block)) => {
-                            (block.hash(), "stored", Array::new(), Array::new())
+                            (block.hash(), "stored", Array::new(), Vec::new())
                        }
                        None => {
                            break;
                        }
                    };

+                let adopted_blocks = Array::new();
+                for hash in &adopted_blocks_hashs {
+                    adopted_blocks.push(&hash.to_hex().into());
+                }
+
                let args = Array::new();
                args.push(&hash.to_hex().into());
                args.push(&reason.into());
                args.push(&reverted_blocks);
-                args.push(&adopted_blocks);
+                args.push(&adopted_blocks.clone());
Review comment with a suggested change (the suggestion is not shown).
                let this = JsValue::null();
                for listener in block_listeners.borrow().values() {
                    let _ = listener.apply(&this, &args);
                }

+                // Cache decompressed validator bls keys
+                for hash in &adopted_blocks_hashs {
+                    if let Ok(Block::Macro(macro_block)) = blockchain.read().get_block(hash, false)
+                    {
+                        if let Some(validators) = macro_block.header.validators {
+                            let bls_keys = validators
+                                .validators
+                                .iter()
+                                .map(|validator| validator.voting_key.clone())
+                                .collect::<Vec<LazyPublicKey>>();
+                            if let Err(err) = bls_cache.borrow_mut().add_keys(bls_keys).await {
Clippy warning (GitHub Actions) on line 1126 of web-client/src/client/lib.rs: this `RefCell` reference is held across an await point. (See the sketch after this hunk.)
+                                log::warn!("Failed caching bls keys {}", err);
+                            }
+                        }
+                    }
+                }
            }
        });
    }
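For this second occurrence of the warning, the borrow cannot simply be scoped away, since `add_keys` is itself async. One possible restructuring, shown only as a sketch and not part of this PR: because `add_keys` needs only `&self`, the cache could be shared as a plain `Rc<BlsCache>` and keep its interior mutability inside the struct, where the borrows of `keys` are short-lived and never cross an await point.

// Sketch only (assumes `use std::cell::RefCell;` in the bls_cache module):
// move the RefCell inside BlsCache instead of wrapping the whole cache in one.
pub(crate) struct BlsCache {
    db: Option<Database>,
    keys: RefCell<Vec<LazyPublicKey>>,
}

// The event loop would then hold an `Rc<BlsCache>` and call
//
//     if let Err(err) = bls_cache.add_keys(bls_keys).await {
//         log::warn!("Failed caching bls keys {}", err);
//     }
//
// without any outer RefCell guard being held across the await.

With this layout, `init` could take `&self` as well and push into `self.keys.borrow_mut()` between awaits, since its awaits happen before and after the loop that fills the vector.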
Module declarations:
@@ -1,4 +1,5 @@
pub mod account;
pub mod block;
+mod bls_cache;
pub mod lib;
pub mod peer_info;
Review comment: Alphabetical.