Add locking to avoid race conditions in cache #118

Open · wants to merge 4 commits into main
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -12,6 +12,7 @@ tracing = "0.1"
 serde_json = "1.0.116"
 serde_with = "3.11.0"
 serde = "1.0.197"
+fs2 = "0.4.3"
 cairo-native = { git = "https://github.com/lambdaclass/cairo_native.git", rev = "6a4efafa26d6a0424dee593d2091206c6e9f428d" }
 anyhow = "1.0"
 # Sequencer Dependencies
1 change: 1 addition & 0 deletions rpc-state-reader/Cargo.toml
@@ -30,6 +30,7 @@ starknet_gateway = { workspace = true }
 tracing = { workspace = true }
 anyhow.workspace = true
 sierra-emu.workspace = true
+fs2.workspace = true

 [dev-dependencies]
 pretty_assertions_sorted = "1.2.3"
49 changes: 45 additions & 4 deletions rpc-state-reader/src/cache.rs
@@ -2,11 +2,13 @@ use std::{
     cell::RefCell,
     collections::{hash_map::Entry, HashMap},
     fs::{self, File},
+    io::Seek,
     path::PathBuf,
 };

 use blockifier::state::state_api::{StateReader as BlockifierStateReader, StateResult};
 use cairo_vm::Felt252;
+use fs2::FileExt;
 use serde::{Deserialize, Serialize};
 use serde_with::serde_as;
 use starknet::core::types::ContractClass;
@@ -59,8 +61,27 @@ impl Drop for RpcCachedStateReader {
         let path = PathBuf::from(format!("rpc_cache/{}.json", self.reader.block_number));
         let parent = path.parent().unwrap();
         fs::create_dir_all(parent).unwrap();
-        let file = File::create(path).unwrap();
-        serde_json::to_writer_pretty(file, &self.state).unwrap();
+
+        let mut file = File::options()
+            .read(true)
+            .write(true)
+            .create(true)
+            .truncate(false)
+            .open(path)
+            .unwrap();
+        file.lock_exclusive().unwrap();
+
+        // try to read old cache, and merge it with the current one
+        if let Ok(old_state) = serde_json::from_reader::<_, RpcCache>(&file) {
+            merge_cache(self.state.get_mut(), old_state);
+        }
+
+        // overwrite the file with the new cache
+        file.set_len(0).unwrap();
+        file.seek(std::io::SeekFrom::Start(0)).unwrap();
+
+        serde_json::to_writer_pretty(&file, &self.state).unwrap();
+        file.unlock().unwrap();
     }
 }

@@ -70,9 +91,14 @@ impl RpcCachedStateReader {
         let path = PathBuf::from(format!("rpc_cache/{}.json", reader.block_number));

         match File::open(path) {
-            Ok(file) => serde_json::from_reader(file).unwrap(),
+            Ok(file) => {
+                file.lock_shared().unwrap();
+                let state = serde_json::from_reader(&file).unwrap();
+                file.unlock().unwrap();
+                state
+            }
             Err(_) => {
-                warn!("Cache for block {} was not found", reader.block_number);
+                warn!("Failed to read cache for block {}", reader.block_number);
                 RpcCache::default()
             }
         }
@@ -220,3 +246,18 @@ impl BlockifierStateReader for RpcCachedStateReader {
         todo!();
     }
 }
+
+fn merge_cache(cache: &mut RpcCache, other: RpcCache) {
+    if cache.block.is_none() {
+        cache.block = other.block
+    }
+    cache.transactions.extend(other.transactions);
+    cache.contract_classes.extend(other.contract_classes);
+    cache.storage.extend(other.storage);
+    cache.nonces.extend(other.nonces);
+    cache.class_hashes.extend(other.class_hashes);
+    cache
+        .transaction_receipts
+        .extend(other.transaction_receipts);
+    cache.transaction_traces.extend(other.transaction_traces);
+}
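
For reference, a minimal standalone sketch of the lock-merge-rewrite pattern that the Drop implementation above applies. It assumes fs2 0.4, serde_json, and anyhow (all present in the manifests above), and uses a hypothetical persist_cache helper over a plain HashMap<String, String> in place of RpcCache; it illustrates the approach rather than reproducing code from this PR.

use std::{
    collections::HashMap,
    fs::File,
    io::{Seek, SeekFrom},
};

use fs2::FileExt;

// Hypothetical helper mirroring the write path above: take an exclusive
// advisory lock, merge whatever another process persisted in the meantime,
// then truncate and rewrite the file before releasing the lock.
fn persist_cache(path: &str, ours: &mut HashMap<String, String>) -> anyhow::Result<()> {
    let mut file = File::options()
        .read(true)
        .write(true)
        .create(true)
        .truncate(false)
        .open(path)?;
    file.lock_exclusive()?;

    // Merge the on-disk entries into ours, analogous to merge_cache above.
    if let Ok(old) = serde_json::from_reader::<_, HashMap<String, String>>(&file) {
        ours.extend(old);
    }

    // Rewind and truncate so stale bytes from a longer previous cache
    // cannot remain past the end of the new JSON document.
    file.set_len(0)?;
    file.seek(SeekFrom::Start(0))?;
    serde_json::to_writer_pretty(&file, &*ours)?;

    file.unlock()?;
    Ok(())
}

Because fs2 locks are advisory, this only serializes writers and shields readers that take lock_shared (as the updated constructor does) from half-written files when every participating process uses the same locking protocol.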