Skip to content

Commit

Permalink
chore: Update rust toolchain to nightly 2025 01 09 (#271)
Browse files Browse the repository at this point in the history
  • Loading branch information
gshep authored Feb 5, 2025
1 parent 2b49e54 commit dedb368
Show file tree
Hide file tree
Showing 23 changed files with 180 additions and 206 deletions.
220 changes: 120 additions & 100 deletions Cargo.lock

Large diffs are not rendered by default.

5 changes: 2 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ bridging-payment-client = { path = "gear-programs/bridging-payment/client" }
vft-manager = { path = "gear-programs/vft-manager" }
vft-manager-app = { path = "gear-programs/vft-manager/app" }
vft-manager-client = { path = "gear-programs/vft-manager/client" }
gear_proof_storage = { path = "gear-programs/proof-storage" }
gear_proof_storage = { path = "gear-programs/proof-storage", default-features = false }
checkpoint_light_client-io = { path = "gear-programs/checkpoint-light-client/io", default-features = false }
utils-prometheus = { path = "utils-prometheus" }
checkpoint_light_client = { path = "gear-programs/checkpoint-light-client", default-features = false }
Expand Down Expand Up @@ -152,7 +152,7 @@ serde_json = "1.0"
sha2 = "0.10"
sled = "0.34.7"
static_assertions = "1.1.0"
thiserror = "1.0.61"
thiserror = { version = "2.0.11", default-features = false }
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
tokio = { version = "1.23.0", features = ["full"] }
tree_hash = { git = "https://github.com/gear-tech/tree_hash.git", branch = "gear-v0.6.0", default-features = false }
Expand All @@ -162,7 +162,6 @@ unroll = "0.1.5"
# Gear/Substrate deps
gstd = { version = "1.6.2", features = ["nightly"] }
gtest = "1.6.2"
gwasm-builder = { version = "1.6.2", package = "gear-wasm-builder" }
gmeta = "1.6.2"
gear-wasm-builder = { version = "1.6.2", default-features = false }
gsdk = "1.6.2"
Expand Down
10 changes: 5 additions & 5 deletions circuits/plonky2_ecdsa/src/curve/curve_msm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ pub fn msm_precompute<C: Curve>(
}

fn precompute_single_generator<C: Curve>(g: ProjectivePoint<C>, w: usize) -> Vec<AffinePoint<C>> {
let digits = (C::ScalarField::BITS + w - 1) / w;
let digits = C::ScalarField::BITS.div_ceil(w);
let mut powers: Vec<ProjectivePoint<C>> = Vec::with_capacity(digits);
powers.push(g);
for i in 1..digits {
Expand Down Expand Up @@ -69,7 +69,7 @@ pub fn msm_execute<C: Curve>(
) -> ProjectivePoint<C> {
assert_eq!(precomputation.powers_per_generator.len(), scalars.len());
let w = precomputation.w;
let digits = (C::ScalarField::BITS + w - 1) / w;
let digits = C::ScalarField::BITS.div_ceil(w);
let base = 1 << w;

// This is a variant of Yao's method, adapted to the multi-scalar setting. Because we use
Expand Down Expand Up @@ -108,7 +108,7 @@ pub fn msm_execute_parallel<C: Curve>(
) -> ProjectivePoint<C> {
assert_eq!(precomputation.powers_per_generator.len(), scalars.len());
let w = precomputation.w;
let digits = (C::ScalarField::BITS + w - 1) / w;
let digits = C::ScalarField::BITS.div_ceil(w);
let base = 1 << w;

// This is a variant of Yao's method, adapted to the multi-scalar setting. Because we use
Expand Down Expand Up @@ -159,7 +159,7 @@ pub fn msm_execute_parallel<C: Curve>(

pub(crate) fn to_digits<C: Curve>(x: &C::ScalarField, w: usize) -> Vec<usize> {
let scalar_bits = C::ScalarField::BITS;
let num_digits = (scalar_bits + w - 1) / w;
let num_digits = scalar_bits.div_ceil(w);

// Convert x to a bool array.
let x_canonical: Vec<_> = x
Expand All @@ -171,7 +171,7 @@ pub(crate) fn to_digits<C: Curve>(x: &C::ScalarField, w: usize) -> Vec<usize> {
.collect();
let mut x_bits = Vec::with_capacity(scalar_bits);
for i in 0..scalar_bits {
x_bits.push((x_canonical[i / 64] >> (i as u64 % 64) & 1) != 0);
x_bits.push(((x_canonical[i / 64] >> (i as u64 % 64)) & 1) != 0);
}

let mut digits = Vec::with_capacity(num_digits);
Expand Down
2 changes: 1 addition & 1 deletion circuits/plonky2_ecdsa/src/curve/curve_multiplication.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ const WINDOW_BITS: usize = 4;
const BASE: usize = 1 << WINDOW_BITS;

fn digits_per_scalar<C: Curve>() -> usize {
(C::ScalarField::BITS + WINDOW_BITS - 1) / WINDOW_BITS
C::ScalarField::BITS.div_ceil(WINDOW_BITS)
}

/// Precomputed state used for scalar x ProjectivePoint multiplications,
Expand Down
2 changes: 1 addition & 1 deletion circuits/plonky2_ecdsa/src/curve/secp256k1.rs
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ mod tests {
let mut sum = ProjectivePoint::ZERO;
for limb in lhs.to_canonical_biguint().to_u64_digits().iter() {
for j in 0..64 {
if (limb >> j & 1u64) != 0u64 {
if ((limb >> j) & 1u64) != 0u64 {
sum = sum + g;
}
g = g.double();
Expand Down
2 changes: 1 addition & 1 deletion circuits/plonky2_ed25519/src/curve/curve_multiplication.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ const WINDOW_BITS: usize = 4;
const BASE: usize = 1 << WINDOW_BITS;

fn digits_per_scalar<C: Curve>() -> usize {
(C::ScalarField::BITS + WINDOW_BITS - 1) / WINDOW_BITS
C::ScalarField::BITS.div_ceil(WINDOW_BITS)
}

/// Precomputed state used for scalar x ProjectivePoint multiplications,
Expand Down
2 changes: 1 addition & 1 deletion circuits/plonky2_ed25519/src/curve/ed25519.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ pub(crate) fn mul_naive(
let mut sum = ProjectivePoint::ZERO;
for limb in lhs.to_canonical_biguint().to_u64_digits().iter() {
for j in 0..64 {
if (limb >> j & 1u64) != 0u64 {
if ((limb >> j) & 1u64) != 0u64 {
sum = sum + g;
}
g = g.double();
Expand Down
2 changes: 1 addition & 1 deletion ethereum-common/src/base_types/bits.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ const BITS_PER_BYTE: usize = 8;
///
/// `bit_len == 0` requires a single byte.
fn bytes_for_bit_len(bit_len: usize) -> usize {
core::cmp::max(1, (bit_len + BITS_PER_BYTE - 1) / BITS_PER_BYTE)
core::cmp::max(1, bit_len.div_ceil(BITS_PER_BYTE))
}

type Inner = BitVec<u8, Lsb0>;
Expand Down
6 changes: 2 additions & 4 deletions ethereum-common/src/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -198,9 +198,7 @@ where

match T::tree_hash_type() {
TreeHashType::Basic => {
let mut hasher = MerkleHasher::with_leaves(
(N + T::tree_hash_packing_factor() - 1) / T::tree_hash_packing_factor(),
);
let mut hasher = MerkleHasher::with_leaves(N.div_ceil(T::tree_hash_packing_factor()));

for item in vec {
hasher
Expand Down Expand Up @@ -235,7 +233,7 @@ pub fn bitfield_bytes_tree_hash_root<const N: usize>(bytes: &[u8]) -> Hash256 {
use tree_hash::{MerkleHasher, BYTES_PER_CHUNK};

let byte_size = (N + 7) / 8;
let leaf_count = (byte_size + BYTES_PER_CHUNK - 1) / BYTES_PER_CHUNK;
let leaf_count = byte_size.div_ceil(BYTES_PER_CHUNK);

let mut hasher = MerkleHasher::with_leaves(leaf_count);

Expand Down
2 changes: 1 addition & 1 deletion ethereum/client/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ primitive-types.workspace = true
reqwest.workspace = true
binary-merkle-tree.workspace = true
ahash.workspace = true
thiserror.workspace = true
thiserror = { workspace = true, features = ["std"] }
keccak-hash.workspace = true
sp-core = { workspace = true, features = ["std"] }
alloy.workspace = true
28 changes: 8 additions & 20 deletions gear-programs/bridging-payment/app/src/services/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -95,35 +95,23 @@ where
}

fn state(&self) -> &State {
unsafe {
STATE
.as_ref()
.expect("BridgingPayment::seed() should be called")
}
#[allow(clippy::deref_addrof)]
unsafe { (*&raw const STATE).as_ref() }.expect("BridgingPayment::seed() should be called")
}

fn state_mut(&mut self) -> &mut State {
unsafe {
STATE
.as_mut()
.expect("BridgingPayment::seed() should be called")
}
#[allow(clippy::deref_addrof)]
unsafe { (*&raw mut STATE).as_mut() }.expect("BridgingPayment::seed() should be called")
}

fn config(&self) -> &Config {
unsafe {
CONFIG
.as_ref()
.expect("BridgingPayment::seed() should be called")
}
#[allow(clippy::deref_addrof)]
unsafe { (*&raw const CONFIG).as_ref() }.expect("BridgingPayment::seed() should be called")
}

fn config_mut(&mut self) -> &mut Config {
unsafe {
CONFIG
.as_mut()
.expect("BridgingPayment::seed() should be called")
}
#[allow(clippy::deref_addrof)]
unsafe { (*&raw mut CONFIG).as_mut() }.expect("BridgingPayment::seed() should be called")
}
}

Expand Down
4 changes: 4 additions & 0 deletions gear-programs/bridging-payment/app/tests/gclient.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
// Clippy warns about some imports below so disable the check.
// Remove the directive once clippy is happy.
#![allow(clippy::literal_string_with_formatting_args)]

use anyhow::anyhow;
use bridging_payment::WASM_BINARY as WASM_BRIDGING_PAYMENT;
use bridging_payment_client::traits::*;
Expand Down
6 changes: 1 addition & 5 deletions gear-programs/proof-storage/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,6 @@ thiserror.workspace = true
[build-dependencies]
gear-wasm-builder.workspace = true

[lib]
crate-type = ["rlib"]
name = "gear_proof_storage"

[features]
std = []
std = ["thiserror/std", "parity-scale-codec/std", "scale-info/std"]
default = ["std"]
10 changes: 3 additions & 7 deletions gear-programs/proof-storage/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,11 @@ pub use code::WASM_BINARY_OPT as WASM_BINARY;
#[cfg(not(feature = "std"))]
mod wasm;

#[cfg_attr(feature = "std", derive(thiserror::Error))]
#[derive(Debug, Decode, Encode, TypeInfo)]
#[derive(Debug, Decode, Encode, TypeInfo, thiserror::Error)]
pub enum Error {
#[cfg_attr(feature = "std", error("Authority set id is not sequential"))]
#[error("Authority set id is not sequential")]
AuthoritySetIdNotSequential,
#[cfg_attr(
feature = "std",
error("Two or more proofs submitted at the same block")
)]
#[error("Two or more proofs submitted at the same block")]
ManyProofsSubmittedInSameBlock,
}

Expand Down
13 changes: 6 additions & 7 deletions gear-programs/vft-manager/app/src/services/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -395,20 +395,19 @@ where

/// Get a reference to the global [State].
fn state(&self) -> &State {
unsafe { STATE.as_ref().expect("VftManager::seed() should be called") }
#[allow(clippy::deref_addrof)]
unsafe { (*&raw const STATE).as_ref() }.expect("VftManager::seed() should be called")
}

/// Get a mutable reference to the global [State].
fn state_mut(&mut self) -> &mut State {
unsafe { STATE.as_mut().expect("VftManager::seed() should be called") }
#[allow(clippy::deref_addrof)]
unsafe { (*&raw mut STATE).as_mut() }.expect("VftManager::seed() should be called")
}

/// Get a reference to the global [Config].
fn config(&self) -> &Config {
unsafe {
CONFIG
.as_ref()
.expect("VftManager::seed() should be called")
}
#[allow(clippy::deref_addrof)]
unsafe { (*&raw const CONFIG).as_ref() }.expect("VftManager::seed() should be called")
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -63,24 +63,13 @@ pub fn init() {

/// Fetch state of this message tracker.
pub fn msg_tracker_state() -> Vec<(MessageId, MessageInfo)> {
unsafe {
MSG_TRACKER
.as_mut()
.expect("VftManager::seed() should be called")
}
.message_info
.clone()
.into_iter()
.collect()
msg_tracker_mut().message_info.clone().into_iter().collect()
}

/// Get mutable reference to a global message tracker.
pub fn msg_tracker_mut() -> &'static mut MessageTracker {
unsafe {
MSG_TRACKER
.as_mut()
.expect("VftManager::seed() should be called")
}
#[allow(clippy::deref_addrof)]
unsafe { (*&raw mut MSG_TRACKER).as_mut() }.expect("VftManager::seed() should be called")
}

impl MessageTracker {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,8 @@ const TX_HISTORY_DEPTH: usize = 500_000;

/// Get mutable reference to a transactions storage.
fn transactions_mut() -> &'static mut BTreeSet<(u64, u64)> {
unsafe {
TRANSACTIONS
.as_mut()
.expect("Program should be constructed")
}
#[allow(clippy::deref_addrof)]
unsafe { (*&raw mut TRANSACTIONS).as_mut() }.expect("Program should be constructed")
}

/// Initialize state that's used by this VFT Manager method.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,24 +51,13 @@ pub fn init() {

/// Fetch state of this message tracker.
pub fn msg_tracker_state() -> Vec<(MessageId, MessageInfo)> {
unsafe {
MSG_TRACKER
.as_mut()
.expect("VftManager::seed() should be called")
}
.message_info
.clone()
.into_iter()
.collect()
msg_tracker_mut().message_info.clone().into_iter().collect()
}

/// Get mutable reference to a global message tracker.
pub fn msg_tracker_mut() -> &'static mut MessageTracker {
unsafe {
MSG_TRACKER
.as_mut()
.expect("VftManager::seed() should be called")
}
#[allow(clippy::deref_addrof)]
unsafe { (*&raw mut MSG_TRACKER).as_mut() }.expect("VftManager::seed() should be called")
}

impl MessageTracker {
Expand All @@ -94,9 +83,4 @@ impl MessageTracker {
pub fn get_message_info(&self, msg_id: &MessageId) -> Option<&MessageInfo> {
self.message_info.get(msg_id)
}

/// Stop tracking message state. It will return current state of the target message.
pub fn remove_message_info(&mut self, msg_id: &MessageId) -> Option<MessageInfo> {
self.message_info.remove(msg_id)
}
}
15 changes: 9 additions & 6 deletions gear-programs/vft-service/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,18 +19,21 @@ pub struct Storage {

impl Storage {
pub fn get_mut() -> &'static mut Self {
unsafe { STORAGE.as_mut().expect("Storage is not initialized") }
#[allow(clippy::deref_addrof)]
unsafe { (*&raw mut STORAGE).as_mut() }.expect("Storage is not initialized")
}

pub fn get() -> &'static Self {
unsafe { STORAGE.as_ref().expect("Storage is not initialized") }
#[allow(clippy::deref_addrof)]
unsafe { (*&raw const STORAGE).as_ref() }.expect("Storage is not initialized")
}

pub fn balances() -> &'static mut HashMap<ActorId, U256> {
let storage = unsafe { STORAGE.as_mut().expect("Storage is not initialized") };
&mut storage.balances
&mut Self::get_mut().balances
}

pub fn total_supply() -> &'static mut U256 {
let storage = unsafe { STORAGE.as_mut().expect("Storage is not initialized") };
&mut storage.total_supply
&mut Self::get_mut().total_supply
}
}

Expand Down
2 changes: 1 addition & 1 deletion prover/src/common/poseidon_bn128/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ impl<'de, F: RichField> Deserialize<'de> for PoseidonBN128HashOut<F> {
{
struct PoseidonBN128HashOutVisitor;

impl<'a> serde::de::Visitor<'a> for PoseidonBN128HashOutVisitor {
impl serde::de::Visitor<'_> for PoseidonBN128HashOutVisitor {
type Value = String;

fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Expand Down
4 changes: 2 additions & 2 deletions relayer/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ edition.workspace = true
[dependencies]
bridging-payment-client.workspace = true
ethereum-client.workspace = true
gear_proof_storage.workspace = true
gear_proof_storage = { workspace = true, features = ["std"] }
gear-rpc-client.workspace = true
prover.workspace = true

Expand Down Expand Up @@ -47,7 +47,7 @@ sails-rs = { workspace = true, features = ["gclient"] }
serde.workspace = true
serde_json.workspace = true
sled.workspace = true
thiserror.workspace = true
thiserror = { workspace = true, features = ["std"] }
tokio.workspace = true
utils-prometheus.workspace = true

Expand Down
2 changes: 1 addition & 1 deletion relayer/src/ethereum_checkpoints/tests/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -216,7 +216,7 @@ async fn replay_back_and_updating() -> Result<()> {
let headers: Vec<BeaconBlockHeaderResponse> = serde_json::from_slice(&headers[..]).unwrap();

// start to replay back
let size_batch = 40 * SLOTS_PER_EPOCH as usize;
let size_batch = 30 * SLOTS_PER_EPOCH as usize;
let payload = Handle::ReplayBackStart {
sync_update: utils::sync_update_from_finality(
decode_signature(&finality_update.sync_aggregate),
Expand Down
Loading

0 comments on commit dedb368

Please sign in to comment.