Skip to content

Commit

Permalink
chore: fix building master branch
Browse files Browse the repository at this point in the history
The current build fails due to:

error: package `home v0.5.11` cannot be built because it requires rustc 1.81
or newer, while the currently active rustc version is 1.70.0

This commit increases the Rust version from 1.70.0 to 1.81.0.

It also fixes all Clippy warnings that were introduced by newer Rust versions (between 1.70.0 and 1.81.0).
  • Loading branch information
vmx committed Jan 3, 2025
1 parent 7b520ad commit 3ac13d6
Show file tree
Hide file tree
Showing 19 changed files with 38 additions and 31 deletions.
3 changes: 1 addition & 2 deletions fil-proofs-param/src/bin/paramfetch.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,9 +51,8 @@ struct FetchProgress<R> {

impl<R: Read> Read for FetchProgress<R> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.reader.read(buf).map(|n| {
self.reader.read(buf).inspect(|&n| {
self.progress_bar.add(n as u64);
n
})
}
}
Expand Down
6 changes: 3 additions & 3 deletions fil-proofs-tooling/src/bin/benchy/porep.rs
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
OpenOptions::new().read(true).write(true).open(&staged_file_path)
} else {
info!("*** Creating staged file");
OpenOptions::new().read(true).write(true).create(true).open(&staged_file_path)
OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&staged_file_path)
}?;

let sealed_file_path = cache_dir.join(SEALED_FILE);
Expand All @@ -103,7 +103,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
OpenOptions::new().read(true).write(true).open(&sealed_file_path)
} else {
info!("*** Creating sealed file");
OpenOptions::new().read(true).write(true).create(true).open(&sealed_file_path)
OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&sealed_file_path)
}?;

let sector_size_unpadded_bytes_amount =
Expand All @@ -120,7 +120,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
.collect();

info!("*** Created piece file");
let mut piece_file = OpenOptions::new().read(true).write(true).create(true).open(&piece_file_path)?;
let mut piece_file = OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&piece_file_path)?;
piece_file.write_all(&piece_bytes)?;
piece_file.sync_all()?;
piece_file.rewind()?;
Expand Down
6 changes: 3 additions & 3 deletions fil-proofs-tooling/src/bin/benchy/window_post.rs
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
OpenOptions::new().read(true).write(true).open(&staged_file_path)
} else {
info!("*** Creating staged file");
OpenOptions::new().read(true).write(true).create(true).open(&staged_file_path)
OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&staged_file_path)
}?;

let sealed_file_path = cache_dir.join(SEALED_FILE);
Expand All @@ -110,7 +110,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
OpenOptions::new().read(true).write(true).open(&sealed_file_path)
} else {
info!("*** Creating sealed file");
OpenOptions::new().read(true).write(true).create(true).open(&sealed_file_path)
OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&sealed_file_path)
}?;

let sector_size_unpadded_bytes_amount =
Expand All @@ -128,7 +128,7 @@ fn run_pre_commit_phases<Tree: 'static + MerkleTreeTrait>(
.collect();

info!("*** Created piece file");
let mut piece_file = OpenOptions::new().read(true).write(true).create(true).open(&piece_file_path)?;
let mut piece_file = OpenOptions::new().read(true).write(true).create(true).truncate(true).open(&piece_file_path)?;
piece_file.write_all(&piece_bytes)?;
piece_file.sync_all()?;
piece_file.rewind()?;
Expand Down
4 changes: 2 additions & 2 deletions fil-proofs-tooling/src/bin/benchy/winning_post.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,8 +63,8 @@ pub fn run_fallback_post_bench<Tree: 'static + MerkleTreeTrait>(
create_replica::<Tree>(sector_size, fake_replica, api_version, api_features);

// Store the replica's private and publicly facing info for proving and verifying respectively.
let pub_replica_info = vec![(sector_id, replica_output.public_replica_info.clone())];
let priv_replica_info = vec![(sector_id, replica_output.private_replica_info.clone())];
let pub_replica_info = [(sector_id, replica_output.public_replica_info.clone())];
let priv_replica_info = [(sector_id, replica_output.private_replica_info.clone())];

let post_config = PoStConfig {
sector_size: sector_size.into(),
Expand Down
2 changes: 1 addition & 1 deletion fil-proofs-tooling/src/bin/gpu-cpu-test/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ fn thread_fun(
) -> RunInfo {
let timing = Instant::now();
let mut iteration = 0;
while iteration < std::u8::MAX {
while iteration < u8::MAX {
info!("iter {}", iteration);

// This is the higher priority proof, get it on the GPU even if there is one running
Expand Down
2 changes: 1 addition & 1 deletion fil-proofs-tooling/src/shared.rs
Original file line number Diff line number Diff line change
Expand Up @@ -298,7 +298,7 @@ pub fn create_replicas<Tree: 'static + MerkleTreeTrait>(
let priv_infos = sealed_files
.iter()
.zip(seal_pre_commit_outputs.return_value.iter())
.zip(cache_dirs.into_iter())
.zip(cache_dirs)
.map(|((sealed_file, seal_pre_commit_output), cache_dir)| {
PrivateReplicaInfo::new(
sealed_file.to_path_buf(),
Expand Down
2 changes: 1 addition & 1 deletion filecoin-proofs/src/api/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -367,7 +367,7 @@ where
/// # Arguments
///
/// * `source` - a readable source of unprocessed piece bytes. The piece's commitment will be
/// generated for the bytes read from the source plus any added padding.
/// generated for the bytes read from the source plus any added padding.
/// * `piece_size` - the number of unpadded user-bytes which can be read from source before EOF.
pub fn generate_piece_commitment<T: Read>(
source: T,
Expand Down
2 changes: 1 addition & 1 deletion filecoin-proofs/src/chunk_iter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ impl<R: Read> Iterator for ChunkIterator<R> {
match self.reader.read_many(&mut buffer) {
Ok(bytes_read) if bytes_read == self.chunk_size => Some(Ok(buffer)),
// A position of 0 indicates end of file.
Ok(bytes_read) if bytes_read == 0 => None,
Ok(0) => None,
Ok(bytes_read) => Some(Ok(buffer[..bytes_read].to_vec())),
Err(error) => Some(Err(error)),
}
Expand Down
2 changes: 1 addition & 1 deletion filecoin-proofs/src/types/private_replica_info.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ impl<Tree: MerkleTreeTrait> Ord for PrivateReplicaInfo<Tree> {

impl<Tree: MerkleTreeTrait> PartialOrd for PrivateReplicaInfo<Tree> {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.comm_r.as_ref().partial_cmp(other.comm_r.as_ref())
Some(self.cmp(other))
}
}

Expand Down
1 change: 1 addition & 0 deletions fr32/src/convert.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ pub enum Error {
/// Invariants:
/// - Value of each 32-byte chunks MUST represent valid Frs.
/// - Total length must be a multiple of 32.
///
/// That is to say: each 32-byte chunk taken alone must be a valid Fr32.
pub type Fr32Vec = Vec<u8>;

Expand Down
8 changes: 4 additions & 4 deletions fr32/src/padding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -563,14 +563,14 @@ need to handle the potential bit-level misalignments:
// offset and num_bytes are based on the unpadded data, so
// if [0, 1, ..., 255] was the original unpadded data, offset 3 and len 4 would return
// [3, 4, 5, 6].
pub fn write_unpadded<W: ?Sized>(
pub fn write_unpadded<W>(
source: &[u8],
target: &mut W,
offset: usize,
len: usize,
) -> io::Result<usize>
where
W: Write,
W: Write + ?Sized,
{
// Check that there's actually `len` raw data bytes encoded inside
// `source` starting at `offset`.
Expand Down Expand Up @@ -630,15 +630,15 @@ The reader will generally operate with bit precision, even if the padded
layout is byte-aligned (no extra bits) the data inside it isn't (since
we pad at the bit-level).
**/
fn write_unpadded_aux<W: ?Sized>(
fn write_unpadded_aux<W>(
padding_map: &PaddingMap,
source: &[u8],
target: &mut W,
write_pos: usize,
max_write_size: usize,
) -> io::Result<usize>
where
W: Write,
W: Write + ?Sized,
{
// Position of the reader in the padded bit stream layout, deduced from
// the position of the writer (`write_pos`) in the raw data layout.
Expand Down
2 changes: 1 addition & 1 deletion rust-toolchain
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.70.0
1.81.0
5 changes: 1 addition & 4 deletions storage-proofs-core/src/gadgets/insertion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -321,10 +321,7 @@ pub fn pick<Scalar: PrimeField, CS: ConstraintSystem<Scalar>>(
condition: &Boolean,
a: &AllocatedNum<Scalar>,
b: &AllocatedNum<Scalar>,
) -> Result<AllocatedNum<Scalar>, SynthesisError>
where
CS: ConstraintSystem<Scalar>,
{
) -> Result<AllocatedNum<Scalar>, SynthesisError> {
let c = AllocatedNum::alloc(cs.namespace(|| "pick result"), || {
if condition
.get_value()
Expand Down
2 changes: 1 addition & 1 deletion storage-proofs-core/src/parameter_cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -249,7 +249,7 @@ where
let param_identifier = pub_params.identifier();
info!("parameter set identifier for cache: {}", param_identifier);
let mut hasher = Sha256::default();
hasher.update(&param_identifier.into_bytes());
hasher.update(param_identifier.into_bytes());
let circuit_hash = hasher.finalize();
format!(
"{}-{:02x}",
Expand Down
1 change: 1 addition & 0 deletions storage-proofs-core/src/test_helper.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ pub fn setup_replica(data: &[u8], replica_path: &Path) -> MmapMut {
.read(true)
.write(true)
.create(true)
.truncate(true)
.open(replica_path)
.expect("Failed to create replica");
f.write_all(data).expect("Failed to write data to replica");
Expand Down
1 change: 1 addition & 0 deletions storage-proofs-porep/build.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
fn main() {
println!("cargo::rustc-check-cfg=cfg(nightly)");
cfg_if_nightly()
}

Expand Down
11 changes: 9 additions & 2 deletions storage-proofs-porep/src/stacked/vanilla/cache.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
use std::collections::{BTreeMap, HashSet};
use std::fmt::Write;
use std::fs::{remove_file, File};
use std::io;
use std::path::{Path, PathBuf};
Expand Down Expand Up @@ -250,7 +251,10 @@ impl ParentCache {
drop(data);

let hash = hasher.finalize();
digest_hex = hash.iter().map(|x| format!("{:01$x}", x, 2)).collect();
digest_hex = hash.iter().fold(String::new(), |mut output, x| {
let _write_never_fails = write!(output, "{:01$x}", x, 2);
output
});

info!(
"[open] parent cache: calculated consistency digest: {:?}",
Expand Down Expand Up @@ -343,7 +347,10 @@ impl ParentCache {
let mut hasher = Sha256::new();
hasher.update(&data);
let hash = hasher.finalize();
digest_hex = hash.iter().map(|x| format!("{:01$x}", x, 2)).collect();
digest_hex = hash.iter().fold(String::new(), |mut output, x| {
let _write_never_fails = write!(output, "{:01$x}", x, 2);
output
});
info!(
"[generate] parent cache: generated consistency digest: {:?}",
digest_hex
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -466,7 +466,7 @@ pub fn create_labels_for_encoding<
// This could fail, but we will ignore the error if so.
// It will be logged as a warning by `bind_core`.
debug!("binding core in main thread");
group.get(0).map(|core_index| bind_core(*core_index))
group.first().map(|core_index| bind_core(*core_index))
});

// NOTE: this means we currently keep 2x sector size around, to improve speed
Expand Down Expand Up @@ -564,7 +564,7 @@ pub fn create_labels_for_decoding<Tree: 'static + MerkleTreeTrait, T: AsRef<[u8]
// This could fail, but we will ignore the error if so.
// It will be logged as a warning by `bind_core`.
debug!("binding core in main thread");
group.get(0).map(|core_index| bind_core(*core_index))
group.first().map(|core_index| bind_core(*core_index))
});

// NOTE: this means we currently keep 2x sector size around, to improve speed
Expand Down
5 changes: 3 additions & 2 deletions storage-proofs-porep/src/stacked/vanilla/proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -192,12 +192,11 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
t_aux.synth_proofs_path(),
partition_count,
)
.map_err(|error| {
.inspect_err(|_| {
info!(
"failed to read porep proofs from synthetic proofs file: {:?}",
t_aux.synth_proofs_path(),
);
error
})
}
}
Expand Down Expand Up @@ -1503,6 +1502,7 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
);
let mut f = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(&tree_r_last_path)
.expect("failed to open file for tree_r_last");
Expand Down Expand Up @@ -1912,6 +1912,7 @@ impl<'a, Tree: 'static + MerkleTreeTrait, G: 'static + Hasher> StackedDrg<'a, Tr
);
let mut f = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(&tree_r_last_path)
.expect("failed to open file for tree_r_last");
Expand Down

0 comments on commit 3ac13d6

Please sign in to comment.