fix the typos #549

Open
wants to merge 1 commit into base: main
14 changes: 7 additions & 7 deletions jolt-core/src/poly/commitment/zeromorph.rs
@@ -635,7 +635,7 @@ mod test {
use ark_std::{test_rng, UniformRand};
use rand_core::SeedableRng;

// Evaluate Phi_k(x) = \sum_{i=0}^k x^i using the direct inefficent formula
// Evaluate Phi_k(x) = \sum_{i=0}^k x^i using the direct inefficient formula
fn phi<P: Pairing>(challenge: &P::ScalarField, subscript: usize) -> P::ScalarField {
let len = (1 << subscript) as u64;
(0..len).fold(P::ScalarField::zero(), |mut acc, i| {
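
Context for readers of this hunk: the helper above evaluates Phi_k(x) by summing all 2^k powers of x one at a time, which is why the comment calls it the direct, inefficient formula; the usual efficient alternative is the geometric-series closed form. A minimal sketch of that closed form, assuming x != 1 (phi_closed_form is a hypothetical name for illustration, not a function in this crate or part of this PR):

use ark_ec::pairing::Pairing;
use ark_ff::{Field, One};

// Phi_k(x) = sum_{i=0}^{2^k - 1} x^i = (x^(2^k) - 1) / (x - 1) when x != 1.
fn phi_closed_form<P: Pairing>(x: &P::ScalarField, subscript: usize) -> P::ScalarField {
    let len = 1u64 << subscript;
    let numerator = x.pow([len]) - P::ScalarField::one();
    numerator * (*x - P::ScalarField::one()).inverse().expect("x != 1")
}
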
@@ -900,7 +900,7 @@ mod test {
&mut prover_transcript,
)
.unwrap();
let p_transcipt_squeeze: <Bn254 as Pairing>::ScalarField =
let p_transcript_squeeze: <Bn254 as Pairing>::ScalarField =
prover_transcript.challenge_scalar();

// Verify proof.
@@ -914,10 +914,10 @@ mod test {
&mut verifier_transcript,
)
.unwrap();
let v_transcipt_squeeze: <Bn254 as Pairing>::ScalarField =
let v_transcript_squeeze: <Bn254 as Pairing>::ScalarField =
verifier_transcript.challenge_scalar();

assert_eq!(p_transcipt_squeeze, v_transcipt_squeeze);
assert_eq!(p_transcript_squeeze, v_transcript_squeeze);

// evaluate bad proof for soundness
let altered_verifier_point = point
@@ -968,7 +968,7 @@ mod test {
&evals,
&mut prover_transcript,
);
let p_transcipt_squeeze: <Bn254 as Pairing>::ScalarField =
let p_transcript_squeeze: <Bn254 as Pairing>::ScalarField =
prover_transcript.challenge_scalar();

// Verify proof.
@@ -982,10 +982,10 @@ mod test {
&mut verifier_transcript,
)
.unwrap();
let v_transcipt_squeeze: <Bn254 as Pairing>::ScalarField =
let v_transcript_squeeze: <Bn254 as Pairing>::ScalarField =
verifier_transcript.challenge_scalar();

assert_eq!(p_transcipt_squeeze, v_transcipt_squeeze);
assert_eq!(p_transcript_squeeze, v_transcript_squeeze);

// evaluate bad proof for completeness
let altered_verifier_point = point
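
The squeeze-and-compare pattern in the tests above checks Fiat-Shamir transcript consistency: after proving and verifying, each side derives one more challenge, and the two must agree only if both transcripts absorbed identical messages. A self-contained toy illustration of that idea, assuming a simple hash-chain transcript (ToyTranscript is illustrative only, not the crate's Transcript implementation):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Toy hash-chain transcript: each appended message folds into the running state.
struct ToyTranscript(u64);

impl ToyTranscript {
    fn append(&mut self, msg: &[u8]) {
        let mut h = DefaultHasher::new();
        (self.0, msg).hash(&mut h);
        self.0 = h.finish();
    }
    // A "squeezed" challenge is just the state after a domain-separation tag.
    fn challenge(&mut self) -> u64 {
        self.append(b"challenge");
        self.0
    }
}

fn main() {
    let (mut prover, mut verifier) = (ToyTranscript(0), ToyTranscript(0));
    for t in [&mut prover, &mut verifier] {
        t.append(b"commitment");
        t.append(b"opening proof");
    }
    // Mirrors assert_eq!(p_transcript_squeeze, v_transcript_squeeze) above.
    assert_eq!(prover.challenge(), verifier.challenge());
}
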
2 changes: 1 addition & 1 deletion jolt-core/src/poly/sparse_interleaved_poly.rs
@@ -227,7 +227,7 @@ impl<F: JoltField> SparseInterleavedPolynomial<F> {
continue;
}
if coeff.index % 2 == 0 {
// Left node; try to find correspoding right node
// Left node; try to find corresponding right node
let right = segment
.get(j + 1)
.cloned()
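
For readers outside this file: in the interleaved layout, a coefficient at an even index is a left node, and its right sibling, if it survives in the sparse segment, is the very next coefficient with index + 1. A minimal sketch of that lookup under those assumptions, using plain (index, value) pairs rather than the crate's coefficient type:

// Sparse segment of (index, value) pairs, sorted by index.
// Even index = left node; the matching right node has index + 1.
fn right_sibling(segment: &[(usize, u64)], j: usize) -> Option<u64> {
    let (left_index, _) = segment[j];
    debug_assert_eq!(left_index % 2, 0, "left nodes live at even indices");
    segment
        .get(j + 1)
        .filter(|(index, _)| *index == left_index + 1)
        .map(|(_, value)| *value)
}
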
4 changes: 2 additions & 2 deletions jolt-core/src/subprotocols/grand_product_quarks.rs
@@ -287,10 +287,10 @@ pub enum QuarkError {
#[error("InvalidSumcheck")]
InvalidQuarkSumcheck,
/// Returned if a quark opening proof fails
#[error("IvalidOpeningProof")]
#[error("InvalidOpeningProof")]
InvalidOpeningProof,
/// Returned if eq(tau, r)*(f(1, r) - f(r, 0)*f(r,1)) does not match the result from sumcheck
#[error("IvalidOpeningProof")]
#[error("InvalidOpeningProof")]
InvalidBinding,
}

20 changes: 10 additions & 10 deletions jolt-core/src/subprotocols/sparse_grand_product.rs
@@ -38,11 +38,11 @@ struct BatchedGrandProductToggleLayer<F: JoltField> {
flag_values: Vec<Vec<F>>,
/// The Reed-Solomon fingerprints for each circuit in the batch.
fingerprints: Vec<Vec<F>>,
/// Once the sparse flag/fingerprint vectors cannnot be bound further
/// Once the sparse flag/fingerprint vectors cannot be bound further
/// (i.e. binding would require processing values in different vectors),
/// we switch to using `coalesced_flags` to represent the flag values.
coalesced_flags: Option<Vec<F>>,
/// Once the sparse flag/fingerprint vectors cannnot be bound further
/// Once the sparse flag/fingerprint vectors cannot be bound further
/// (i.e. binding would require processing values in different vectors),
/// we switch to using `coalesced_fingerprints` to represent the fingerprint values.
coalesced_fingerprints: Option<Vec<F>>,
@@ -210,11 +210,11 @@ impl<F: JoltField> Bindable<F> for BatchedGrandProductToggleLayer<F> {
}
self.coalesced_flags = Some(bound_flags);

let coalesced_fingerpints = self.coalesced_fingerprints.as_mut().unwrap();
let mut bound_fingerprints = vec![F::zero(); coalesced_fingerpints.len() / 2];
let coalesced_fingerprints = self.coalesced_fingerprints.as_mut().unwrap();
let mut bound_fingerprints = vec![F::zero(); coalesced_fingerprints.len() / 2];
for i in 0..bound_fingerprints.len() {
bound_fingerprints[i] = coalesced_fingerpints[2 * i]
+ r * (coalesced_fingerpints[2 * i + 1] - coalesced_fingerpints[2 * i]);
bound_fingerprints[i] = coalesced_fingerprints[2 * i]
+ r * (coalesced_fingerprints[2 * i + 1] - coalesced_fingerprints[2 * i]);
}
self.coalesced_fingerprints = Some(bound_fingerprints);
self.batched_layer_len /= 2;
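
The loop above is the standard multilinear binding step: fixing the lowest variable to the challenge r maps each adjacent (even, odd) pair to even + r * (odd - even) and halves the table, which is why batched_layer_len is divided by 2. A minimal sketch of that rule over plain integers (illustrative only; the real code works over a JoltField):

// Bind the lowest variable of a multilinear evaluation table to r.
fn bind_lowest_variable(values: &[i64], r: i64) -> Vec<i64> {
    assert!(values.len() % 2 == 0, "table length must be even");
    values
        .chunks(2)
        .map(|pair| pair[0] + r * (pair[1] - pair[0]))
        .collect()
}

// Example: [3, 7, 1, 5] bound at r = 2 becomes [3 + 2*(7-3), 1 + 2*(5-1)] = [11, 9].
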
@@ -399,14 +399,14 @@ impl<F: JoltField, ProofTranscript: Transcript> BatchedCubicSumcheck<F, ProofTra
#[tracing::instrument(skip_all, name = "BatchedGrandProductToggleLayer::compute_cubic")]
fn compute_cubic(&self, eq_poly: &SplitEqPolynomial<F>, previous_round_claim: F) -> UniPoly<F> {
if let Some(coalesced_flags) = &self.coalesced_flags {
let coalesced_fingerpints = self.coalesced_fingerprints.as_ref().unwrap();
let coalesced_fingerprints = self.coalesced_fingerprints.as_ref().unwrap();

let cubic_evals = if eq_poly.E1_len == 1 {
// 1. Flags/fingerprints are coalesced, and E1 is fully bound
// This is similar to the if case of `DenseInterleavedPolynomial::compute_cubic`
coalesced_flags
.par_chunks(2)
.zip(coalesced_fingerpints.par_chunks(2))
.zip(coalesced_fingerprints.par_chunks(2))
.zip(eq_poly.E2.par_chunks(2))
.map(|((flags, fingerprints), eq_chunk)| {
let eq_evals = {
@@ -453,12 +453,12 @@ impl<F: JoltField, ProofTranscript: Transcript> BatchedCubicSumcheck<F, ProofTra

let flag_chunk_size = coalesced_flags.len().next_power_of_two() / eq_poly.E2_len;
let fingerprint_chunk_size =
coalesced_fingerpints.len().next_power_of_two() / eq_poly.E2_len;
coalesced_fingerprints.len().next_power_of_two() / eq_poly.E2_len;

eq_poly.E2[..eq_poly.E2_len]
.par_iter()
.zip(coalesced_flags.par_chunks(flag_chunk_size))
.zip(coalesced_fingerpints.par_chunks(fingerprint_chunk_size))
.zip(coalesced_fingerprints.par_chunks(fingerprint_chunk_size))
.map(|((E2_eval, flag_x2), fingerprint_x2)| {
let mut inner_sum = (F::zero(), F::zero(), F::zero());
for ((E1_evals, flag_chunk), fingerprint_chunk) in E1_evals
10 changes: 5 additions & 5 deletions tracer/src/emulator/device/virtio_block_disk.rs
@@ -315,7 +315,7 @@ impl VirtioBlockDisk {
///
/// # Arguments
/// * `memory`
/// * `mem_addresss` Physical address. Must be eight-byte aligned.
/// * `mem_address` Physical address. Must be eight-byte aligned.
/// * `disk_address` Must be eight-byte aligned.
/// * `length` Must be eight-byte aligned.
fn transfer_from_disk(
@@ -350,7 +350,7 @@ impl VirtioBlockDisk {
///
/// # Arguments
/// * `memory`
/// * `mem_addresss` Physical address. Must be eight-byte aligned.
/// * `mem_address` Physical address. Must be eight-byte aligned.
/// * `disk_address` Must be eight-byte aligned.
/// * `length` Must be eight-byte aligned.
fn transfer_to_disk(
@@ -384,7 +384,7 @@ impl VirtioBlockDisk {
/// Reads a byte from disk.
///
/// # Arguments
/// * `addresss` Address in disk
/// * `address` Address in disk
fn read_from_disk(&mut self, address: u64) -> u8 {
let index = (address >> 3) as usize;
let pos = (address % 8) * 8;
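
The two lines above decode a byte address against a 64-bit-word backing store: address >> 3 selects the word and (address % 8) * 8 is the byte's bit offset inside it. A minimal standalone sketch, assuming that layout with little-endian byte order within each word (read_byte is a hypothetical helper, not the emulator's API):

// Read one byte out of a vector of 64-bit words.
fn read_byte(words: &[u64], address: u64) -> u8 {
    let index = (address >> 3) as usize; // which 64-bit word
    let pos = (address % 8) * 8;         // bit offset of the byte in that word
    ((words[index] >> pos) & 0xff) as u8
}

// Example: with words[0] = 0x1122_3344_5566_7788, read_byte(&words, 1) is 0x77.
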
@@ -394,7 +394,7 @@ impl VirtioBlockDisk {
/// Writes a byte to disk.
///
/// # Arguments
/// * `addresss` Address in disk
/// * `address` Address in disk
/// * `value` Data written to disk
fn write_to_disk(&mut self, address: u64, value: u8) {
let index = (address >> 3) as usize;
@@ -453,7 +453,7 @@ impl VirtioBlockDisk {
(self.get_base_avail_address() + 4 + queue_size * 2).div_ceil(align) * align
}

// @TODO: Follow the virtio block specification more propertly.
// @TODO: Follow the virtio block specification more properly.
fn handle_disk_access(&mut self, memory: &mut MemoryWrapper) {
let base_desc_address = self.get_base_desc_address();
let base_avail_address = self.get_base_avail_address();
6 changes: 3 additions & 3 deletions tracer/src/emulator/elf_analyzer.rs
@@ -658,9 +658,9 @@ impl ElfAnalyzer {
string_table_section_headers: &Vec<&SectionHeader>,
) -> Option<u64> {
let tohost_values = [0x2e, 0x74, 0x6f, 0x68, 0x6f, 0x73, 0x74, 0x00]; // ".tohost\null"
for progrma_data_header in program_data_section_headers {
let sh_addr = progrma_data_header.sh_addr;
let sh_name = progrma_data_header.sh_name as u64;
for program_data_header in program_data_section_headers {
let sh_addr = program_data_header.sh_addr;
let sh_name = program_data_header.sh_name as u64;
// Find all string sections so far.
// @TODO: Is there a way to know which string table section
// sh_name of program data section points to?
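
The byte array above is simply the ASCII encoding of ".tohost" followed by a NUL terminator; the loop compares it against section names read at sh_name inside a candidate string table. A minimal sketch of that comparison under those assumptions (name_is_tohost is a hypothetical helper over plain byte slices, not the analyzer's ELF structs):

// ".tohost\0" as raw bytes, matched against a name stored in a string table.
fn name_is_tohost(string_table: &[u8], sh_name: usize) -> bool {
    const TOHOST: [u8; 8] = [0x2e, 0x74, 0x6f, 0x68, 0x6f, 0x73, 0x74, 0x00];
    string_table
        .get(sh_name..sh_name + TOHOST.len())
        .map_or(false, |bytes| bytes == &TOHOST[..])
}
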
2 changes: 1 addition & 1 deletion tracer/src/emulator/mmu.rs
@@ -532,7 +532,7 @@ impl Mmu {
false => match effective_address {
// I don't know why but dtb data seems to be stored from 0x1020 on Linux.
// It might be from self.x[0xb] initialization?
// And DTB size is arbitray.
// And DTB size is arbitrary.
0x00001020..=0x00001fff => self.dtb[effective_address as usize - 0x1020],
0x02000000..=0x0200ffff => self.clint.load(effective_address),
0x0C000000..=0x0fffffff => self.plic.load(effective_address),