
Feat: switch to logup (mv_lookup) scheme for lookup argument #30

Merged (8 commits) on Feb 1, 2024
5 changes: 5 additions & 0 deletions .github/workflows/ci.yml
@@ -20,6 +20,11 @@ jobs:
with:
command: test
args: --verbose --release --all --all-features
- name: Bench lookup
uses: actions-rs/cargo@v1
with:
command: bench
args: --bench lookups

build:
name: Build target ${{ matrix.target }}
1 change: 1 addition & 0 deletions .gitignore
@@ -3,3 +3,4 @@
Cargo.lock
.vscode
.DS_Store
**/proptest-regressions/
28 changes: 24 additions & 4 deletions halo2_proofs/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "halo2-axiom"
version = "0.4.2"
version = "0.5.0-alpha.0"
authors = [
"Sean Bowe <[email protected]>",
"Ying Tong Lai <[email protected]>",
@@ -52,6 +52,10 @@ harness = false
name = "dev_lookup"
harness = false

[[bench]]
name = "lookups"
harness = false

[[bench]]
name = "fft"
harness = false
@@ -63,7 +67,11 @@ crossbeam = "0.8"
ff = "0.13"
group = "0.13"
pairing = "0.23"
halo2curves = { package = "halo2curves-axiom", version = "0.5.0", default-features = false, features = ["bits", "bn256-table", "derive_serde"] }
halo2curves = { package = "halo2curves-axiom", version = "0.5.0", default-features = false, features = [
"bits",
"bn256-table",
"derive_serde",
] }
rand = "0.8"
rand_core = { version = "0.6", default-features = false }
tracing = "0.1"
@@ -72,14 +80,18 @@ rustc-hash = "1.1"
sha3 = "0.10"
ark-std = { version = "0.3.0", features = ["print-trace"], optional = true }
maybe-rayon = { version = "0.1.0", default-features = false }
rayon = "1.8.1"

# Developer tooling dependencies
plotters = { version = "0.3.0", default-features = false, optional = true }
tabbycat = { version = "0.1", features = ["attributes"], optional = true }
lazy_static = { version = "1", optional = true }
log = "0.4.17"

# Legacy circuit compatibility
halo2_legacy_pdqsort = { version = "0.1.0", optional = true }


[dev-dependencies]
assert_matches = "1.5"
criterion = "0.3"
@@ -88,22 +100,30 @@ proptest = "1"
rand_core = { version = "0.6", features = ["getrandom"] }
rand_chacha = "0.3.1"
ark-std = { version = "0.3.0", features = ["print-trace"] }
env_logger = "0.11.1"

[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies]
getrandom = { version = "0.2", features = ["js"] }

[features]
default = ["batch", "multicore", "circuit-params"]
default = ["batch", "multicore", "circuit-params", "logup_skip_inv"]
multicore = ["maybe-rayon/threads"]
dev-graph = ["plotters", "tabbycat"]
test-dev-graph = ["dev-graph", "plotters/bitmap_backend", "plotters/bitmap_encoder", "plotters/ttf"]
test-dev-graph = [
"dev-graph",
"plotters/bitmap_backend",
"plotters/bitmap_encoder",
"plotters/ttf",
]
gadget-traces = ["backtrace"]
# thread-safe-region = []
sanity-checks = []
batch = ["rand/getrandom"]
profile = ["dep:ark-std"]
asm = ["halo2curves/asm"]
circuit-params = []
counter = ["lazy_static"]
logup_skip_inv = []

[lib]
bench = false
240 changes: 240 additions & 0 deletions halo2_proofs/benches/lookups.rs
@@ -0,0 +1,240 @@
#[macro_use]
extern crate criterion;

use halo2_axiom::{
circuit::{Layouter, SimpleFloorPlanner, Value},
plonk::*,
poly::kzg::{
commitment::{KZGCommitmentScheme, ParamsKZG},
multiopen::ProverGWC,
multiopen::VerifierGWC,
strategy::SingleStrategy,
},
poly::{commitment::ParamsProver, Rotation},
transcript::{
Blake2bRead, Blake2bWrite, Challenge255, TranscriptReadBuffer, TranscriptWriterBuffer,
},
};
use halo2curves::bn256::{Bn256, G1Affine};
use halo2curves::pairing::Engine;
use rand_core::OsRng;

use std::marker::PhantomData;

use criterion::{BenchmarkId, Criterion};
use ff::PrimeField as Field;

fn criterion_benchmark(c: &mut Criterion) {
#[derive(Clone, Default)]
struct MyCircuit<F: Field> {
_marker: PhantomData<F>,
}

#[derive(Clone)]
struct MyConfig {
selector: Selector,
table: TableColumn,
advice: Column<Advice>,
other_advice: Column<Advice>,
}

impl<F: Field> Circuit<F> for MyCircuit<F> {
type Config = MyConfig;
type FloorPlanner = SimpleFloorPlanner;

fn without_witnesses(&self) -> Self {
Self::default()
}

fn configure(meta: &mut ConstraintSystem<F>) -> MyConfig {
let config = MyConfig {
selector: meta.complex_selector(),
table: meta.lookup_table_column(),
advice: meta.advice_column(),
other_advice: meta.advice_column(),
};

let dummy_selector = meta.complex_selector();

meta.create_gate("degree 6 gate", |meta| {
let dummy_selector = meta.query_selector(dummy_selector);
#[allow(clippy::useless_vec)]
let constraints = vec![dummy_selector.clone(); 4]
.iter()
.fold(dummy_selector.clone(), |acc, val| acc * val.clone());
Constraints::with_selector(dummy_selector, Some(constraints))
});

meta.lookup("lookup", |meta| {
let advice = meta.query_advice(config.advice, Rotation::cur());
vec![(advice, config.table)]
});

meta.lookup("lookup", |meta| {
let advice = meta.query_advice(config.advice, Rotation::cur());
vec![(advice, config.table)]
});

meta.lookup("lookup", |meta| {
let advice = meta.query_advice(config.advice, Rotation::cur());
vec![(advice, config.table)]
});

meta.lookup("lookup", |meta| {
let advice = meta.query_advice(config.advice, Rotation::cur());
vec![(advice, config.table)]
});

meta.lookup("lookup", |meta| {
let advice = meta.query_advice(config.advice, Rotation::cur());
vec![(advice, config.table)]
});

/*
- We need a gate of degree at least 6: 6 - 1 = 5, which already forces the extended domain of size 8n.
- Our goal is a maximum degree of 9, since 9 - 1 = 8 still fits into that domain.

- base degree = table_deg + 2
- with input_expression_degree = 1:
degree = base + 1 = 3 + 1 = 4
- that leaves room to batch 5 more lookups into the argument
*/

config
}

fn synthesize(
&self,
config: MyConfig,
mut layouter: impl Layouter<F>,
) -> Result<(), Error> {
layouter.assign_table(
|| "8-bit table",
|mut table| {
for row in 0u64..(1 << 8) {
table.assign_cell(
|| format!("row {}", row),
config.table,
row as usize,
|| Value::known(F::from(row)),
)?;
}

Ok(())
},
)?;

layouter.assign_region(
|| "assign values",
|mut region| {
for offset in 0u64..(1 << 10) {
config.selector.enable(&mut region, offset as usize)?;
region.assign_advice(
// || format!("offset {}", offset),
config.advice,
offset as usize,
Value::known(F::from(offset % 256)),
);
}
for offset in 1u64..(1 << 10) {
config.selector.enable(&mut region, offset as usize)?;
region.assign_advice(
// || format!("offset {}", offset),
config.other_advice,
offset as usize - 1,
Value::known(F::from(offset % 256)),
);
}
Ok(())
},
)
}
}

fn keygen(k: u32) -> (ParamsKZG<Bn256>, ProvingKey<G1Affine>) {
let params: ParamsKZG<Bn256> = ParamsKZG::new(k);
let empty_circuit: MyCircuit<<Bn256 as Engine>::Fr> = MyCircuit {
_marker: PhantomData,
};
let vk = keygen_vk(&params, &empty_circuit).expect("keygen_vk should not fail");
let pk = keygen_pk(&params, vk, &empty_circuit).expect("keygen_pk should not fail");
(params, pk)
}

fn prover(_k: u32, params: &ParamsKZG<Bn256>, pk: &ProvingKey<G1Affine>) -> Vec<u8> {
let rng = OsRng;

let circuit: MyCircuit<<Bn256 as Engine>::Fr> = MyCircuit {
_marker: PhantomData,
};

let mut transcript = Blake2bWrite::<_, _, Challenge255<G1Affine>>::init(vec![]);
create_proof::<KZGCommitmentScheme<Bn256>, ProverGWC<'_, Bn256>, _, _, _, _>(
params,
pk,
&[circuit],
&[&[]],
rng,
&mut transcript,
)
.expect("proof generation should not fail");
transcript.finalize()
}

fn verifier(params: &ParamsKZG<Bn256>, vk: &VerifyingKey<G1Affine>, proof: &[u8]) {
let strategy = SingleStrategy::new(params);
let mut transcript = Blake2bRead::<_, _, Challenge255<G1Affine>>::init(proof);
verify_proof::<
KZGCommitmentScheme<Bn256>,
VerifierGWC<'_, Bn256>,
Challenge255<G1Affine>,
Blake2bRead<&[u8], G1Affine, Challenge255<G1Affine>>,
SingleStrategy<'_, Bn256>,
>(params, vk, strategy, &[&[]], &mut transcript)
.unwrap();
}

let k_range = 16..=16;

let mut keygen_group = c.benchmark_group("plonk-keygen");
keygen_group.sample_size(10);
for k in k_range.clone() {
keygen_group.bench_with_input(BenchmarkId::from_parameter(k), &k, |b, &k| {
b.iter(|| keygen(k));
});
}
keygen_group.finish();

let mut prover_group = c.benchmark_group("plonk-prover");
prover_group.sample_size(10);
for k in k_range.clone() {
let (params, pk) = keygen(k);

prover_group.bench_with_input(
BenchmarkId::from_parameter(k),
&(k, &params, &pk),
|b, &(k, params, pk)| {
b.iter(|| prover(k, params, pk));
},
);
}
prover_group.finish();

let mut verifier_group = c.benchmark_group("plonk-verifier");
for k in k_range {
let (params, pk) = keygen(k);
let proof = prover(k, &params, &pk);

verifier_group.bench_with_input(
BenchmarkId::from_parameter(k),
&(&params, pk.get_vk(), &proof[..]),
|b, &(params, vk, proof)| {
b.iter(|| verifier(params, vk, proof));
},
);
}
verifier_group.finish();
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
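The new benchmark is exercised in CI by the "Bench lookup" step added above (cargo bench --bench lookups) and can be run locally with the same command to measure keygen, prover, and verifier times.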
37 changes: 37 additions & 0 deletions halo2_proofs/src/arithmetic.rs
@@ -322,6 +322,12 @@ where
q
}

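/// Inverts each element of `values` in place, splitting the slice across threads
/// and batch-inverting every chunk.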
pub fn par_invert<F: Field>(values: &mut [F]) {
parallelize(values, |values, _start| {
values.batch_invert();
});
}

/// This utility function will parallelize an operation that is to be
/// performed over a mutable slice.
pub fn parallelize<T: Send, F: Fn(&mut [T], usize) + Send + Sync + Clone>(v: &mut [T], f: F) {
@@ -374,6 +380,37 @@ pub fn parallelize<T: Send, F: Fn(&mut [T], usize) + Send + Sync + Clone>(v: &mu
});
}

/// This simple utility function will parallelize an operation that is to be
/// performed over a mutable slice.
/// In this naive version, all chunks except possibly the last have the same size.
/// !! This is currently important for the mv_lookup prover's parallel scan implementation. !!
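/// Returns the start offset of each chunk.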
pub(crate) fn parallelize_naive<T: Send, F: Fn(&mut [T], usize) + Send + Sync + Clone>(
v: &mut [T],
f: F,
) -> Vec<usize> {
let n = v.len();
let num_threads = multicore::current_num_threads();
let mut chunk = n / num_threads;
if chunk < num_threads {
chunk = 1;
}

multicore::scope(|scope| {
let mut chunk_starts = vec![];
for (chunk_num, v) in v.chunks_mut(chunk).enumerate() {
let f = f.clone();
scope.spawn(move |_| {
let start = chunk_num * chunk;
f(v, start);
});
let start = chunk_num * chunk;
chunk_starts.push(start);
}

chunk_starts
})
}

pub fn log2_floor(num: usize) -> u32 {
assert!(num > 0);
