Skip to content

Commit

Permalink
Remove empty machines (#2078)
Browse files Browse the repository at this point in the history
With this PR, if a block machine never receives a call *and* has dynamic
size, it is skipped entirely in the proof (for all backends that support
dynamically-sized machines, Plonky3 and the Composite backend). This is
sound if we treat a missing machine as not interacting at all with the
bus.

As a result, we can remove the "dummy calls" in the RISC-V machine,
which ensure that each block machine is called at least once.

In slightly more detail:
- If a block machine never receives a call, witgen returns columns of
length 0.
- When proving, we detect that and remove the machine entirely.
- The verifier is relaxed in that it no longer asserts that all machines
are being proven. As mentioned, this assumes that the bus argument
(which is not fully implemented) handles this accordingly, using a bus
accumulator of zero for the missing machine.

To test:
```
$ cargo run pil test_data/asm/block_to_block_empty_submachine.asm -o output -f --export-witness-csv --prove-with plonky3
...
Running main machine for 8 rows
[00:00:00 (ETA: 00:00:00)] ████████████████████ 100% - Starting...                                                     
Machine Secondary machine 0: main_arith (BlockMachine) is never used at runtime, so we remove it.
Witness generation took 0.002950166s
Writing output/commits.bin.
Writing output/block_to_block_empty_submachine_witness.csv.
Backend setup for plonky3...
Setup took 0.023712292s
Proof generation took 0.083828546s
Proof size: 80517 bytes
Writing output/block_to_block_empty_submachine_proof.bin.
```
  • Loading branch information
georgwiese authored Nov 14, 2024
1 parent 9037386 commit ed6e4f7
Show file tree
Hide file tree
Showing 8 changed files with 210 additions and 68 deletions.
6 changes: 5 additions & 1 deletion ast/src/analyzed/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -700,8 +700,12 @@ impl From<DegreeType> for DegreeRange {
}

impl DegreeRange {
pub fn is_unique(&self) -> bool {
self.min == self.max
}

pub fn try_into_unique(self) -> Option<DegreeType> {
(self.min == self.max).then_some(self.min)
self.is_unique().then_some(self.min)
}

/// Iterate through powers of two in this range
Expand Down
44 changes: 24 additions & 20 deletions backend/src/composite/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -41,8 +41,8 @@ struct MachineProof {
/// A composite proof that contains a proof for each machine separately, sorted by machine name.
#[derive(Serialize, Deserialize)]
struct CompositeProof {
/// Machine proofs, sorted by machine name.
proofs: Vec<MachineProof>,
/// Machine proofs by machine name.
proofs: BTreeMap<String, MachineProof>,
}

pub(crate) struct CompositeBackendFactory<F: FieldElement, B: BackendFactory<F>> {
Expand Down Expand Up @@ -312,11 +312,14 @@ impl<F: FieldElement> Backend<F> for CompositeBackend<F> {
let mut proofs_status = self
.machine_data
.iter()
.map(|machine_entry| {
.filter_map(|machine_entry| {
let (machine, machine_data) = machine_entry;
let (witness, size) =
process_witness_for_machine(machine, machine_data, witness);

if size == 0 {
// If a machine has no rows, remove it entirely.
return None;
}
let inner_machine_data = machine_data
.get(&size)
.expect("Machine does not support the given size");
Expand All @@ -325,11 +328,11 @@ impl<F: FieldElement> Backend<F> for CompositeBackend<F> {
sub_prover::run(scope, &inner_machine_data.backend, witness)
});

(status, machine_entry, size)
Some((status, machine_entry, size))
})
.collect::<Vec<_>>();

let mut proof_results = Vec::new();
let mut proof_results = BTreeMap::new();
let mut witness = Cow::Borrowed(witness);
for stage in 1.. {
// Filter out proofs that have completed and accumulate the
Expand All @@ -339,7 +342,8 @@ impl<F: FieldElement> Backend<F> for CompositeBackend<F> {
.into_iter()
.filter_map(|(status, machine_data, size)| match status {
sub_prover::RunStatus::Completed(result) => {
proof_results.push((result, size));
let (machine_name, _) = machine_data;
assert!(proof_results.insert(machine_name, (result, size)).is_none());
None
}
sub_prover::RunStatus::Challenged(sub_prover, c) => {
Expand Down Expand Up @@ -376,14 +380,14 @@ impl<F: FieldElement> Backend<F> for CompositeBackend<F> {

let proofs = proof_results
.into_iter()
.map(|(proof, size)| match proof {
Ok(proof) => Ok(MachineProof { size, proof }),
.map(|(machine_name, (proof, size))| match proof {
Ok(proof) => Ok((machine_name.clone(), MachineProof { size, proof })),
Err(e) => {
log::error!("==> Machine proof failed: {:?}", e);
Err(e)
}
})
.collect::<Result<Vec<_>, _>>()?;
.collect::<Result<BTreeMap<_, _>, _>>()?;

let proof = CompositeProof { proofs };
Ok(bincode::serialize(&proof).unwrap())
Expand All @@ -392,16 +396,16 @@ impl<F: FieldElement> Backend<F> for CompositeBackend<F> {

fn verify(&self, proof: &[u8], instances: &[Vec<F>]) -> Result<(), Error> {
let proof: CompositeProof = bincode::deserialize(proof).unwrap();
for (machine_data, machine_proof) in
self.machine_data.values().zip_eq(proof.proofs.into_iter())
{
machine_data
.get(&machine_proof.size)
.unwrap()
.backend
.lock()
.unwrap()
.verify(&machine_proof.proof, instances)?;
for (machine_name, machine_data) in self.machine_data.iter() {
if let Some(machine_proof) = proof.proofs.get(machine_name) {
machine_data
.get(&machine_proof.size)
.unwrap()
.backend
.lock()
.unwrap()
.verify(&machine_proof.proof, instances)?;
}
}
Ok(())
}
Expand Down
23 changes: 20 additions & 3 deletions executor/src/witgen/machines/block_machine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -165,10 +165,27 @@ impl<'a, T: FieldElement> Machine<'a, T> for BlockMachine<'a, T> {
mutable_state: &'b mut MutableState<'a, 'b, T, Q>,
) -> HashMap<String, Vec<T>> {
if self.data.len() < 2 * self.block_size {
log::warn!(
"Filling empty blocks with zeros, because the block machine is never used. \
if self.fixed_data.is_monolithic() {
log::warn!(
"Filling empty blocks with zeros, because the block machine is never used. \
This might violate some internal constraints."
);
);
} else {
log::info!(
"Machine {} is never used at runtime, so we remove it.",
self.name
);
// Return empty columns for all witnesses.
return self
.parts
.witnesses
.iter()
.map(|id| (*id, Vec::new()))
// Note that this panics if any count is not 0 (which shouldn't happen).
.chain(self.multiplicity_counter.generate_columns_single_size(0))
.map(|(id, values)| (self.fixed_data.column_name(&id).to_string(), values))
.collect();
}
}
self.degree = compute_size_and_log(&self.name, self.data.len(), self.degree_range);

Expand Down
77 changes: 45 additions & 32 deletions executor/src/witgen/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use itertools::Itertools;
use machines::MachineParts;
use powdr_ast::analyzed::{
AlgebraicExpression, AlgebraicReference, Analyzed, DegreeRange, Expression,
FunctionValueDefinition, PolyID, PolynomialType, SymbolKind, TypedExpression,
FunctionValueDefinition, PolyID, PolynomialType, Symbol, SymbolKind, TypedExpression,
};
use powdr_ast::parsed::visitor::ExpressionVisitable;
use powdr_ast::parsed::{FunctionKind, LambdaExpression};
Expand Down Expand Up @@ -287,7 +287,6 @@ impl<'a, 'b, T: FieldElement> WitnessGenerator<'a, 'b, T> {
let column = columns
.remove(&name)
.unwrap_or_else(|| panic!("No machine generated witness for column: {name}"));
assert!(!column.is_empty());
(name, column)
})
.collect::<Vec<_>>();
Expand Down Expand Up @@ -373,36 +372,6 @@ pub struct FixedData<'a, T: FieldElement> {
}

impl<'a, T: FieldElement> FixedData<'a, T> {
/// Returns the common degree of a set of polynomials
///
/// # Panics
///
/// Panics if:
/// - the degree is not unique
/// - the set of polynomials is empty
/// - a declared polynomial does not have an explicit degree
fn common_degree_range<'b>(&self, ids: impl IntoIterator<Item = &'b PolyID>) -> DegreeRange {
let ids: HashSet<_> = ids.into_iter().collect();

self.analyzed
// get all definitions
.definitions
.iter()
// only keep polynomials
.filter_map(|(_, (symbol, _))| {
matches!(symbol.kind, SymbolKind::Poly(_)).then_some(symbol)
})
// get all array elements and their degrees
.flat_map(|symbol| symbol.array_elements().map(|(_, id)| (id, symbol.degree)))
// only keep the ones matching our set
.filter_map(|(id, degree)| ids.contains(&id).then_some(degree))
// get the common degree
.unique()
.exactly_one()
.unwrap_or_else(|_| panic!("expected all polynomials to have the same degree"))
.unwrap()
}

pub fn new(
analyzed: &'a Analyzed<T>,
fixed_col_values: &'a [(String, VariablySizedColumn<T>)],
Expand Down Expand Up @@ -490,6 +459,50 @@ impl<'a, T: FieldElement> FixedData<'a, T> {
}
}

fn all_poly_symbols(&self) -> impl Iterator<Item = &Symbol> {
self.analyzed
.definitions
.iter()
.filter_map(|(_, (symbol, _))| {
matches!(symbol.kind, SymbolKind::Poly(_)).then_some(symbol)
})
}

/// Returns the common degree of a set of polynomials
///
/// # Panics
///
/// Panics if:
/// - the degree is not unique
/// - the set of polynomials is empty
/// - a declared polynomial does not have an explicit degree
fn common_degree_range<'b>(&self, ids: impl IntoIterator<Item = &'b PolyID>) -> DegreeRange {
let ids: HashSet<_> = ids.into_iter().collect();

self.all_poly_symbols()
.flat_map(|symbol| symbol.array_elements().map(|(_, id)| (id, symbol.degree)))
// only keep the ones matching our set
.filter_map(|(id, degree)| ids.contains(&id).then_some(degree))
// get the common degree
.unique()
.exactly_one()
.unwrap_or_else(|_| panic!("expected all polynomials to have the same degree"))
.unwrap()
}

/// Returns whether all polynomials have the same static degree.
fn is_monolithic(&self) -> bool {
match self
.all_poly_symbols()
.map(|symbol| symbol.degree.unwrap())
.unique()
.exactly_one()
{
Ok(degree) => degree.is_unique(),
_ => false,
}
}

pub fn global_range_constraints(&self) -> &GlobalConstraints<T> {
&self.global_range_constraints
}
Expand Down
24 changes: 22 additions & 2 deletions pipeline/tests/asm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ use powdr_pipeline::{
asm_string_to_pil, gen_estark_proof_with_backend_variant, make_prepared_pipeline,
make_simple_prepared_pipeline, regular_test, regular_test_without_small_field,
resolve_test_file, run_pilcom_with_backend_variant, test_halo2,
test_halo2_with_backend_variant, test_pilcom, test_plonky3_with_backend_variant,
BackendVariant,
test_halo2_with_backend_variant, test_pilcom, test_plonky3_pipeline,
test_plonky3_with_backend_variant, BackendVariant,
},
util::{FixedPolySet, PolySet, WitnessPolySet},
Pipeline,
Expand Down Expand Up @@ -144,6 +144,26 @@ fn block_to_block() {
regular_test(f, &[]);
}

#[test]
fn block_to_block_empty_submachine() {
let f = "asm/block_to_block_empty_submachine.asm";
let mut pipeline = make_simple_prepared_pipeline(f);

let witness = pipeline.compute_witness().unwrap();
let arith_size = witness
.iter()
.find(|(k, _)| k == "main_arith::x")
.unwrap()
.1
.len();
assert_eq!(arith_size, 0);

test_halo2_with_backend_variant(pipeline, BackendVariant::Composite);

let pipeline = make_simple_prepared_pipeline::<GoldilocksField>(f);
test_plonky3_pipeline(pipeline);
}

#[test]
fn block_to_block_with_bus_monolithic() {
let f = "asm/block_to_block_with_bus.asm";
Expand Down
10 changes: 7 additions & 3 deletions plonky3/src/prover.rs
Original file line number Diff line number Diff line change
Expand Up @@ -403,11 +403,15 @@ where
let (tables, stage_0): (BTreeMap<_, _>, BTreeMap<_, _>) = program
.split
.iter()
.map(|(name, (pil, constraint_system))| {
.filter_map(|(name, (pil, constraint_system))| {
let columns = machine_witness_columns(witness, pil, name);
let degree = columns[0].1.len();

(
if degree == 0 {
// If a machine has no rows, remove it entirely.
return None;
}
Some((
(
name.clone(),
Table {
Expand All @@ -433,7 +437,7 @@ where
.collect(),
},
),
)
))
})
.unzip();

Expand Down
40 changes: 33 additions & 7 deletions plonky3/src/verifier.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use alloc::collections::BTreeMap;
use alloc::string::String;
use alloc::string::{String, ToString};
use alloc::vec;
use alloc::vec::Vec;
use core::iter::once;
Expand Down Expand Up @@ -75,6 +75,7 @@ pub fn verify<T: FieldElementMap>(
split: &BTreeMap<&String, &ConstraintSystem<T>>,
challenger: &mut Challenger<T>,
proof: &Proof<T::Config>,
// Machine name -> (stage -> public values)
public_inputs: BTreeMap<String, Vec<Vec<T>>>,
) -> Result<(), VerificationError<PcsError<T::Config>>>
where
Expand All @@ -100,12 +101,37 @@ where
opening_proof,
} = proof;

// Filters out machines that are not included in the proof.
// With a sound bus argument, the prover can only do this if they don't interact
// with the bus, i.e., are empty.
let split = split
.iter()
.filter_map(|(k, v)| opened_values.contains_key(*k).then_some((*k, v)))
.collect::<BTreeMap<_, _>>();
let public_inputs = public_inputs
.iter()
.filter_map(|(k, v)| {
if opened_values.contains_key(k) {
Some((k, v))
} else {
for stage_publics in v {
// TODO: This will fail once we expose the accumulators as publics...
// If the machine is removed, we want to use an accumulator value of 0.
assert!(stage_publics.is_empty());
}
None
}
})
.collect::<BTreeMap<_, _>>();

// sanity check that the two maps have the same keys
itertools::assert_equal(split.keys().cloned(), public_inputs.keys());
itertools::assert_equal(split.keys(), public_inputs.keys());

// error out if the opened values do not have the same keys as the tables
if !itertools::equal(split.keys().cloned(), opened_values.keys()) {
return Err(VerificationError::InvalidProofShape);
return Err(VerificationError::InvalidProofShape(
"Opened values do not have the same keys as the tables".to_string(),
));
}

let tables: BTreeMap<&String, Table<_>> = split
Expand All @@ -115,14 +141,14 @@ where
.map(
|((constraints, (name, public_values_by_stage)), opened_values)| {
(
name,
*name,
Table {
air: PowdrTable::new(constraints),
opened_values,
public_values_by_stage,
preprocessed: verifying_key
.as_ref()
.and_then(|vk| vk.preprocessed.get(name)),
.and_then(|vk| vk.preprocessed.get(*name)),
},
)
},
Expand Down Expand Up @@ -425,12 +451,12 @@ where
&& challenge_counts.len() as u8 == stage_count;

res.then_some(())
.ok_or(VerificationError::InvalidProofShape)
.ok_or_else(|| VerificationError::InvalidProofShape("Invalid opening shape".to_string()))
}

#[derive(Debug)]
pub enum VerificationError<PcsErr> {
InvalidProofShape,
InvalidProofShape(String),
/// An error occurred while verifying the claimed openings.
InvalidOpeningArgument(PcsErr),
/// Out-of-domain evaluation mismatch, i.e. `constraints(zeta)` did not match
Expand Down
Loading

0 comments on commit ed6e4f7

Please sign in to comment.