From ebb2fd31fa38cb6675efae47645deeb8a722bed7 Mon Sep 17 00:00:00 2001 From: Sladuca Date: Sun, 29 Jan 2023 16:58:36 -0500 Subject: [PATCH 1/3] first iteratione, fib works --- starky/src/cross_table_lookup.rs | 552 +++++++++++++++++++++++++++++++ starky/src/fibonacci_stark.rs | 164 ++++----- starky/src/get_challenges.rs | 189 ++++++++++- starky/src/lib.rs | 1 + starky/src/proof.rs | 64 +++- starky/src/prover.rs | 199 ++++++++++- starky/src/stark.rs | 44 ++- starky/src/vanishing_poly.rs | 2 + starky/src/verifier.rs | 164 +++++---- 9 files changed, 1211 insertions(+), 168 deletions(-) create mode 100644 starky/src/cross_table_lookup.rs diff --git a/starky/src/cross_table_lookup.rs b/starky/src/cross_table_lookup.rs new file mode 100644 index 0000000000..f9680fa1b7 --- /dev/null +++ b/starky/src/cross_table_lookup.rs @@ -0,0 +1,552 @@ +#![allow(clippy::all)] + +/// An implementation of Hermez's "domain-free" cross-table lookup +/// this differes from the main cross-table lookup in `evm` where +/// the "looking" table does not have to be at least as long as the "looked" table +/// paper: https://eprint.iacr.org/2022/1050 + +use anyhow::{anyhow, Result}; +use itertools::Itertools; +use plonky2::field::extension::{Extendable, FieldExtension}; +use plonky2::field::packed::PackedField; +use plonky2::field::polynomial::PolynomialValues; +use plonky2::field::types::Field; +use plonky2::hash::hash_types::RichField; +use plonky2::iop::challenger::Challenger; +use plonky2::plonk::config::{GenericConfig, Hasher}; + +use crate::config::StarkConfig; +use crate::consumer::{Consumer, FilteredConsumer}; +use crate::consumer::basic::ConstraintConsumer; +use crate::ir::{Registers, FirstRow, LastRow, Transition}; +use crate::proof::{StarkProof, StarkProofWithPublicInputs}; +use crate::stark::Stark; + +/// represets a set of cross-table lookups to be performed between an arbitrary number of starks on arbitrary sets of colums +#[derive(Debug, Clone)] +pub struct CtlDescriptor { + /// 
instances of CTLs, where a colset in one table "looks up" a column in another table + /// represented as pairs of columns where the LHS is a set of columns in some table "looking" up the RHS, another set of columns in some table + pub instances: Vec<(CtlColSet, CtlColSet)>, + /// the number of tables involved + pub num_tables: usize, +} + +impl CtlDescriptor { + pub fn from_instances(instances: Vec<(CtlColSet, CtlColSet)>, num_tables: usize) -> Self { + Self { + instances, + num_tables, + } + } +} + +/// Describes a set of columns that is involved in a cross-table lookup. +/// These columns are "linked" together via a linear combination. This +/// means the lookup effectively amounts to looking up a "tuple" +/// of columns up to the *same* permutations. In other words, +/// if a set of columns (a, b, c) in trace 0 is "looking up" +/// a set of columns (x, y, z) in trace 1, then the lookup will +/// enforce that, for every row i in trace 0, there exists a row j in trace 1 +/// such that (a[i], b[i], c[i]) = (x[j], y[j], z[j]). 
+#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct CtlColSet { + /// table ID for the table that this column set belongs to + pub(crate) tid: TableID, + /// set of column indices for this side of the CTL + pub(crate) colset: Vec, + /// column index for the corresponding filter, if any + pub(crate) filter_col: Option, +} + +impl CtlColSet { + pub fn new(tid: TableID, colset: Vec, filter_col: Option) -> CtlColSet { + CtlColSet { + tid, + colset, + filter_col, + } + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[repr(transparent)] +pub struct TableID(pub usize); + +impl From for TableID { + fn from(id: usize) -> Self { + TableID(id) + } +} + +impl From for usize { + fn from(id: TableID) -> Self { + id.0 + } +} + +// challenges used to reduce a set of columns to a single polynomial +// against which the CTL is performed +#[derive(Debug, Copy, Clone)] +pub struct CtlLinearCombChallenge { + pub(crate) alpha: F, + pub(crate) gamma: F, +} + +pub(crate) fn get_ctl_linear_comb_challenge>( + challenger: &mut Challenger, +) -> CtlLinearCombChallenge { + CtlLinearCombChallenge { + alpha: challenger.get_challenge(), + gamma: challenger.get_challenge(), + } +} + +// challenges used to compute the lookup's Z polys against the reduced colset poly +#[derive(Debug, Copy, Clone)] +pub struct CtlChallenge { + pub(crate) gamma: F, +} + +pub(crate) fn get_ctl_challenge>( + challenger: &mut Challenger, +) -> CtlChallenge { + CtlChallenge { + gamma: challenger.get_challenge(), + } +} + +#[derive(Debug, Clone)] +pub(crate) struct CtlColsetData { + // the set of columns + pub(crate) colset: CtlColSet, + // the Z poly for the CTL + // there are `num_challenges` of them + pub(crate) z_polys: Vec>, + // the challenges used to reduce the colset into a single polynomial + // there are `num_challenges` of them + pub(crate) linear_comb_challenges: Vec>, + // the challenges used to compute the Z polys against the reduced polys + // there are `num_challenges` of them + 
pub(crate) ctl_challenges: Vec>, +} + +#[derive(Debug, Clone)] +pub struct CtlData { + pub by_table: Vec>, +} + +#[derive(Debug, Clone)] +pub struct CtlTableData { + pub(crate) looking: Vec>, + pub(crate) looked: Vec>, +} + +impl CtlTableData { + pub(crate) fn zs(&self) -> Vec> { + let mut zs = Vec::new(); + for colset in self.looking.iter().chain(self.looked.iter()) { + zs.extend(colset.z_polys.iter().cloned()); + } + + zs + } + + pub(crate) fn num_zs(&self) -> usize { + self.looking + .iter() + .chain(self.looked.iter()) + .map(|colset| colset.z_polys.len()) + .sum() + } + + pub(crate) fn challenges(&self) -> (Vec>, Vec>) { + let mut linear_comb_challenges = Vec::new(); + let mut ctl_challenges = Vec::new(); + for colset in self.looking.iter().chain(self.looked.iter()) { + linear_comb_challenges.extend(colset.linear_comb_challenges.iter().cloned()); + ctl_challenges.extend(colset.ctl_challenges.iter().cloned()); + } + + (linear_comb_challenges, ctl_challenges) + } + + pub(crate) fn cols(&self) -> Vec { + let mut cols = Vec::new(); + for colset in self.looking.iter().chain(self.looked.iter()) { + cols.push(colset.colset.clone()); + } + + cols + } +} + +// compute the preprocessed polynomials necessary for the lookup argument given CTL traces and table descriptors +pub fn get_ctl_data, C: GenericConfig, const D: usize>( + config: &StarkConfig, + trace_poly_valueses: &[Vec>], + ctl_descriptor: &CtlDescriptor, + challenger: &mut Challenger, +) -> CtlData { + let num_challenges = config.num_challenges; + + let mut by_table = vec![ + CtlTableData { + looking: Vec::new(), + looked: Vec::new() + }; + ctl_descriptor.num_tables + ]; + + for (looking_colset, looked_colset) in ctl_descriptor.instances.iter() { + let CtlColSet { + tid: looking_tid, + colset: looking_cols, + filter_col: looking_filter_col, + } = looking_colset.clone(); + + let CtlColSet { + tid: looked_tid, + colset: looked_cols, + filter_col: looked_filter_col, + } = looked_colset.clone(); + + let 
linear_comb_challenges = (0..num_challenges) + .map(|_| get_ctl_linear_comb_challenge(challenger)) + .collect_vec(); + + let ctl_challenges = (0..num_challenges) + .map(|_| get_ctl_challenge(challenger)) + .collect_vec(); + + let looking_filter_poly = + looking_filter_col.map(|col| &trace_poly_valueses[looking_tid.0][col]); + let looking_colset_polys = looking_cols + .iter() + .map(|&col| &trace_poly_valueses[looking_tid.0][col]) + .collect_vec(); + let looking_z_polys = linear_comb_challenges + .iter() + .zip(ctl_challenges.iter()) + .map(|(linear_comb_challenge, challenge)| { + compute_z_poly( + &looking_colset_polys, + looking_filter_poly, + linear_comb_challenge, + challenge, + ) + }) + .collect_vec(); + + let looked_filter_poly = + looked_filter_col.map(|col| &trace_poly_valueses[looked_tid.0][col]); + let looked_colset_polys = looked_cols + .iter() + .map(|&col| &trace_poly_valueses[looked_tid.0][col]) + .collect_vec(); + let looked_z_polys = linear_comb_challenges + .iter() + .zip(ctl_challenges.iter()) + .map(|(linear_comb_challenge, challenge)| { + compute_z_poly( + &looked_colset_polys, + looked_filter_poly, + linear_comb_challenge, + challenge, + ) + }) + .collect_vec(); + + let looking_data = CtlColsetData { + colset: looking_colset.clone(), + z_polys: looking_z_polys, + linear_comb_challenges: linear_comb_challenges.clone(), + ctl_challenges: ctl_challenges.clone(), + }; + + let looked_data = CtlColsetData { + colset: looked_colset.clone(), + z_polys: looked_z_polys, + linear_comb_challenges, + ctl_challenges, + }; + + by_table[looking_tid.0].looking.push(looking_data); + by_table[looked_tid.0].looked.push(looked_data); + } + + CtlData { by_table } +} + +fn compute_z_poly( + colset_polys: &[&PolynomialValues], + selector_poly: Option<&PolynomialValues>, + linear_comb_challenge: &CtlLinearCombChallenge, + challenge: &CtlChallenge, +) -> PolynomialValues { + let &CtlLinearCombChallenge { gamma, alpha } = linear_comb_challenge; + let eval_reduced = |row: 
usize| { + let mut eval = F::ZERO; + for &poly in colset_polys { + eval = eval * alpha + poly.values[row]; + } + eval + gamma + }; + + let &CtlChallenge { gamma } = challenge; + if let Some(selector_poly) = selector_poly { + let mut evals = Vec::new(); + let mut eval = F::ONE; + for i in (0..selector_poly.len()).filter(|&i| selector_poly.values[i] != F::ZERO) { + debug_assert!(selector_poly.values[i] == F::ONE, "non-binary filter"); + + evals.resize(i, eval); + + eval *= eval_reduced(i) + gamma; + evals.push(eval); + } + evals.resize(selector_poly.len(), eval); + PolynomialValues::new(evals) + } else { + let evals = (0..colset_polys[0].len()) + .map(eval_reduced) + .scan(F::ONE, |eval, reduced_eval| { + *eval *= reduced_eval + gamma; + Some(*eval) + }) + .collect_vec(); + PolynomialValues::new(evals) + } +} + +#[derive(Debug, Clone)] +pub struct CtlCheckVars +where + F: Field, + FE: FieldExtension, + P: PackedField, +{ + pub(crate) local_zs: Vec

, + pub(crate) next_zs: Vec

, + pub(crate) first_zs: Vec, + pub(crate) last_zs: Vec, + pub(crate) linear_comb_challenges: Vec>, + pub(crate) challenges: Vec>, + pub(crate) cols: Vec, +} + +impl CtlCheckVars +where + F: RichField + Extendable, +{ + pub fn from_proofs>( + proofs: &[StarkProofWithPublicInputs], + ctl_descriptor: &CtlDescriptor, + linear_comb_challenges_by_table: &[Vec>], + ctl_challenges_by_table: &[Vec>], + ) -> Vec { + let num_tables = ctl_descriptor.num_tables; + debug_assert_eq!(num_tables, proofs.len()); + + let first_last_zs = proofs.iter().map(|p| { + ( + p.proof + .openings + .ctl_zs_first + .as_ref() + .expect("no ctl first opening!") + .clone(), + p.proof + .openings + .ctl_zs_last + .as_ref() + .expect("no ctl last opening!") + .clone(), + ) + }); + let ctl_zs = proofs + .iter() + .map(|p| { + let openings = &p.proof.openings; + let ctl_zs = openings.ctl_zs.as_ref().expect("no ctl openings!").iter(); + let ctl_zs_next = openings + .ctl_zs_next + .as_ref() + .expect("no ctl openings!") + .iter(); + ctl_zs.zip(ctl_zs_next) + }) + .collect_vec(); + + // (looking, looked) by table + let mut instances_by_table = vec![(Vec::new(), Vec::new()); num_tables]; + for (looking, looked) in ctl_descriptor.instances.iter() { + instances_by_table[looking.tid.0].0.push(looking); + instances_by_table[looked.tid.0].1.push(looked); + } + + let instances_by_table = instances_by_table + .iter() + .map(|(looking, looked)| looking.iter().chain(looked.iter())); + + instances_by_table + .zip(first_last_zs.zip(ctl_zs)) + .zip( + linear_comb_challenges_by_table + .iter() + .zip(ctl_challenges_by_table.iter()), + ) + .map( + |( + (instances, ((first_zs, last_zs), ctl_zs)), + (linear_comb_challenges, ctl_challenges), + )| { + let cols = instances.map(|&x| x.clone()).collect_vec(); + let (local_zs, next_zs) = ctl_zs.unzip(); + + let challenges = ctl_challenges.clone(); + let linear_comb_challenges = linear_comb_challenges.clone(); + + CtlCheckVars { + local_zs, + next_zs, + first_zs, + last_zs, + 
linear_comb_challenges, + challenges, + cols, + } + }, + ) + .collect_vec() + } +} + +pub(crate) fn eval_cross_table_lookup_checks( + vars: Registers

, + ctl_vars: &CtlCheckVars, + consumer: &mut ConstraintConsumer

, + num_challenges: usize, +) where + F: RichField + Extendable, + FE: FieldExtension, + P: PackedField, + C: GenericConfig, + S: Stark>, +{ + debug_assert_eq!( + ctl_vars.challenges.len(), + num_challenges * ctl_vars.cols.len() + ); + + let eval_reduced = |evals: &[P], alpha: F, gamma: F| { + let mut sum = P::ZEROS; + for &eval in evals { + sum = sum * FE::from_basefield(alpha) + eval + } + sum + FE::from_basefield(gamma) + }; + + for instance in 0..ctl_vars.cols.len() { + let colset = &ctl_vars.cols[instance]; + let filter_col = colset.filter_col; + let local_ctl_col_values = colset + .colset + .iter() + .map(|&col| vars.local_values[col]) + .collect_vec(); + let next_ctl_col_values = colset + .colset + .iter() + .map(|&col| vars.next_values[col]) + .collect_vec(); + + let sel = filter_col.map_or(P::ONES, |col| vars.local_values[col]); + // check filter is binary + consumer.constraint(sel * (P::ONES - sel), &mut ()); + + let next_sel = filter_col.map_or(P::ONES, |col| vars.next_values[col]); + + for i in 0..num_challenges { + let linear_comb_challenge = + &ctl_vars.linear_comb_challenges[instance * num_challenges + i]; + let challenge = &ctl_vars.challenges[instance * num_challenges + i]; + let local_z = ctl_vars.local_zs[instance * num_challenges + i]; + let next_z = ctl_vars.next_zs[instance * num_challenges + i]; + let first_z = ctl_vars.first_zs[instance * num_challenges + i]; + let last_z = ctl_vars.last_zs[instance * num_challenges + i]; + + // check first and last zs + consumer.constraint_filtered(FirstRow, local_z - FE::from_basefield(first_z), &mut ()); + consumer.constraint_filtered(LastRow, local_z - FE::from_basefield(last_z), &mut ()); + + // check grand product + let reduced_eval = eval_reduced( + &next_ctl_col_values, + linear_comb_challenge.alpha, + linear_comb_challenge.gamma, + ); + let eval = reduced_eval + FE::from_basefield(challenge.gamma) - P::ONES; + consumer.constraint_filtered(Transition, next_z - (local_z * (next_sel * eval + P::ONES)), 
&mut ()); + consumer.constraint_filtered(LastRow, next_z - FE::from_basefield(first_z), &mut ()); + + // check grand product start + let reduced_eval = eval_reduced( + &local_ctl_col_values, + linear_comb_challenge.alpha, + linear_comb_challenge.gamma, + ); + let eval = reduced_eval + FE::from_basefield(challenge.gamma) - P::ONES; + consumer.constraint_filtered(FirstRow, (sel * eval + P::ONES) - FE::from_basefield(first_z), &mut ()); + } + } +} + +pub fn verify_cross_table_lookups< + 'a, + I: Iterator>, + F: RichField + Extendable, + C: GenericConfig + 'a, + const D: usize, +>( + proofs: I, + descriptor: &CtlDescriptor, + num_challenges: usize, +) -> Result<()> { + let ctl_zs_openings = proofs + .flat_map(|p| p.openings.ctl_zs_last.iter()) + .collect_vec(); + + let mut looking_zs = Vec::new(); + let mut looked_zs = Vec::new(); + let mut indices = vec![0; descriptor.num_tables]; + for (looking, _) in descriptor.instances.iter() { + let tid = looking.tid; + let idx = indices[tid.0]; + let zs = &ctl_zs_openings[tid.0][idx..idx + num_challenges]; + indices[tid.0] += num_challenges; + looking_zs.extend(zs.iter().map(move |z| (z, tid))); + } + + for (_, looked) in descriptor.instances.iter() { + let tid = looked.tid; + let idx = indices[tid.0]; + let zs = &ctl_zs_openings[tid.0][idx..idx + num_challenges]; + indices[tid.0] += num_challenges; + looked_zs.extend(zs.iter().map(move |z| (z, tid))); + } + + for ((looking_z, looking_tid), (looked_z, looked_tid)) in + looking_zs.into_iter().zip(looked_zs.into_iter()) + { + if looking_z != looked_z { + let msg = format!( + "cross table lookup verification failed. 
looking TableID: {}, looked TableID: {}, looking_z: {:?}, looked_z: {:?}", + looking_tid.0, looked_tid.0, looking_z, looked_z + ); + return Err(anyhow!(msg)); + } + } + + Ok(()) +} diff --git a/starky/src/fibonacci_stark.rs b/starky/src/fibonacci_stark.rs index 9de68086dd..210aa85241 100644 --- a/starky/src/fibonacci_stark.rs +++ b/starky/src/fibonacci_stark.rs @@ -120,13 +120,13 @@ mod test { use crate::config::StarkConfig; use crate::consumer::basic::RecursiveConstraintConsumer; use crate::proof::StarkProofWithPublicInputs; - use crate::prover::prove; + use crate::prover::prove_no_ctl; use crate::recursive_verifier::{ add_virtual_stark_proof_with_pis, set_stark_proof_with_pis_target, verify_stark_proof_circuit, }; use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; - use crate::verifier::verify_stark_proof; + use crate::verifier::verify_stark_proof_no_ctl; fn fibonacci(n: usize, x0: F, x1: F) -> F { (0..n).fold((x0, x1), |x, _| (x.1, x.0 + x.1)).1 @@ -144,15 +144,15 @@ mod test { let public_inputs = vec![F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)]; let stark = S::new(num_rows); let trace = stark.generate_trace(public_inputs[0], public_inputs[1]); - let proof = prove::( - stark, + let proof = prove_no_ctl::( + &stark, &config, - trace, + &trace, public_inputs, &mut TimingTree::default(), )?; - verify_stark_proof(stark, proof, &config) + verify_stark_proof_no_ctl(&stark, proof, &config) } #[test] @@ -168,82 +168,82 @@ mod test { test_stark_low_degree::(stark, metadata.columns, metadata.public_inputs) } - #[test] - fn test_fibonacci_stark_circuit() -> Result<()> { - const D: usize = 2; - type C = PoseidonGoldilocksConfig; - type F = >::F; - type S = FibonacciStark; - - let num_rows = 1 << 5; - let stark = S::new(num_rows); - let metadata = stark.metadata(); - test_stark_circuit_constraints::( - stark, - metadata.columns, - metadata.public_inputs, - ) - } - - #[test] - fn test_recursive_stark_verifier() -> Result<()> { - 
init_logger(); - const D: usize = 2; - type C = PoseidonGoldilocksConfig; - type F = >::F; - type S = FibonacciStark; - - let config = StarkConfig::standard_fast_config(); - let num_rows = 1 << 5; - let public_inputs = vec![F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)]; - let stark = S::new(num_rows); - let trace = stark.generate_trace(public_inputs[0], public_inputs[1]); - let proof = prove::( - stark, - &config, - trace, - public_inputs, - &mut TimingTree::default(), - )?; - verify_stark_proof(stark, proof.clone(), &config)?; - - recursive_proof::(stark, proof, &config, true) - } - - fn recursive_proof< - F: RichField + Extendable, - C: GenericConfig, - S: Copy - + Stark> - + Stark, RecursiveConstraintConsumer, CircuitBuilder>, - InnerC: GenericConfig, - const D: usize, - >( - stark: S, - inner_proof: StarkProofWithPublicInputs, - inner_config: &StarkConfig, - print_gate_counts: bool, - ) -> Result<()> - where - InnerC::Hasher: AlgebraicHasher, - { - let circuit_config = CircuitConfig::standard_recursion_config(); - let mut builder = CircuitBuilder::::new(circuit_config); - let mut pw = PartialWitness::new(); - let degree_bits = inner_proof.proof.recover_degree_bits(inner_config); - let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits); - set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof); - - verify_stark_proof_circuit::(&mut builder, stark, pt, inner_config); - - if print_gate_counts { - builder.print_gate_counts(0); - } - - let data = builder.build::(); - let proof = data.prove(pw)?; - data.verify(proof) - } + // #[test] + // fn test_fibonacci_stark_circuit() -> Result<()> { + // const D: usize = 2; + // type C = PoseidonGoldilocksConfig; + // type F = >::F; + // type S = FibonacciStark; + + // let num_rows = 1 << 5; + // let stark = S::new(num_rows); + // let metadata = stark.metadata(); + // test_stark_circuit_constraints::( + // stark, + // metadata.columns, + // metadata.public_inputs, + // ) + // } 
+ + // #[test] + // fn test_recursive_stark_verifier() -> Result<()> { + // init_logger(); + // const D: usize = 2; + // type C = PoseidonGoldilocksConfig; + // type F = >::F; + // type S = FibonacciStark; + + // let config = StarkConfig::standard_fast_config(); + // let num_rows = 1 << 5; + // let public_inputs = vec![F::ZERO, F::ONE, fibonacci(num_rows - 1, F::ZERO, F::ONE)]; + // let stark = S::new(num_rows); + // let trace = stark.generate_trace(public_inputs[0], public_inputs[1]); + // let proof = prove::( + // stark, + // &config, + // trace, + // public_inputs, + // &mut TimingTree::default(), + // )?; + // verify_stark_proof(stark, proof.clone(), &config)?; + + // recursive_proof::(stark, proof, &config, true) + // } + + // fn recursive_proof< + // F: RichField + Extendable, + // C: GenericConfig, + // S: Copy + // + Stark> + // + Stark, RecursiveConstraintConsumer, CircuitBuilder>, + // InnerC: GenericConfig, + // const D: usize, + // >( + // stark: S, + // inner_proof: StarkProofWithPublicInputs, + // inner_config: &StarkConfig, + // print_gate_counts: bool, + // ) -> Result<()> + // where + // InnerC::Hasher: AlgebraicHasher, + // { + // let circuit_config = CircuitConfig::standard_recursion_config(); + // let mut builder = CircuitBuilder::::new(circuit_config); + // let mut pw = PartialWitness::new(); + // let degree_bits = inner_proof.proof.recover_degree_bits(inner_config); + // let pt = add_virtual_stark_proof_with_pis(&mut builder, stark, inner_config, degree_bits); + // set_stark_proof_with_pis_target(&mut pw, &pt, &inner_proof); + + // verify_stark_proof_circuit::(&mut builder, stark, pt, inner_config); + + // if print_gate_counts { + // builder.print_gate_counts(0); + // } + + // let data = builder.build::(); + // let proof = data.prove(pw)?; + // data.verify(proof) + // } fn init_logger() { let _ = env_logger::builder().format_timestamp(None).try_init(); diff --git a/starky/src/get_challenges.rs b/starky/src/get_challenges.rs index 
1bceffc82a..6fed38c39a 100644 --- a/starky/src/get_challenges.rs +++ b/starky/src/get_challenges.rs @@ -1,5 +1,6 @@ use alloc::vec::Vec; +use itertools::Itertools; use plonky2::field::extension::Extendable; use plonky2::field::polynomial::PolynomialCoeffs; use plonky2::fri::proof::{FriProof, FriProofTarget}; @@ -12,14 +13,147 @@ use plonky2::plonk::circuit_builder::CircuitBuilder; use plonky2::plonk::config::{AlgebraicHasher, GenericConfig}; use crate::config::StarkConfig; +use crate::cross_table_lookup::{CtlDescriptor, CtlLinearCombChallenge, CtlChallenge, get_ctl_linear_comb_challenge, get_ctl_challenge}; use crate::permutation::{ get_n_permutation_challenge_sets, get_n_permutation_challenge_sets_target, }; use crate::proof::*; use crate::stark::StarkConfiguration; +// makes a challenger and overves the trace caps +pub fn start_all_proof_challenger<'a, F, C, I, const D: usize>( + trace_caps: I, +) -> Challenger +where + I: Iterator>, + F: RichField + Extendable, + C: GenericConfig, + >::Hasher: 'a, +{ + let mut challenger = Challenger::::new(); + + for cap in trace_caps { + challenger.observe_cap(cap); + } + + challenger +} + +// assumes `start_all_proof_challenges` was used to get the challenger +pub fn get_ctl_challenges_by_table( + challenger: &mut Challenger, + ctl_descriptor: &CtlDescriptor, + num_challenges: usize, +) -> ( + Vec>>, + Vec>>, +) +where + F: RichField + Extendable, + C: GenericConfig, +{ + let mut linear_comb_challenges_by_table = + vec![(Vec::new(), Vec::new()); ctl_descriptor.num_tables]; + let mut ctl_challenges_by_table = vec![(Vec::new(), Vec::new()); ctl_descriptor.num_tables]; + for (looking, looked) in ctl_descriptor.instances.iter() { + let linear_comb_challenges = (0..num_challenges) + .map(|_| get_ctl_linear_comb_challenge(challenger)) + .collect_vec(); + + let ctl_challenges = (0..num_challenges) + .map(|_| get_ctl_challenge(challenger)) + .collect_vec(); + + linear_comb_challenges_by_table[looking.tid.0] + .0 + 
.extend(linear_comb_challenges.clone()); + linear_comb_challenges_by_table[looked.tid.0] + .1 + .extend(linear_comb_challenges); + + ctl_challenges_by_table[looking.tid.0] + .0 + .extend(ctl_challenges.clone()); + ctl_challenges_by_table[looked.tid.0] + .1 + .extend(ctl_challenges); + } + + let linear_comb_challenges_by_table = linear_comb_challenges_by_table + .into_iter() + .map(|(looking, looked)| [looking, looked].concat()) + .collect_vec(); + + let ctl_challenges_by_table = ctl_challenges_by_table + .into_iter() + .map(|(looking, looked)| [looking, looked].concat()) + .collect_vec(); + + (linear_comb_challenges_by_table, ctl_challenges_by_table) +} + +// IMPORTANT: assumes `challenger` has already observed the trace caps and the ctl challenges have already been extracted +fn get_single_table_challenges( + stark: &S, + challenger: &mut Challenger, + permutation_zs_cap: Option<&MerkleCap>, + ctl_zs_cap: Option<&MerkleCap>, + quotient_polys_cap: &MerkleCap, + openings: &StarkOpeningSet, + commit_phase_merkle_caps: &[MerkleCap], + final_poly: &PolynomialCoeffs, + pow_witness: F, + config: &StarkConfig, + degree_bits: usize, +) -> StarkProofChallenges +where + F: RichField + Extendable, + C: GenericConfig, + S: StarkConfiguration, +{ + let num_challenges = config.num_challenges; + let metadata = stark.metadata(); + + let permutation_challenge_sets = permutation_zs_cap.map(|permutation_zs_cap| { + let tmp = get_n_permutation_challenge_sets( + challenger, + num_challenges, + metadata.permutation_batch_size(), + ); + challenger.observe_cap(permutation_zs_cap); + tmp + }); + + if let Some(cap) = ctl_zs_cap { + challenger.observe_cap(cap); + } + + let stark_alphas = challenger.get_n_challenges(num_challenges); + + challenger.observe_cap(quotient_polys_cap); + + let stark_zeta = challenger.get_extension_challenge::(); + + challenger.observe_openings(&openings.to_fri_openings()); + + let fri_challenges = challenger.fri_challenges::( + commit_phase_merkle_caps, + 
final_poly, + pow_witness, + degree_bits, + &config.fri_config, + ); + + StarkProofChallenges { + permutation_challenge_sets, + stark_alphas, + stark_zeta, + fri_challenges, + } +} + #[allow(clippy::too_many_arguments)] // NOTE: Clippy is too harsh here. -fn get_challenges( +fn get_stark_challenges( stark: &S, trace_cap: &MerkleCap, permutation_zs_cap: Option<&MerkleCap>, @@ -80,7 +214,7 @@ where { // TODO: Should be used later in compression? #![allow(dead_code)] - pub(crate) fn fri_query_indices( + pub(crate) fn fri_query_indices_no_ctl( &self, stark: &S, config: &StarkConfig, @@ -89,13 +223,13 @@ where where S: StarkConfiguration, { - self.get_challenges(stark, config, degree_bits) + self.get_stark_challenges_no_ctl(stark, config, degree_bits) .fri_challenges .fri_query_indices } /// Computes all Fiat-Shamir challenges used in the STARK proof. - pub(crate) fn get_challenges( + pub(crate) fn get_stark_challenges_no_ctl( &self, stark: &S, config: &StarkConfig, @@ -107,6 +241,7 @@ where let StarkProof { trace_cap, permutation_zs_cap, + ctl_zs_cap, quotient_polys_cap, openings, opening_proof: @@ -117,7 +252,13 @@ where .. }, } = &self.proof; - get_challenges::( + + assert!( + ctl_zs_cap.is_none(), + "CTLs not supported in `get_stark_challenges_no_ctl`" + ); + + get_stark_challenges::( stark, trace_cap, permutation_zs_cap.as_ref(), @@ -128,6 +269,44 @@ where *pow_witness, config, degree_bits, + ) + } + + /// Computes Fiat-Shamir challenges for this specific proof within an `AllProof` + pub(crate) fn get_stark_challenges_with_ctl( + &self, + stark: &S, + config: &StarkConfig, + challenger: &mut Challenger, + degree_bits: usize, + ) -> StarkProofChallenges { + let StarkProof { + trace_cap: _, + permutation_zs_cap, + ctl_zs_cap, + quotient_polys_cap, + openings, + opening_proof: + FriProof { + commit_phase_merkle_caps, + final_poly, + pow_witness, + .. 
+ }, + } = &self.proof; + + get_single_table_challenges::( + stark, + challenger, + permutation_zs_cap.as_ref(), + ctl_zs_cap.as_ref(), + quotient_polys_cap, + openings, + commit_phase_merkle_caps, + final_poly, + *pow_witness, + config, + degree_bits, ) } } diff --git a/starky/src/lib.rs b/starky/src/lib.rs index c9071d7410..c400e7cc41 100644 --- a/starky/src/lib.rs +++ b/starky/src/lib.rs @@ -22,6 +22,7 @@ pub mod starks; pub mod util; pub mod vanishing_poly; pub mod verifier; +pub mod cross_table_lookup; #[cfg(test)] pub mod fibonacci_stark; diff --git a/starky/src/proof.rs b/starky/src/proof.rs index 6684aee5d5..528c167235 100644 --- a/starky/src/proof.rs +++ b/starky/src/proof.rs @@ -27,6 +27,8 @@ pub struct StarkProof, C: GenericConfig, pub permutation_zs_cap: Option>, /// Merkle cap of LDEs of trace values. pub quotient_polys_cap: MerkleCap, + /// Merkle cap of LDEs of cross-table-lookup Z values + pub ctl_zs_cap: Option>, /// Purported values of each polynomial at the challenge point. pub openings: StarkOpeningSet, /// A batch FRI argument for all openings. @@ -130,6 +132,14 @@ pub struct StarkOpeningSet, const D: usize> { pub permutation_zs: Option>, pub permutation_zs_next: Option>, pub quotient_polys: Vec, + /// Openings of cross-table-lookup `Z` polynomials at `zeta`. + pub ctl_zs: Option>, + /// Openings of cross-table-lookup `Z` polynomials at `g * zeta`. + pub ctl_zs_next: Option>, + /// Openings of cross-table lookup `Z` polynomials at `g^-1`. 
+ pub ctl_zs_last: Option>, + /// Opening of cross-table lookup `Z` polynomials at `g^0` + pub ctl_zs_first: Option>, } impl, const D: usize> StarkOpeningSet { @@ -138,7 +148,9 @@ impl, const D: usize> StarkOpeningSet { g: F, trace_commitment: &PolynomialBatch, permutation_zs_commitment: Option<&PolynomialBatch>, + ctl_zs_commitment: Option<&PolynomialBatch>, quotient_commitment: &PolynomialBatch, + degree_bits: usize, ) -> Self { let eval_commitment = |z: F::Extension, c: &PolynomialBatch| { c.polynomials @@ -146,12 +158,29 @@ impl, const D: usize> StarkOpeningSet { .map(|p| p.to_extension().eval(z)) .collect::>() }; + let eval_commitment_base = |z: F, c: &PolynomialBatch| { + c.polynomials + .par_iter() + .map(|p| p.eval(z)) + .collect::>() + }; + let zeta_next = zeta.scalar_mul(g); + + let ctl_zs_first = ctl_zs_commitment.map(|c| eval_commitment_base(F::ONE, c).to_vec()); + let ctl_zs_last = ctl_zs_commitment.map(|c| { + eval_commitment_base(F::primitive_root_of_unity(degree_bits).inverse(), c).to_vec() + }); + Self { local_values: eval_commitment(zeta, trace_commitment), next_values: eval_commitment(zeta_next, trace_commitment), permutation_zs: permutation_zs_commitment.map(|c| eval_commitment(zeta, c)), permutation_zs_next: permutation_zs_commitment.map(|c| eval_commitment(zeta_next, c)), + ctl_zs: ctl_zs_commitment.map(|c| eval_commitment(zeta, c)), + ctl_zs_next: ctl_zs_commitment.map(|c| eval_commitment(zeta_next, c)), + ctl_zs_first, + ctl_zs_last, quotient_polys: eval_commitment(zeta, quotient_commitment), } } @@ -162,6 +191,7 @@ impl, const D: usize> StarkOpeningSet { .local_values .iter() .chain(self.permutation_zs.iter().flatten()) + .chain(self.ctl_zs.iter().flatten()) .chain(&self.quotient_polys) .copied() .collect(), @@ -171,12 +201,42 @@ impl, const D: usize> StarkOpeningSet { .next_values .iter() .chain(self.permutation_zs_next.iter().flatten()) + .chain(self.ctl_zs.iter().flatten()) .copied() .collect(), }; - FriOpenings { - batches: 
vec![zeta_batch, zeta_next_batch], + + let ctl_first_last_batches = match (self.ctl_zs_first.as_ref(), self.ctl_zs_last.as_ref()) { + (Some(first), Some(last)) => { + let first_batch = FriOpeningBatch { + values: first + .iter() + .copied() + .map(F::Extension::from_basefield) + .collect(), + }; + + let last_batch = FriOpeningBatch { + values: last + .iter() + .copied() + .map(F::Extension::from_basefield) + .collect(), + }; + + Some((first_batch, last_batch)) + } + (None, None) => None, + _ => panic!("ctl_zs_first.is_some() != ctl_zs_last.is_some()"), + }; + + let mut batches = vec![zeta_batch, zeta_next_batch]; + if let Some((first_batch, last_batch)) = ctl_first_last_batches { + batches.push(first_batch); + batches.push(last_batch); } + + FriOpenings { batches } } } diff --git a/starky/src/prover.rs b/starky/src/prover.rs index 1cfc0a2f1a..bc9aa8d806 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -19,6 +19,7 @@ use plonky2::util::{log2_ceil, log2_strict, transpose}; use crate::config::StarkConfig; use crate::consumer::basic::ConstraintConsumer; +use crate::cross_table_lookup::{CtlTableData, CtlCheckVars, CtlLinearCombChallenge, CtlChallenge, CtlColSet}; use crate::ir::Registers; use crate::permutation::{ compute_permutation_z_polys, get_n_permutation_challenge_sets, PermutationChallengeSet, @@ -28,10 +29,65 @@ use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs}; use crate::stark::Stark; use crate::vanishing_poly::eval_vanishing_poly; -pub fn prove( - stark: S, +// Compute all STARK trace commitments. 
+fn compute_trace_commitments( config: &StarkConfig, - trace_poly_values: Vec>, + trace_poly_values: &[Vec>], + timing: &mut TimingTree, +) -> Result>> +where + F: RichField + Extendable, + C: GenericConfig, +{ + let rate_bits = config.fri_config.rate_bits; + let cap_height = config.fri_config.cap_height; + + Ok(timed!( + timing, + "compute trace commitments", + trace_poly_values + .par_iter() + .cloned() + .map(|trace| { + let mut timing = TimingTree::default(); + PolynomialBatch::::from_values( + // TODO: Cloning this isn't great; consider having `from_values` accept a reference, + // or having `compute_permutation_z_polys` read trace values from the `PolynomialBatch`. + trace, + rate_bits, + false, + cap_height, + &mut timing, + None, + ) + }) + .collect::>() + )) +} + +/// Make a new challenger, compute all STARK trace commitments and observe them in the challenger +pub fn start_multi_table_prover( + config: &StarkConfig, + trace_poly_values: &[Vec>], + timing: &mut TimingTree, +) -> Result<(Vec>, Challenger)> +where + F: RichField + Extendable, + C: GenericConfig, +{ + let trace_commitments = compute_trace_commitments(config, trace_poly_values, timing)?; + let mut challenger = Challenger::::new(); + for cap in trace_commitments.iter().map(|c| &c.merkle_tree.cap) { + challenger.observe_cap(cap); + } + + Ok((trace_commitments, challenger)) +} + +pub fn prove_no_ctl( + stark: &S, + config: &StarkConfig, + trace_poly_values: &[PolynomialValues], public_inputs: Vec, timing: &mut TimingTree, ) -> Result> @@ -58,7 +114,7 @@ where PolynomialBatch::::from_values( // TODO: Cloning this isn't great; consider having `from_values` accept a reference, // or having `compute_permutation_z_polys` read trace values from the `PolynomialBatch`. 
- trace_poly_values.clone(), + trace_poly_values.to_vec(), rate_bits, false, cap_height, @@ -71,11 +127,51 @@ where let mut challenger = Challenger::new(); challenger.observe_cap(&trace_cap); + prove_single_table( + stark, + config, + &trace_poly_values, + &trace_commitment, + None, + &public_inputs, + &mut challenger, + timing, + ) +} + +pub fn prove_single_table( + stark: &S, + config: &StarkConfig, + trace_poly_values: &[PolynomialValues], + trace_commitment: &PolynomialBatch, + ctl_data: Option<&CtlTableData>, + public_inputs: &[F], + challenger: &mut Challenger, + timing: &mut TimingTree, +) -> Result> +where + F: RichField + Extendable, + C: GenericConfig, + S: Stark> + + Stark<::Packing, ConstraintConsumer<::Packing>> + + Sync, +{ + + let degree = trace_poly_values[0].len(); + let degree_bits = log2_strict(degree); + let fri_params = config.fri_params(degree_bits); + let rate_bits = config.fri_config.rate_bits; + let cap_height = config.fri_config.cap_height; + assert!( + fri_params.total_arities() <= degree_bits + rate_bits - cap_height, + "FRI total reduction arity is too large.", + ); + // Permutation arguments. 
let permutation_zs_commitment_challenges = stark.metadata().uses_permutation_args().then(|| { let permutation_challenge_sets = get_n_permutation_challenge_sets( - &mut challenger, + challenger, config.num_challenges, stark.metadata().permutation_batch_size(), ); @@ -109,11 +205,42 @@ where challenger.observe_cap(cap); } + + let ctl_zs_commitment_challenges_cols = ctl_data.map(|ctl_data| { + let zs = ctl_data.zs(); + let commitment = timed!( + timing, + "compute CTL Z commitments", + PolynomialBatch::::from_values( + zs, + rate_bits, + false, + config.fri_config.cap_height, + timing, + None + ) + ); + let challenges = ctl_data.challenges(); + let cols = ctl_data.cols(); + (commitment, challenges, cols) + }); + + let ctl_zs_commitment = ctl_zs_commitment_challenges_cols + .as_ref() + .map(|(comm, _, _)| comm); + let ctl_zs_cap = ctl_zs_commitment + .as_ref() + .map(|c| c.merkle_tree.cap.clone()); + if let Some(cap) = &ctl_zs_cap { + challenger.observe_cap(cap); + } + let alphas = challenger.get_n_challenges(config.num_challenges); let quotient_polys = compute_quotient_polys::::Packing, C, S, D>( &stark, &trace_commitment, &permutation_zs_commitment_challenges, + &ctl_zs_commitment_challenges_cols, &public_inputs, alphas, degree_bits, @@ -158,12 +285,15 @@ where g, &trace_commitment, permutation_zs_commitment, + ctl_zs_commitment, "ient_commitment, + degree_bits, ); challenger.observe_openings(&openings.to_fri_openings()); - let initial_merkle_trees = once(&trace_commitment) + let initial_merkle_trees = once(trace_commitment) .chain(permutation_zs_commitment) + .chain(ctl_zs_commitment) .chain(once("ient_commitment)) .collect::>(); @@ -171,16 +301,23 @@ where timing, "compute openings proof", PolynomialBatch::prove_openings( - &stark.metadata().fri_instance(zeta, g, config), + &stark.metadata().fri_instance( + zeta, + g, + config, + ctl_data.map(|data| data.num_zs()).unwrap_or(0), + degree_bits + ), &initial_merkle_trees, - &mut challenger, + challenger, &fri_params, 
timing, ) ); let proof = StarkProof { - trace_cap, + trace_cap: trace_commitment.merkle_tree.cap.clone(), permutation_zs_cap, + ctl_zs_cap, quotient_polys_cap, openings, opening_proof, @@ -202,6 +339,11 @@ fn compute_quotient_polys<'a, F, P, C, S, const D: usize>( PolynomialBatch, Vec>, )>, + ctl_zs_commitment_challenges_cols: &'a Option<( + PolynomialBatch, + (Vec>, Vec>), + Vec, + )>, public_inputs: &[F], alphas: Vec, degree_bits: usize, @@ -245,6 +387,22 @@ where size, ); + let ctl_zs_first_last = ctl_zs_commitment_challenges_cols.as_ref().map(|(c, _, _)| { + let mut ctl_zs_first = Vec::with_capacity(c.polynomials.len()); + let mut ctl_zs_last = Vec::with_capacity(c.polynomials.len()); + c.polynomials + .par_iter() + .map(|p| { + ( + p.eval(F::ONE), + p.eval(F::primitive_root_of_unity(degree_bits).inverse()), + ) + }) + .unzip_into_vecs(&mut ctl_zs_first, &mut ctl_zs_last); + + (ctl_zs_first, ctl_zs_last) + }); + // We will step by `P::WIDTH`, and in each iteration, evaluate the quotient polynomial at // a batch of `P::WIDTH` points. 
let quotient_values = (0..size) @@ -281,11 +439,34 @@ where permutation_challenge_sets: permutation_challenge_sets.to_vec(), }, ); + + let ctl_vars = + ctl_zs_commitment_challenges_cols + .as_ref() + .map(|(commitment, challenges, cols)| { + let local_zs = commitment.get_lde_values_packed(i_start, step); + let next_zs = commitment.get_lde_values_packed(i_next_start, step); + let (linear_comb_challenges, challenges) = challenges.clone(); + let cols = cols.clone(); + let (first_zs, last_zs) = ctl_zs_first_last.clone().unwrap(); + + CtlCheckVars { + local_zs, + next_zs, + first_zs, + last_zs, + linear_comb_challenges, + challenges, + cols, + } + }); + eval_vanishing_poly::( stark, config, vars, permutation_check_data, + ctl_vars.as_ref(), &mut consumer, ); diff --git a/starky/src/stark.rs b/starky/src/stark.rs index 5bbf540dc1..68accc4104 100644 --- a/starky/src/stark.rs +++ b/starky/src/stark.rs @@ -1,7 +1,10 @@ use alloc::vec; use alloc::vec::Vec; -use plonky2::field::extension::{Extendable, FieldExtension}; +use plonky2::field::{ + extension::{Extendable, FieldExtension}, + types::Field, +}; use plonky2::fri::structure::{ FriBatchInfo, FriBatchInfoTarget, FriInstanceInfo, FriInstanceInfoTarget, FriOracleInfo, FriPolynomialInfo, @@ -170,6 +173,8 @@ impl StarkMetadata { zeta: F::Extension, g: F, config: &StarkConfig, + num_ctl_zs: usize, + degree_bits: usize, ) -> FriInstanceInfo where F: RichField + Extendable, @@ -180,6 +185,7 @@ impl StarkMetadata { num_polys: self.columns, blinding: false, }); + let permutation_zs_info = if self.uses_permutation_args() { let num_z_polys = self.num_permutation_batches(config); let polys = FriPolynomialInfo::from_range(oracles.len(), 0..num_z_polys); @@ -191,26 +197,58 @@ impl StarkMetadata { } else { vec![] }; + let num_quotient_polys = self.quotient_degree_factor() * config.num_challenges; let quotient_info = FriPolynomialInfo::from_range(oracles.len(), 0..num_quotient_polys); oracles.push(FriOracleInfo { num_polys: 
num_quotient_polys, blinding: false, }); + + let ctl_zs_oracle_info = if num_ctl_zs > 0 { + let polys = FriPolynomialInfo::from_range(oracles.len(), 0..num_ctl_zs); + oracles.push(FriOracleInfo { + num_polys: num_ctl_zs, + blinding: false, + }); + polys + } else { + vec![] + }; + let zeta_batch = FriBatchInfo { point: zeta, polynomials: [ trace_info.clone(), permutation_zs_info.clone(), + ctl_zs_oracle_info.clone(), quotient_info, ] .concat(), }; let zeta_next_batch = FriBatchInfo { point: zeta.scalar_mul(g), - polynomials: [trace_info, permutation_zs_info].concat(), + polynomials: [trace_info, permutation_zs_info, ctl_zs_oracle_info.clone()].concat(), }; - let batches = vec![zeta_batch, zeta_next_batch]; + + + let mut batches = vec![zeta_batch, zeta_next_batch]; + + if ctl_zs_oracle_info.len() > 0 { + let zeta_first_batch = FriBatchInfo { + point: F::Extension::ONE, + polynomials: ctl_zs_oracle_info.clone() + }; + + let zeta_last_batch = FriBatchInfo { + point: F::Extension::primitive_root_of_unity(degree_bits).inverse(), + polynomials: ctl_zs_oracle_info + }; + + batches.push(zeta_first_batch); + batches.push(zeta_last_batch); + } + FriInstanceInfo { oracles, batches } } diff --git a/starky/src/vanishing_poly.rs b/starky/src/vanishing_poly.rs index 4d20b4e48c..c03b78d82f 100644 --- a/starky/src/vanishing_poly.rs +++ b/starky/src/vanishing_poly.rs @@ -7,6 +7,7 @@ use plonky2::plonk::config::GenericConfig; use crate::config::StarkConfig; use crate::consumer::basic::{ConstraintConsumer, RecursiveConstraintConsumer}; +use crate::cross_table_lookup::CtlCheckVars; use crate::ir::Registers; use crate::permutation::{ eval_permutation_checks, eval_permutation_checks_circuit, PermutationCheckDataTarget, @@ -19,6 +20,7 @@ pub(crate) fn eval_vanishing_poly, permutation_data: Option>, + ctl_vars: Option<&CtlCheckVars>, consumer: &mut ConstraintConsumer

, ) where F: RichField + Extendable, diff --git a/starky/src/verifier.rs b/starky/src/verifier.rs index 9b5a0cd18e..613cdf45ab 100644 --- a/starky/src/verifier.rs +++ b/starky/src/verifier.rs @@ -1,4 +1,5 @@ use alloc::vec::Vec; +use plonky2::iop::challenger::Challenger; use core::iter::once; use anyhow::{anyhow, ensure, Result}; @@ -12,26 +13,45 @@ use plonky2::plonk::plonk_common::reduce_with_powers; use crate::config::StarkConfig; use crate::consumer::basic::ConstraintConsumer; +use crate::cross_table_lookup::CtlCheckVars; use crate::ir::Registers; use crate::permutation::PermutationCheckVars; use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs}; use crate::stark::Stark; use crate::vanishing_poly::eval_vanishing_poly; -pub fn verify_stark_proof< +pub fn verify_stark_proof_no_ctl< F: RichField + Extendable, C: GenericConfig, S: Stark> + Stark>, const D: usize, >( - stark: S, + stark: &S, + proof_with_pis: StarkProofWithPublicInputs, + config: &StarkConfig, +) -> Result<()> { + ensure!(proof_with_pis.public_inputs.len() == stark.metadata().public_inputs); + let degree_bits = proof_with_pis.proof.recover_degree_bits(config); + let challenges = proof_with_pis.get_stark_challenges_no_ctl(stark, config, degree_bits); + verify_stark_proof_with_challenges(stark, proof_with_pis, challenges, None, degree_bits, config) +} + +pub fn verify_stark_proof_with_ctl< + F: RichField + Extendable, + C: GenericConfig, + S: Stark> + Stark>, + const D: usize, +>( + stark: &S, proof_with_pis: StarkProofWithPublicInputs, + ctl_vars: &CtlCheckVars, + challenger: &mut Challenger, config: &StarkConfig, ) -> Result<()> { ensure!(proof_with_pis.public_inputs.len() == stark.metadata().public_inputs); let degree_bits = proof_with_pis.proof.recover_degree_bits(config); - let challenges = proof_with_pis.get_challenges(&stark, config, degree_bits); - verify_stark_proof_with_challenges(stark, proof_with_pis, challenges, degree_bits, config) + let 
challenges = proof_with_pis.get_stark_challenges_with_ctl(stark, config, challenger, degree_bits); + verify_stark_proof_with_challenges(stark, proof_with_pis, challenges, Some(ctl_vars), degree_bits, config) } pub(crate) fn verify_stark_proof_with_challenges< @@ -40,14 +60,16 @@ pub(crate) fn verify_stark_proof_with_challenges< S: Stark> + Stark>, const D: usize, >( - stark: S, + stark: &S, proof_with_pis: StarkProofWithPublicInputs, challenges: StarkProofChallenges, + ctl_vars: Option<&CtlCheckVars>, degree_bits: usize, config: &StarkConfig, ) -> Result<()> { - validate_proof_shape(&stark, &proof_with_pis, config)?; - check_permutation_options(&stark, &proof_with_pis, &challenges)?; + // validate_proof_shape(&stark, &proof_with_pis, config)?; + check_permutation_options(stark, &proof_with_pis, &challenges)?; + let StarkProofWithPublicInputs { proof, public_inputs, @@ -57,6 +79,10 @@ pub(crate) fn verify_stark_proof_with_challenges< next_values, permutation_zs, permutation_zs_next, + ctl_zs: _, + ctl_zs_next: _, + ctl_zs_first: _, + ctl_zs_last, quotient_polys, } = &proof.openings; let vars = Registers { @@ -95,6 +121,7 @@ pub(crate) fn verify_stark_proof_with_challenges< config, vars, permutation_data, + ctl_vars, &mut consumer, ); let vanishing_polys_zeta = consumer.into_accumulators(); @@ -119,6 +146,7 @@ pub(crate) fn verify_stark_proof_with_challenges< let merkle_caps = once(proof.trace_cap) .chain(proof.permutation_zs_cap) + .chain(proof.ctl_zs_cap.clone()) .chain(once(proof.quotient_polys_cap)) .collect::>(); @@ -127,6 +155,8 @@ pub(crate) fn verify_stark_proof_with_challenges< challenges.stark_zeta, F::primitive_root_of_unity(degree_bits), config, + ctl_zs_last.as_ref().map(|zs| zs.len()).unwrap_or(0), + degree_bits ), &proof.openings.to_fri_openings(), &challenges.fri_challenges, @@ -138,75 +168,75 @@ pub(crate) fn verify_stark_proof_with_challenges< Ok(()) } -fn validate_proof_shape( - stark: &S, - proof_with_pis: &StarkProofWithPublicInputs, - config: 
&StarkConfig, -) -> anyhow::Result<()> -where - F: RichField + Extendable, - C: GenericConfig, - S: Stark>, -{ - let StarkProofWithPublicInputs { - proof, - public_inputs, - } = proof_with_pis; - let degree_bits = proof.recover_degree_bits(config); +// fn validate_proof_shape( +// stark: &S, +// proof_with_pis: &StarkProofWithPublicInputs, +// config: &StarkConfig, +// ) -> anyhow::Result<()> +// where +// F: RichField + Extendable, +// C: GenericConfig, +// S: Stark>, +// { +// let StarkProofWithPublicInputs { +// proof, +// public_inputs, +// } = proof_with_pis; +// let degree_bits = proof.recover_degree_bits(config); - let StarkProof { - trace_cap, - permutation_zs_cap, - quotient_polys_cap, - openings, - // The shape of the opening proof will be checked in the FRI verifier (see - // validate_fri_proof_shape), so we ignore it here. - opening_proof: _, - } = proof; +// let StarkProof { +// trace_cap, +// permutation_zs_cap, +// quotient_polys_cap, +// openings, +// // The shape of the opening proof will be checked in the FRI verifier (see +// // validate_fri_proof_shape), so we ignore it here. 
+// opening_proof: _, +// } = proof; - let StarkOpeningSet { - local_values, - next_values, - permutation_zs, - permutation_zs_next, - quotient_polys, - } = openings; +// let StarkOpeningSet { +// local_values, +// next_values, +// permutation_zs, +// permutation_zs_next, +// quotient_polys, +// } = openings; - ensure!(public_inputs.len() == stark.metadata().public_inputs); +// ensure!(public_inputs.len() == stark.metadata().public_inputs); - let fri_params = config.fri_params(degree_bits); - let cap_height = fri_params.config.cap_height; - let num_zs = stark.metadata().num_permutation_batches(config); +// let fri_params = config.fri_params(degree_bits); +// let cap_height = fri_params.config.cap_height; +// let num_zs = stark.metadata().num_permutation_batches(config); - ensure!(trace_cap.height() == cap_height); - ensure!(quotient_polys_cap.height() == cap_height); +// ensure!(trace_cap.height() == cap_height); +// ensure!(quotient_polys_cap.height() == cap_height); - ensure!(local_values.len() == stark.metadata().columns); - ensure!(next_values.len() == stark.metadata().columns); - ensure!(quotient_polys.len() == stark.metadata().num_quotient_polys(config)); +// ensure!(local_values.len() == stark.metadata().columns); +// ensure!(next_values.len() == stark.metadata().columns); +// ensure!(quotient_polys.len() == stark.metadata().num_quotient_polys(config)); - if stark.metadata().uses_permutation_args() { - let permutation_zs_cap = permutation_zs_cap - .as_ref() - .ok_or_else(|| anyhow!("Missing Zs cap"))?; - let permutation_zs = permutation_zs - .as_ref() - .ok_or_else(|| anyhow!("Missing permutation_zs"))?; - let permutation_zs_next = permutation_zs_next - .as_ref() - .ok_or_else(|| anyhow!("Missing permutation_zs_next"))?; +// if stark.metadata().uses_permutation_args() { +// let permutation_zs_cap = permutation_zs_cap +// .as_ref() +// .ok_or_else(|| anyhow!("Missing Zs cap"))?; +// let permutation_zs = permutation_zs +// .as_ref() +// .ok_or_else(|| 
anyhow!("Missing permutation_zs"))?; +// let permutation_zs_next = permutation_zs_next +// .as_ref() +// .ok_or_else(|| anyhow!("Missing permutation_zs_next"))?; - ensure!(permutation_zs_cap.height() == cap_height); - ensure!(permutation_zs.len() == num_zs); - ensure!(permutation_zs_next.len() == num_zs); - } else { - ensure!(permutation_zs_cap.is_none()); - ensure!(permutation_zs.is_none()); - ensure!(permutation_zs_next.is_none()); - } +// ensure!(permutation_zs_cap.height() == cap_height); +// ensure!(permutation_zs.len() == num_zs); +// ensure!(permutation_zs_next.len() == num_zs); +// } else { +// ensure!(permutation_zs_cap.is_none()); +// ensure!(permutation_zs.is_none()); +// ensure!(permutation_zs_next.is_none()); +// } - Ok(()) -} +// Ok(()) +// } /// Evaluate the Lagrange polynomials `L_0` and `L_(n-1)` at a point `x`. /// `L_0(x) = (x^n - 1)/(n * (x - 1))` From cc092a6405c12e714a4c0213ec366d4520b76d6e Mon Sep 17 00:00:00 2001 From: Sladuca Date: Sat, 4 Feb 2023 10:31:50 -0500 Subject: [PATCH 2/3] arithmetization itself is broken --- starky/src/proof.rs | 46 ++++++++++++++++++++------------------------ starky/src/prover.rs | 1 - starky/src/stark.rs | 2 +- 3 files changed, 22 insertions(+), 27 deletions(-) diff --git a/starky/src/proof.rs b/starky/src/proof.rs index 528c167235..54becb314a 100644 --- a/starky/src/proof.rs +++ b/starky/src/proof.rs @@ -201,38 +201,34 @@ impl, const D: usize> StarkOpeningSet { .next_values .iter() .chain(self.permutation_zs_next.iter().flatten()) - .chain(self.ctl_zs.iter().flatten()) + .chain(self.ctl_zs_next.iter().flatten()) .copied() .collect(), }; - let ctl_first_last_batches = match (self.ctl_zs_first.as_ref(), self.ctl_zs_last.as_ref()) { - (Some(first), Some(last)) => { - let first_batch = FriOpeningBatch { - values: first - .iter() - .copied() - .map(F::Extension::from_basefield) - .collect(), - }; - - let last_batch = FriOpeningBatch { - values: last - .iter() - .copied() - .map(F::Extension::from_basefield) - 
.collect(), - }; + let mut batches = vec![zeta_batch, zeta_next_batch]; - Some((first_batch, last_batch)) - } - (None, None) => None, - _ => panic!("ctl_zs_first.is_some() != ctl_zs_last.is_some()"), - }; + assert!(self.ctl_zs_first.is_some() == self.ctl_zs_last.is_some()); - let mut batches = vec![zeta_batch, zeta_next_batch]; - if let Some((first_batch, last_batch)) = ctl_first_last_batches { + if let Some(zs_first) = self.ctl_zs_first.as_ref() { + let first_batch = FriOpeningBatch { + values: zs_first + .iter() + .copied() + .map(F::Extension::from_basefield) + .collect(), + }; batches.push(first_batch); + } + + if let Some(zs_last) = self.ctl_zs_last.as_ref() { + let last_batch = FriOpeningBatch { + values: zs_last + .iter() + .copied() + .map(F::Extension::from_basefield) + .collect(), + }; batches.push(last_batch); } diff --git a/starky/src/prover.rs b/starky/src/prover.rs index bc9aa8d806..18f4edb281 100644 --- a/starky/src/prover.rs +++ b/starky/src/prover.rs @@ -205,7 +205,6 @@ where challenger.observe_cap(cap); } - let ctl_zs_commitment_challenges_cols = ctl_data.map(|ctl_data| { let zs = ctl_data.zs(); let commitment = timed!( diff --git a/starky/src/stark.rs b/starky/src/stark.rs index 68accc4104..26c51438c9 100644 --- a/starky/src/stark.rs +++ b/starky/src/stark.rs @@ -248,7 +248,7 @@ impl StarkMetadata { batches.push(zeta_first_batch); batches.push(zeta_last_batch); } - + FriInstanceInfo { oracles, batches } } From d588df209ac1061e24edef2bf9888f89e55d4289 Mon Sep 17 00:00:00 2001 From: Sladuca Date: Sat, 4 Feb 2023 10:40:40 -0500 Subject: [PATCH 3/3] make everything use prove_no_ctl --- starky/src/cross_table_lookup.rs | 12 ++++++------ starky/src/starks/rw_memory/mod.rs | 8 ++++---- starky/src/starks/stack/mod.rs | 8 ++++---- starky/src/starks/xor/mod.rs | 8 ++++---- 4 files changed, 18 insertions(+), 18 deletions(-) diff --git a/starky/src/cross_table_lookup.rs b/starky/src/cross_table_lookup.rs index f9680fa1b7..e0cca803ab 100644 --- 
a/starky/src/cross_table_lookup.rs +++ b/starky/src/cross_table_lookup.rs @@ -463,7 +463,7 @@ pub(crate) fn eval_cross_table_lookup_checks Result<()> { @@ -121,8 +121,8 @@ mod tests { let stark = S::default(); let trace = generator.into_polynomial_values(); let mut timing = TimingTree::default(); - let proof = prove::(stark, &config, trace, vec![], &mut timing)?; - verify_stark_proof(stark, proof, &config)?; + let proof = prove_no_ctl::(&stark, &config, &trace, vec![], &mut timing)?; + verify_stark_proof_no_ctl(&stark, proof, &config)?; Ok(()) } diff --git a/starky/src/starks/stack/mod.rs b/starky/src/starks/stack/mod.rs index ecfb06ea17..9df636e474 100644 --- a/starky/src/starks/stack/mod.rs +++ b/starky/src/starks/stack/mod.rs @@ -68,10 +68,10 @@ mod tests { use super::*; use crate::config::StarkConfig; - use crate::prover::prove; + use crate::prover::prove_no_ctl; use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; use crate::starks::stack::generation::StackGenerator; - use crate::verifier::verify_stark_proof; + use crate::verifier::verify_stark_proof_no_ctl; #[test] fn test_stark_degree() -> Result<()> { @@ -122,8 +122,8 @@ mod tests { let stark = S::default(); let trace = generator.into_polynomial_values(); let mut timing = TimingTree::default(); - let proof = prove::(stark, &config, trace, vec![], &mut timing)?; - verify_stark_proof(stark, proof, &config)?; + let proof = prove_no_ctl::(&stark, &config, &trace, vec![], &mut timing)?; + verify_stark_proof_no_ctl(&stark, proof, &config)?; Ok(()) } diff --git a/starky/src/starks/xor/mod.rs b/starky/src/starks/xor/mod.rs index 55bcf1ce85..956c96754f 100644 --- a/starky/src/starks/xor/mod.rs +++ b/starky/src/starks/xor/mod.rs @@ -236,9 +236,9 @@ mod tests { use super::*; use crate::config::StarkConfig; - use crate::prover::prove; + use crate::prover::prove_no_ctl; use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree}; - use 
crate::verifier::verify_stark_proof; + use crate::verifier::verify_stark_proof_no_ctl; macro_rules! test_xor { ($n:expr, $fn_name:ident) => { @@ -270,8 +270,8 @@ mod tests { let trace = generator.into_polynomial_values(); let mut timing = TimingTree::default(); - let proof = prove::(stark, &config, trace, vec![], &mut timing)?; - verify_stark_proof(stark, proof, &config) + let proof = prove_no_ctl::(&stark, &config, &trace, vec![], &mut timing)?; + verify_stark_proof_no_ctl(&stark, proof, &config) } } };