From 9d0021402c4ef83128dcd339606002b8148d8209 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Mon, 15 Sep 2025 17:46:16 +0800 Subject: [PATCH 01/34] SPARK prover with 2 gpa combined --- Cargo.toml | 1 + provekit/common/src/lib.rs | 2 +- spark-prover/Cargo.toml | 26 ++ spark-prover/README.md | 22 ++ spark-prover/src/bin/generate_test_r1cs.rs | 24 ++ spark-prover/src/bin/generate_test_request.rs | 27 ++ spark-prover/src/bin/spark-verifier.rs | 255 ++++++++++++++++++ spark-prover/src/gpa.rs | 183 +++++++++++++ spark-prover/src/lib.rs | 5 + spark-prover/src/main.rs | 54 ++++ spark-prover/src/memory.rs | 55 ++++ spark-prover/src/spark.rs | 192 +++++++++++++ spark-prover/src/utilities/iopattern/mod.rs | 54 ++++ spark-prover/src/utilities/matrix/mod.rs | 86 ++++++ spark-prover/src/utilities/mod.rs | 76 ++++++ spark-prover/src/whir.rs | 71 +++++ 16 files changed, 1132 insertions(+), 1 deletion(-) create mode 100644 spark-prover/Cargo.toml create mode 100644 spark-prover/README.md create mode 100644 spark-prover/src/bin/generate_test_r1cs.rs create mode 100644 spark-prover/src/bin/generate_test_request.rs create mode 100644 spark-prover/src/bin/spark-verifier.rs create mode 100644 spark-prover/src/gpa.rs create mode 100644 spark-prover/src/lib.rs create mode 100644 spark-prover/src/main.rs create mode 100644 spark-prover/src/memory.rs create mode 100644 spark-prover/src/spark.rs create mode 100644 spark-prover/src/utilities/iopattern/mod.rs create mode 100644 spark-prover/src/utilities/matrix/mod.rs create mode 100644 spark-prover/src/utilities/mod.rs create mode 100644 spark-prover/src/whir.rs diff --git a/Cargo.toml b/Cargo.toml index ae6eda32..f2349477 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,6 +13,7 @@ members = [ "tooling/cli", "tooling/provekit-bench", "tooling/provekit-gnark", + "spark-prover", ] exclude = [ "playground/passport-input-gen", diff --git a/provekit/common/src/lib.rs b/provekit/common/src/lib.rs index b60f6921..0e9288f2 100644 --- a/provekit/common/src/lib.rs +++ b/provekit/common/src/lib.rs @@ -10,9 +10,9 @@ pub mod witness; use crate::{ interner::{InternedFieldElement, Interner}, - sparse_matrix::{HydratedSparseMatrix, SparseMatrix}, }; pub use { + sparse_matrix::{HydratedSparseMatrix, SparseMatrix}, acir::FieldElement as NoirElement, noir_proof_scheme::{NoirProof, NoirProofScheme}, r1cs::R1CS, diff --git a/spark-prover/Cargo.toml b/spark-prover/Cargo.toml new file mode 100644 index 00000000..7a488255 --- /dev/null +++ b/spark-prover/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "spark-prover" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +provekit-common.workspace = true +provekit-r1cs-compiler.workspace = true +serde_json.workspace = true +serde.workspace = true +anyhow.workspace = true +spongefish.workspace = true +whir.workspace = true +ark-std.workspace = true +ark-ff.workspace = true +itertools = "0.14.0" + + +[lints] +workspace = true + diff --git a/spark-prover/README.md b/spark-prover/README.md new file mode 100644 index 00000000..b9e5f93e --- /dev/null +++ b/spark-prover/README.md @@ -0,0 +1,22 @@ +# SPARK +Experimental Rust prover and gnark recursive prover circuit will be implemented and optimized here. + +## Running SPARK (under development) +```cargo run --bin spark-prover``` + +## Test R1CS generation (for development) +A development utility is provided to generate test matrices. 
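+The utility (see `src/bin/generate_test_r1cs.rs`) grows the matrices to 1024 x 512 and places ones on the first 64 diagonal entries of A, B, and C.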
+To generate a test R1CS, run the following command:
+
+```cargo run -p spark-prover --bin generate_test_r1cs```
+
+## Test request generation (for development)
+A development utility is provided to generate test requests.
+To generate a test request, run the following command:
+
+```cargo run -p spark-prover --bin generate_test_request```
+
+## Reference SPARK verifier (for development)
+A reference SPARK verifier checks the correctness of SPARK proofs and serves as the reference implementation for the gnark verifier circuit.
+
+```cargo run -p spark-prover --bin spark-verifier```
\ No newline at end of file
diff --git a/spark-prover/src/bin/generate_test_r1cs.rs b/spark-prover/src/bin/generate_test_r1cs.rs
new file mode 100644
index 00000000..1a1cb7e0
--- /dev/null
+++ b/spark-prover/src/bin/generate_test_r1cs.rs
@@ -0,0 +1,24 @@
+use {
+    provekit_common::{FieldElement, R1CS},
+    std::{fs::File, io::Write},
+};
+
+fn main() {
+    let mut r1cs = R1CS::new();
+    r1cs.grow_matrices(1024, 512);
+    let interned_1 = r1cs.interner.intern(FieldElement::from(1));
+
+    for i in 0..64 {
+        r1cs.a.set(i, i, interned_1);
+        r1cs.b.set(i, i, interned_1);
+        r1cs.c.set(i, i, interned_1);
+    }
+
+    let matrix_json =
+        serde_json::to_string(&r1cs).expect("Error: Failed to serialize R1CS to JSON");
+    let mut request_file = File::create("spark-prover/r1cs.json")
+        .expect("Error: Failed to create the r1cs.json file");
+    request_file
+        .write_all(matrix_json.as_bytes())
+        .expect("Error: Failed to write JSON data to r1cs.json");
+}
diff --git a/spark-prover/src/bin/generate_test_request.rs b/spark-prover/src/bin/generate_test_request.rs
new file mode 100644
index 00000000..4cc4f7f5
--- /dev/null
+++ b/spark-prover/src/bin/generate_test_request.rs
@@ -0,0 +1,27 @@
+use {
+    provekit_common::FieldElement,
+    spark_prover::utilities::{ClaimedValues, Point, SPARKRequest},
+    std::{fs::File, io::Write},
+};
+
+fn main() {
+    let spark_request = SPARKRequest {
+        point_to_evaluate: Point {
+            row: vec![FieldElement::from(0); 10],
+            col: vec![FieldElement::from(0); 9],
+        },
+        claimed_values: ClaimedValues {
+            a: FieldElement::from(1),
+            b: FieldElement::from(1),
+            c: FieldElement::from(1),
+        },
+    };
+
+    let request_json = serde_json::to_string(&spark_request)
+        .expect("Error: Failed to serialize the SPARK request to JSON");
+    let mut request_file = File::create("spark-prover/request.json")
+        .expect("Error: Failed to create the request.json file");
+    request_file
+        .write_all(request_json.as_bytes())
+        .expect("Error: Failed to write JSON data to request.json");
+}
diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs
new file mode 100644
index 00000000..9334ee82
--- /dev/null
+++ b/spark-prover/src/bin/spark-verifier.rs
@@ -0,0 +1,255 @@
+use {
+    anyhow::{ensure, Context, Result},
+    ark_std::{One, Zero},
+    provekit_common::{
+        utils::{
+            next_power_of_two,
+            sumcheck::{calculate_eq, eval_cubic_poly},
+        },
+        FieldElement, IOPattern, skyscraper::SkyscraperSponge,
+    },
+    spark_prover::utilities::{SPARKProof, SPARKRequest},
+    spongefish::{
+        codecs::arkworks_algebra::{FieldToUnitDeserialize, UnitToField},
+        VerifierState,
+    },
+    std::fs::{self, File},
+    whir::{
+        poly_utils::multilinear::MultilinearPoint,
+        whir::{
+            committer::CommitmentReader,
+            statement::{Statement, Weights},
+            utils::HintDeserialize,
+            verifier::Verifier,
+        },
+    },
+};
+
+fn main() -> Result<()> {
+    let spark_proof_json_str = fs::read_to_string("spark-prover/spark_proof.json")
+        .context("Error: Failed to open the spark_proof.json file")?;
+    let spark_proof: SPARKProof = serde_json::from_str(&spark_proof_json_str)
+        .context("Error: Failed to deserialize JSON to SPARKProof")?;
+
+    let request_json_str = fs::read_to_string("spark-prover/request.json")
+        .context("Error: Failed to open the request.json file")?;
+    let request: SPARKRequest = serde_json::from_str(&request_json_str)
+        .context("Error: Failed to deserialize JSON to SPARKRequest")?;
+
+    let io = IOPattern::from_string(spark_proof.io_pattern);
+    let mut arthur = io.to_verifier_state(&spark_proof.transcript);
+
+    let commitment_reader = CommitmentReader::new(&spark_proof.whir_params.a);
+    let commitment_reader_row = CommitmentReader::new(&spark_proof.whir_params.row);
+
+    let val_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap();
+    let e_rx_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap();
+    let e_ry_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap();
+    let final_row_commitment = commitment_reader_row.parse_commitment(&mut arthur).unwrap();
+
+    let (randomness, last_sumcheck_value) = run_sumcheck_verifier_spark(
+        &mut arthur,
+        next_power_of_two(spark_proof.matrix_dimensions.a_nonzero_terms),
+        request.claimed_values.a,
+    )
+    .context("While verifying SPARK sumcheck")?;
+
+    let final_folds: Vec<FieldElement> = arthur.hint()?;
+
+    let mut val_statement_verifier = Statement::<FieldElement>::new(next_power_of_two(
+        spark_proof.matrix_dimensions.a_nonzero_terms,
+    ));
+    val_statement_verifier.add_constraint(
+        Weights::evaluation(MultilinearPoint(randomness.clone())),
+        final_folds[0],
+    );
+    let val_verifier = Verifier::new(&spark_proof.whir_params.a);
+    val_verifier
+        .verify(&mut arthur, &val_commitment, &val_statement_verifier)
+        .context("while verifying WHIR")?;
+
+    let mut e_rx_statement_verifier = Statement::<FieldElement>::new(next_power_of_two(
+        spark_proof.matrix_dimensions.a_nonzero_terms,
+    ));
+    e_rx_statement_verifier.add_constraint(
+        Weights::evaluation(MultilinearPoint(randomness.clone())),
+        final_folds[1],
+    );
+    let e_rx_verifier = Verifier::new(&spark_proof.whir_params.a);
+    e_rx_verifier
+        .verify(&mut arthur, &e_rx_commitment, &e_rx_statement_verifier)
+        .context("while verifying WHIR")?;
+
+    let mut e_ry_statement_verifier = Statement::<FieldElement>::new(next_power_of_two(
+        spark_proof.matrix_dimensions.a_nonzero_terms,
+    ));
+    e_ry_statement_verifier.add_constraint(
+        Weights::evaluation(MultilinearPoint(randomness.clone())),
+        final_folds[2],
+    );
+    let e_ry_verifier = Verifier::new(&spark_proof.whir_params.a);
+    e_ry_verifier
+        .verify(&mut arthur, &e_ry_commitment, &e_ry_statement_verifier)
+        .context("while verifying WHIR")?;
+
+    let mut tau_and_gamma = [FieldElement::from(0); 2];
+    arthur.fill_challenge_scalars(&mut tau_and_gamma)?;
+    let tau = tau_and_gamma[0];
+    let gamma = tau_and_gamma[1];
+
+    let gpa_result = gpa_sumcheck_verifier(
+        &mut arthur,
+        next_power_of_two(spark_proof.matrix_dimensions.num_rows) + 2,
+    )?;
+
+    let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1);
+
+    let init_adr = calculate_adr(&evaluation_randomness.to_vec());
+    let init_mem = calculate_eq(
+        &request.point_to_evaluate.row,
+        &evaluation_randomness.to_vec(),
+    );
+    let init_cntr = FieldElement::from(0);
+
+    let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau;
+
+    let final_cntr: FieldElement = arthur.hint()?;
+
+    let mut final_cntr_statement =
+        Statement::<FieldElement>::new(next_power_of_two(spark_proof.matrix_dimensions.num_rows));
+    final_cntr_statement.add_constraint(
+        Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec())),
+        final_cntr,
+    );
+
+    let final_cntr_verifier = Verifier::new(&spark_proof.whir_params.row);
+    final_cntr_verifier
+        .verify(&mut arthur, &final_row_commitment, &final_cntr_statement)
+        .context("while verifying WHIR")?;
+
+    let final_adr = calculate_adr(&evaluation_randomness.to_vec());
+    let final_mem = calculate_eq(
+        &request.point_to_evaluate.row,
+        &evaluation_randomness.to_vec(),
+    );
+
+    let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau;
+
+    let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0])
+        + final_opening * last_randomness[0];
+
+    ensure!(evaluated_value == gpa_result.last_sumcheck_value);
+
+    Ok(())
+}
+
+pub fn run_sumcheck_verifier_spark(
+    arthur: &mut VerifierState<SkyscraperSponge, FieldElement>,
+    variable_count: usize,
+    initial_sumcheck_val: FieldElement,
+) -> Result<(Vec<FieldElement>, FieldElement)> {
+    let mut saved_val_for_sumcheck_equality_assertion = initial_sumcheck_val;
+
+    let mut alpha = vec![FieldElement::zero(); variable_count];
+
+    for i in 0..variable_count {
+        let mut hhat_i = [FieldElement::zero(); 4];
+        let mut alpha_i = [FieldElement::zero(); 1];
+        arthur.fill_next_scalars(&mut hhat_i)?;
+        arthur.fill_challenge_scalars(&mut alpha_i)?;
+        alpha[i] = alpha_i[0];
+
+        let hhat_i_at_zero = eval_cubic_poly(&hhat_i, &FieldElement::zero());
+        let hhat_i_at_one = eval_cubic_poly(&hhat_i, &FieldElement::one());
+        ensure!(
+            saved_val_for_sumcheck_equality_assertion == hhat_i_at_zero + hhat_i_at_one,
+            "Sumcheck equality assertion failed"
+        );
+        saved_val_for_sumcheck_equality_assertion = eval_cubic_poly(&hhat_i, &alpha_i[0]);
+    }
+
+    Ok((alpha, saved_val_for_sumcheck_equality_assertion))
+}
+
+pub fn gpa_sumcheck_verifier(
+    arthur: &mut VerifierState<SkyscraperSponge, FieldElement>,
+    height_of_binary_tree: usize,
+) -> Result<GPASumcheckResult> {
+    let mut prev_rand = Vec::<FieldElement>::new();
+    let mut rand = Vec::<FieldElement>::new();
+    let mut claimed_values = [FieldElement::from(0); 2];
+    let mut l = [FieldElement::from(0); 2];
+    let mut r = [FieldElement::from(0); 1];
+    let mut h = [FieldElement::from(0); 4];
+    let mut alpha = [FieldElement::from(0); 1];
+
+    arthur.fill_next_scalars(&mut claimed_values)?;
+    arthur.fill_challenge_scalars(&mut r)?;
+    let mut last_sumcheck_value = eval_linear_poly(&claimed_values, &r[0]);
+
+    rand.push(r[0]);
+    prev_rand = rand;
+    rand = Vec::<FieldElement>::new();
+
+    for i in 1..(height_of_binary_tree - 1) {
+        for _ in 0..i {
+            arthur.fill_next_scalars(&mut h)?;
+            arthur.fill_challenge_scalars(&mut alpha)?;
+            assert_eq!(
+                eval_cubic_poly(&h, &FieldElement::from(0))
+                    + eval_cubic_poly(&h, &FieldElement::from(1)),
+                last_sumcheck_value
+            );
+            rand.push(alpha[0]);
+            last_sumcheck_value = eval_cubic_poly(&h, &alpha[0]);
+        }
+        arthur.fill_next_scalars(&mut l)?;
+        arthur.fill_challenge_scalars(&mut r)?;
+        let claimed_last_sch = calculate_eq(&prev_rand, &rand)
+            * eval_linear_poly(&l, &FieldElement::from(0))
+            * eval_linear_poly(&l, &FieldElement::from(1));
+        assert_eq!(claimed_last_sch, last_sumcheck_value);
+        rand.push(r[0]);
+        prev_rand = rand;
+        rand = Vec::<FieldElement>::new();
+        last_sumcheck_value = eval_linear_poly(&l, &r[0]);
+    }
+
+    Ok(GPASumcheckResult {
+        claimed_values: claimed_values.to_vec(),
+        last_sumcheck_value,
+        randomness: prev_rand,
+    })
+}
+
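+/// Output of `gpa_sumcheck_verifier`: the two claimed layer values read from
+/// the transcript, the value the final sumcheck round reduced the claim to,
+/// and the randomness accumulated for the subsequent opening checks.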
+pub struct GPASumcheckResult {
+    pub claimed_values: Vec<FieldElement>,
+    pub last_sumcheck_value: FieldElement,
+    pub randomness: Vec<FieldElement>,
+}
+
+pub fn eval_linear_poly(poly: &[FieldElement], point: &FieldElement) -> FieldElement {
+    poly[0] + *point * poly[1]
+}
+
+pub fn calculate_adr(alpha: &Vec<FieldElement>) -> FieldElement {
+    let mut ans = FieldElement::from(0);
+    let mut mult = FieldElement::from(1);
+    for a in alpha.iter().rev() {
+        ans = ans + *a * mult;
+        mult = mult * FieldElement::from(2);
+    }
+    ans
+}
diff --git a/spark-prover/src/gpa.rs b/spark-prover/src/gpa.rs
new file mode 100644
index 00000000..6b8f6ebb
--- /dev/null
+++ b/spark-prover/src/gpa.rs
@@ -0,0 +1,183 @@
+use {
+    provekit_common::{
+        utils::{
+            sumcheck::{
+                calculate_evaluations_over_boolean_hypercube_for_eq, eval_cubic_poly,
+                sumcheck_fold_map_reduce,
+            },
+            HALF,
+        },
+        FieldElement, skyscraper::SkyscraperSponge,
+    },
+    spongefish::{
+        codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField},
+        ProverState,
+    },
+    whir::poly_utils::evals::EvaluationsList,
+};
+
+// TODO: Fix gpa and add line integration
+
+pub fn run_gpa(
+    merlin: &mut ProverState<SkyscraperSponge, FieldElement>,
+    left: &Vec<FieldElement>,
+    right: &Vec<FieldElement>,
+) -> Vec<FieldElement> {
+    let mut h = left.clone();
+    h.extend(right.iter().cloned());
+    let layers = calculate_binary_multiplication_tree(h);
+
+    let mut saved_val_for_sumcheck_equality_assertion;
+    let mut r;
+    let mut line_evaluations;
+    let mut alpha = Vec::<FieldElement>::new();
+
+    (r, saved_val_for_sumcheck_equality_assertion) = add_line_to_merlin(merlin, layers[1].clone());
+
+    for i in 2..layers.len() {
+        (line_evaluations, alpha) = run_gpa_sumcheck(
+            merlin,
+            &r,
+            layers[i].clone(),
+            saved_val_for_sumcheck_equality_assertion,
+            alpha,
+        );
+        (r, saved_val_for_sumcheck_equality_assertion) =
+            add_line_to_merlin(merlin, line_evaluations.to_vec());
+    }
+
+    alpha.push(r[0]);
+
+    alpha
+}
+
+fn calculate_binary_multiplication_tree(
+    array_to_prove: Vec<FieldElement>,
+) -> Vec<Vec<FieldElement>> {
+    let mut layers = vec![];
+    let mut current_layer = array_to_prove;
+
+    while current_layer.len() > 1 {
+        let mut next_layer = vec![];
+
+        for i in (0..current_layer.len()).step_by(2) {
+            let product = current_layer[i] * current_layer[i + 1];
+            next_layer.push(product);
+        }
+
+        layers.push(current_layer);
+        current_layer = next_layer;
+    }
+
+    layers.push(current_layer);
+    layers.reverse();
+    layers
+}
+
+fn add_line_to_merlin(
+    merlin: &mut ProverState<SkyscraperSponge, FieldElement>,
+    arr: Vec<FieldElement>,
+) -> ([FieldElement; 1], FieldElement) {
+    let l_evaluations = EvaluationsList::new(arr);
+    let l_temp = l_evaluations.to_coeffs();
+    let l: &[FieldElement] = l_temp.coeffs();
+    merlin.add_scalars(l).expect("Failed to add l");
+
+    let mut r = [FieldElement::from(0); 1];
+    merlin
+        .fill_challenge_scalars(&mut r)
+        .expect("Failed to add a challenge scalar");
+
+    let saved_val_for_sumcheck_equality_assertion = l[0] + l[1] * r[0];
+
+    (r, saved_val_for_sumcheck_equality_assertion)
+}
+
+fn run_gpa_sumcheck(
+    merlin: &mut ProverState<SkyscraperSponge, FieldElement>,
+    r: &[FieldElement; 1],
+    layer: Vec<FieldElement>,
+    mut saved_val_for_sumcheck_equality_assertion: FieldElement,
+    mut alpha: Vec<FieldElement>,
+) -> ([FieldElement; 2], Vec<FieldElement>) {
+    let (mut v0, mut v1) = split_by_index(layer);
+    alpha.push(r[0]);
+    let mut eq_r = calculate_evaluations_over_boolean_hypercube_for_eq(&alpha);
+    let mut alpha_i_wrapped_in_vector = [FieldElement::from(0)];
+    let mut alpha = Vec::<FieldElement>::new();
+    let mut fold = None;
+
+    loop {
+        let [hhat_i_at_0, hhat_i_at_em1, hhat_i_at_inf_over_x_cube] =
+            sumcheck_fold_map_reduce([&mut eq_r, &mut v0, &mut v1], fold, |[eq_r, v0, v1]| {
+                [
+                    // Evaluation at 0
+                    eq_r.0 * v0.0 *
v1.0, + // Evaluation at -1 + (eq_r.0 + eq_r.0 - eq_r.1) * (v0.0 + v0.0 - v0.1) * (v1.0 + v1.0 - v1.1), + // Evaluation at infinity + (eq_r.1 - eq_r.0) * (v0.1 - v0.0) * (v1.1 - v1.0), + ] + }); + + if fold.is_some() { + eq_r.truncate(eq_r.len() / 2); + v0.truncate(v0.len() / 2); + v1.truncate(v1.len() / 2); + } + + let mut hhat_i_coeffs = [FieldElement::from(0); 4]; + + hhat_i_coeffs[0] = hhat_i_at_0; + hhat_i_coeffs[2] = HALF + * (saved_val_for_sumcheck_equality_assertion + hhat_i_at_em1 + - hhat_i_at_0 + - hhat_i_at_0 + - hhat_i_at_0); + hhat_i_coeffs[3] = hhat_i_at_inf_over_x_cube; + hhat_i_coeffs[1] = saved_val_for_sumcheck_equality_assertion + - hhat_i_coeffs[0] + - hhat_i_coeffs[0] + - hhat_i_coeffs[3] + - hhat_i_coeffs[2]; + + assert_eq!( + saved_val_for_sumcheck_equality_assertion, + hhat_i_coeffs[0] + + hhat_i_coeffs[0] + + hhat_i_coeffs[1] + + hhat_i_coeffs[2] + + hhat_i_coeffs[3] + ); + + let _ = merlin.add_scalars(&hhat_i_coeffs[..]); + let _ = merlin.fill_challenge_scalars(&mut alpha_i_wrapped_in_vector); + fold = Some(alpha_i_wrapped_in_vector[0]); + saved_val_for_sumcheck_equality_assertion = + eval_cubic_poly(&hhat_i_coeffs, &alpha_i_wrapped_in_vector[0]); + alpha.push(alpha_i_wrapped_in_vector[0]); + if eq_r.len() <= 2 { + break; + } + } + + let folded_v0 = v0[0] + (v0[1] - v0[0]) * alpha_i_wrapped_in_vector[0]; + let folded_v1 = v1[0] + (v1[1] - v1[0]) * alpha_i_wrapped_in_vector[0]; + + ([folded_v0, folded_v1], alpha) +} + +fn split_by_index(input: Vec) -> (Vec, Vec) { + let mut even_indexed = Vec::new(); + let mut odd_indexed = Vec::new(); + + for (i, item) in input.into_iter().enumerate() { + if i % 2 == 0 { + even_indexed.push(item); + } else { + odd_indexed.push(item); + } + } + + (even_indexed, odd_indexed) +} diff --git a/spark-prover/src/lib.rs b/spark-prover/src/lib.rs new file mode 100644 index 00000000..c2e08d50 --- /dev/null +++ b/spark-prover/src/lib.rs @@ -0,0 +1,5 @@ +pub mod gpa; +pub mod memory; +pub mod spark; +pub mod utilities; +pub mod whir; diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs new file mode 100644 index 00000000..9932b9bd --- /dev/null +++ b/spark-prover/src/main.rs @@ -0,0 +1,54 @@ +use { + anyhow::{Context, Result}, + provekit_common::utils::next_power_of_two, + spark_prover::{ + memory::{calculate_e_values_for_r1cs, calculate_memory}, + spark::prove_spark_for_single_matrix, + utilities::{ + calculate_matrix_dimensions, create_io_pattern, deserialize_r1cs, deserialize_request, + get_spark_r1cs, SPARKProof, + }, + whir::create_whir_configs, + }, + std::{fs::File, io::Write}, +}; + +fn main() -> Result<()> { + // Run once when receiving the matrix + let r1cs = deserialize_r1cs("spark-prover/r1cs.json") + .context("Error: Failed to create the R1CS object")?; + let spark_r1cs = get_spark_r1cs(&r1cs); + let spark_whir_configs = create_whir_configs(&r1cs); + + // Run for each request + let request = deserialize_request("spark-prover/request.json") + .context("Error: Failed to deserialize the request object")?; + let memory = calculate_memory(request.point_to_evaluate); + let e_values = calculate_e_values_for_r1cs(&memory, &r1cs); + let io_pattern = create_io_pattern(&r1cs, &spark_whir_configs); + let mut merlin = io_pattern.to_prover_state(); + + prove_spark_for_single_matrix( + &mut merlin, + spark_r1cs.a, + memory, + e_values.a, + request.claimed_values.a, + &spark_whir_configs, + )?; + + let spark_proof = SPARKProof { + transcript: merlin.narg_string().to_vec(), + io_pattern: 
String::from_utf8(io_pattern.as_bytes().to_vec()).unwrap(),
+        whir_params: spark_whir_configs,
+        matrix_dimensions: calculate_matrix_dimensions(&r1cs),
+    };
+
+    let mut spark_proof_file = File::create("spark-prover/spark_proof.json")
+        .context("Error: Failed to create the spark proof file")?;
+    spark_proof_file
+        .write_all(serde_json::to_string(&spark_proof).unwrap().as_bytes())
+        .expect("Writing the SPARK proof to a file failed");
+
+    Ok(())
+}
diff --git a/spark-prover/src/memory.rs b/spark-prover/src/memory.rs
new file mode 100644
index 00000000..76016fe6
--- /dev/null
+++ b/spark-prover/src/memory.rs
@@ -0,0 +1,55 @@
+use {
+    crate::utilities::Point,
+    provekit_common::{
+        utils::sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq, FieldElement,
+        HydratedSparseMatrix, R1CS,
+    },
+};
+
+#[derive(Debug)]
+pub struct Memory {
+    pub eq_rx: Vec<FieldElement>,
+    pub eq_ry: Vec<FieldElement>,
+}
+
+#[derive(Debug)]
+pub struct EValuesForMatrix {
+    pub e_rx: Vec<FieldElement>,
+    pub e_ry: Vec<FieldElement>,
+}
+
+#[derive(Debug)]
+pub struct EValues {
+    pub a: EValuesForMatrix,
+    pub b: EValuesForMatrix,
+    pub c: EValuesForMatrix,
+}
+
+pub fn calculate_memory(point_to_evaluate: Point) -> Memory {
+    Memory {
+        eq_rx: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.row),
+        eq_ry: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.col),
+    }
+}
+
+pub fn calculate_e_values_for_r1cs(memory: &Memory, r1cs: &R1CS) -> EValues {
+    EValues {
+        a: calculate_e_values_for_matrix(memory, &r1cs.a()),
+        b: calculate_e_values_for_matrix(memory, &r1cs.b()),
+        c: calculate_e_values_for_matrix(memory, &r1cs.c()),
+    }
+}
+
+pub fn calculate_e_values_for_matrix(
+    memory: &Memory,
+    matrix: &HydratedSparseMatrix,
+) -> EValuesForMatrix {
+    let mut e_rx = Vec::<FieldElement>::new();
+    let mut e_ry = Vec::<FieldElement>::new();
+
+    for ((r, c), _) in matrix.iter() {
+        e_rx.push(memory.eq_rx[r]);
+        e_ry.push(memory.eq_ry[c]);
+    }
+    EValuesForMatrix { e_rx, e_ry }
+}
diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs
new file mode 100644
index 00000000..e76f6a6f
--- /dev/null
+++ b/spark-prover/src/spark.rs
@@ -0,0 +1,192 @@
+use {
+    crate::{
+        gpa::run_gpa,
+        memory::{EValuesForMatrix, Memory},
+        utilities::matrix::SparkMatrix,
+        whir::{commit_to_vector, produce_whir_proof, SPARKWHIRConfigs},
+    },
+    anyhow::Result,
+    itertools::izip,
+    provekit_common::{
+        utils::{
+            sumcheck::{eval_cubic_poly, sumcheck_fold_map_reduce},
+            HALF,
+        },
+        FieldElement, skyscraper::SkyscraperSponge,
+    },
+    spongefish::{
+        codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField},
+        ProverState,
+    },
+    whir::{
+        poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint},
+        whir::{committer::CommitmentWriter, utils::HintSerialize},
+    },
+};
+
+pub fn prove_spark_for_single_matrix(
+    merlin: &mut ProverState<SkyscraperSponge, FieldElement>,
+    matrix: SparkMatrix,
+    memory: Memory,
+    e_values: EValuesForMatrix,
+    claimed_value: FieldElement,
+    whir_configs: &SPARKWHIRConfigs,
+) -> Result<()> {
+    let committer_a = CommitmentWriter::new(whir_configs.a.clone());
+    let committer_row = CommitmentWriter::new(whir_configs.row.clone());
+
+    let val_witness = commit_to_vector(&committer_a, merlin, matrix.coo.val.clone());
+    let e_rx_witness = commit_to_vector(&committer_a, merlin, e_values.e_rx.clone());
+    let e_ry_witness = commit_to_vector(&committer_a, merlin, e_values.e_ry.clone());
+
+    let final_row_witness =
+        commit_to_vector(&committer_row, merlin, matrix.timestamps.final_row.clone());
+
+    let mles = [matrix.coo.val.clone(), e_values.e_rx, e_values.e_ry];
+    let (sumcheck_final_folds, folding_randomness) =
+        run_spark_sumcheck(merlin, mles, claimed_value)?;
+
+    produce_whir_proof(
+        merlin,
+        MultilinearPoint(folding_randomness.clone()),
+        sumcheck_final_folds[0],
+        whir_configs.a.clone(),
+        val_witness,
+    )?;
+
+    produce_whir_proof(
+        merlin,
+        MultilinearPoint(folding_randomness.clone()),
+        sumcheck_final_folds[1],
+        whir_configs.b.clone(),
+        e_rx_witness,
+    )?;
+
+    produce_whir_proof(
+        merlin,
+        MultilinearPoint(folding_randomness.clone()),
+        sumcheck_final_folds[2],
+        whir_configs.c.clone(),
+        e_ry_witness,
+    )?;
+
+    // Rowwise
+
+    let mut tau_and_gamma = [FieldElement::from(0); 2];
+    merlin.fill_challenge_scalars(&mut tau_and_gamma)?;
+    let tau = tau_and_gamma[0];
+    let gamma = tau_and_gamma[1];
+
+    let init_address: Vec<FieldElement> = (0..memory.eq_rx.len() as u64)
+        .map(FieldElement::from)
+        .collect();
+    let init_value = memory.eq_rx.clone();
+    let init_timestamp = vec![FieldElement::from(0); memory.eq_rx.len()];
+
+    let init_vec: Vec<FieldElement> = izip!(init_address, init_value, init_timestamp)
+        .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+        .collect();
+
+    let final_address: Vec<FieldElement> = (0..memory.eq_rx.len() as u64)
+        .map(FieldElement::from)
+        .collect();
+    let final_value = memory.eq_rx.clone();
+    let final_timestamp = matrix.timestamps.final_row.clone();
+
+    let final_vec: Vec<FieldElement> = izip!(final_address, final_value, final_timestamp)
+        .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+        .collect();
+
+    let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
+
+    let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
+
+    // TODO: Can I avoid evaluating here?
+    let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone())
+        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec()));
+    merlin.hint(&final_row_eval)?;
+
+    produce_whir_proof(
+        merlin,
+        MultilinearPoint(evaluation_randomness.to_vec()),
+        final_row_eval,
+        whir_configs.row.clone(),
+        final_row_witness,
+    )?;
+
+    Ok(())
+}
+
+pub fn run_spark_sumcheck(
+    merlin: &mut ProverState<SkyscraperSponge, FieldElement>,
+    mles: [Vec<FieldElement>; 3],
+    mut claimed_value: FieldElement,
+) -> Result<([FieldElement; 3], Vec<FieldElement>)> {
+    let mut sumcheck_randomness = [FieldElement::from(0)];
+    let mut sumcheck_randomness_accumulator = Vec::<FieldElement>::new();
+    let mut fold = None;
+
+    let mut m0 = mles[0].clone();
+    let mut m1 = mles[1].clone();
+    let mut m2 = mles[2].clone();
+
+    loop {
+        let [hhat_i_at_0, hhat_i_at_em1, hhat_i_at_inf_over_x_cube] =
+            sumcheck_fold_map_reduce([&mut m0, &mut m1, &mut m2], fold, |[m0, m1, m2]| {
+                [
+                    // Evaluation at 0
+                    m0.0 * m1.0 * m2.0,
+                    // Evaluation at -1
+                    (m0.0 + m0.0 - m0.1) * (m1.0 + m1.0 - m1.1) * (m2.0 + m2.0 - m2.1),
+                    // Evaluation at infinity
+                    (m0.1 - m0.0) * (m1.1 - m1.0) * (m2.1 - m2.0),
+                ]
+            });
+
+        if fold.is_some() {
+            m0.truncate(m0.len() / 2);
+            m1.truncate(m1.len() / 2);
+            m2.truncate(m2.len() / 2);
+        }
+
+        let mut hhat_i_coeffs = [FieldElement::from(0); 4];
+
+        hhat_i_coeffs[0] = hhat_i_at_0;
+        hhat_i_coeffs[2] =
+            HALF * (claimed_value + hhat_i_at_em1 - hhat_i_at_0 - hhat_i_at_0 - hhat_i_at_0);
+        hhat_i_coeffs[3] = hhat_i_at_inf_over_x_cube;
+        hhat_i_coeffs[1] = claimed_value
+            - hhat_i_coeffs[0]
+            - hhat_i_coeffs[0]
+            - hhat_i_coeffs[3]
+            - hhat_i_coeffs[2];
+
+        assert_eq!(
+            claimed_value,
+            hhat_i_coeffs[0]
+                + hhat_i_coeffs[0]
+                + hhat_i_coeffs[1]
+                + hhat_i_coeffs[2]
+                + hhat_i_coeffs[3]
+        );
+
+        merlin.add_scalars(&hhat_i_coeffs[..])?;
+        merlin.fill_challenge_scalars(&mut sumcheck_randomness)?;
+        fold = Some(sumcheck_randomness[0]);
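+        // `Some(alpha_i)` makes the next `sumcheck_fold_map_reduce` call fold the
+        // three tables at the fresh challenge before they are truncated to half
+        // length; the running claim becomes hhat_i(alpha_i) below.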
+        claimed_value = eval_cubic_poly(&hhat_i_coeffs, &sumcheck_randomness[0]);
+        sumcheck_randomness_accumulator.push(sumcheck_randomness[0]);
+        if m0.len() <= 2 {
+            break;
+        }
+    }
+
+    let folded_v0 = m0[0] + (m0[1] - m0[0]) * sumcheck_randomness[0];
+    let folded_v1 = m1[0] + (m1[1] - m1[0]) * sumcheck_randomness[0];
+    let folded_v2 = m2[0] + (m2[1] - m2[0]) * sumcheck_randomness[0];
+
+    merlin.hint::<Vec<FieldElement>>(&[folded_v0, folded_v1, folded_v2].to_vec())?;
+    Ok((
+        [folded_v0, folded_v1, folded_v2],
+        sumcheck_randomness_accumulator,
+    ))
+}
diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs
new file mode 100644
index 00000000..33d1f313
--- /dev/null
+++ b/spark-prover/src/utilities/iopattern/mod.rs
@@ -0,0 +1,54 @@
+use {
+    crate::whir::SPARKWHIRConfigs,
+    provekit_common::{
+        utils::{next_power_of_two, sumcheck::SumcheckIOPattern},
+        FieldElement, IOPattern, R1CS,
+    },
+    spongefish::codecs::arkworks_algebra::FieldDomainSeparator,
+    whir::whir::domainsep::WhirDomainSeparator,
+};
+
+pub trait SPARKDomainSeparator {
+    fn add_tau_and_gamma(self) -> Self;
+
+    fn add_line(self) -> Self;
+}
+
+impl SPARKDomainSeparator for IOPattern
+where
+    IOPattern: FieldDomainSeparator<FieldElement>,
+{
+    fn add_tau_and_gamma(self) -> Self {
+        self.challenge_scalars(2, "tau and gamma")
+    }
+
+    fn add_line(self) -> Self {
+        self.add_scalars(2, "gpa line")
+            .challenge_scalars(1, "gpa line random")
+    }
+}
+
+pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern {
+    let mut io = IOPattern::new("💥")
+        .commit_statement(&configs.a)
+        .commit_statement(&configs.a)
+        .commit_statement(&configs.a)
+        .commit_statement(&configs.row)
+        .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries()))
+        .hint("sumcheck_last_folds")
+        .add_whir_proof(&configs.a)
+        .add_whir_proof(&configs.a)
+        .add_whir_proof(&configs.a)
+        .add_tau_and_gamma();
+
+    for i in 0..=next_power_of_two(r1cs.a.num_rows) {
+        io = io.add_sumcheck_polynomials(i);
+        io = io.add_line();
+    }
+
+    io = io
+        .hint("Row final counter claimed evaluation")
+        .add_whir_proof(&configs.row);
+
+    io
+}
diff --git a/spark-prover/src/utilities/matrix/mod.rs b/spark-prover/src/utilities/matrix/mod.rs
new file mode 100644
index 00000000..5cba9545
--- /dev/null
+++ b/spark-prover/src/utilities/matrix/mod.rs
@@ -0,0 +1,86 @@
+use provekit_common::{FieldElement, HydratedSparseMatrix, SparseMatrix, R1CS};
+
+#[derive(Debug)]
+pub struct SparkR1CS {
+    pub a: SparkMatrix,
+    pub b: SparkMatrix,
+    pub c: SparkMatrix,
+}
+#[derive(Debug)]
+pub struct SparkMatrix {
+    pub coo: COOMatrix,
+    pub timestamps: TimeStamps,
+}
+#[derive(Debug)]
+pub struct COOMatrix {
+    pub row: Vec<FieldElement>,
+    pub col: Vec<FieldElement>,
+    pub val: Vec<FieldElement>,
+}
+#[derive(Debug)]
+pub struct TimeStamps {
+    pub read_row: Vec<FieldElement>,
+    pub read_col: Vec<FieldElement>,
+    pub final_row: Vec<FieldElement>,
+    pub final_col: Vec<FieldElement>,
+}
+
+pub fn get_spark_r1cs(r1cs: &R1CS) -> SparkR1CS {
+    SparkR1CS {
+        a: get_spark_matrix(&r1cs.a()),
+        b: get_spark_matrix(&r1cs.b()),
+        c: get_spark_matrix(&r1cs.c()),
+    }
+}
+
+pub fn get_spark_matrix(matrix: &HydratedSparseMatrix) -> SparkMatrix {
+    SparkMatrix {
+        coo: get_coordinate_rep_of_a_matrix(matrix),
+        timestamps: calculate_timestamps(matrix),
+    }
+}
+
+pub fn get_coordinate_rep_of_a_matrix(matrix: &HydratedSparseMatrix) -> COOMatrix {
+    let mut row = Vec::<FieldElement>::new();
+    let mut col = Vec::<FieldElement>::new();
+    let mut val = Vec::<FieldElement>::new();
+
+    for ((r, c), value) in matrix.iter() {
+        row.push(FieldElement::from(r as u64));
+        col.push(FieldElement::from(c as u64));
+        val.push(value.clone());
+    }
+
+    COOMatrix { row, col, val }
+}
+
+pub fn calculate_timestamps(matrix: &HydratedSparseMatrix) -> TimeStamps {
+    let mut read_row_counters = vec![0; matrix.matrix.num_rows];
+    let mut read_row = Vec::<FieldElement>::new();
+    let mut read_col_counters = vec![0; matrix.matrix.num_cols];
+    let mut read_col = Vec::<FieldElement>::new();
+
+    for ((r, c), _) in matrix.iter() {
+        read_row.push(FieldElement::from(read_row_counters[r] as u64));
+        read_row_counters[r] += 1;
+        read_col.push(FieldElement::from(read_col_counters[c] as u64));
+        read_col_counters[c] += 1;
+    }
+
+    let final_row = read_row_counters
+        .iter()
+        .map(|&x| FieldElement::from(x as u64))
+        .collect::<Vec<FieldElement>>();
+
+    let final_col = read_col_counters
+        .iter()
+        .map(|&x| FieldElement::from(x as u64))
+        .collect::<Vec<FieldElement>>();
+
+    TimeStamps {
+        read_row,
+        read_col,
+        final_row,
+        final_col,
+    }
+}
diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs
new file mode 100644
index 00000000..b967aef1
--- /dev/null
+++ b/spark-prover/src/utilities/mod.rs
@@ -0,0 +1,76 @@
+mod iopattern;
+pub mod matrix;
+use {
+    crate::whir::SPARKWHIRConfigs,
+    anyhow::{Context, Result},
+    provekit_common::{
+        utils::{serde_ark, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq},
+        FieldElement, HydratedSparseMatrix, WhirConfig, R1CS,
+    },
+    serde::{Deserialize, Serialize},
+    std::fs,
+};
+pub use {iopattern::create_io_pattern, matrix::get_spark_r1cs};
+
+pub fn deserialize_r1cs(path_str: &str) -> Result<R1CS> {
+    let json_str =
+        fs::read_to_string(path_str).context("Error: Failed to open the r1cs.json file")?;
+    serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS")
+}
+
+pub fn deserialize_request(path_str: &str) -> Result<SPARKRequest> {
+    let json_str =
+        fs::read_to_string(path_str).context("Error: Failed to open the request.json file")?;
+    serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to SPARKRequest")
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct SPARKRequest {
+    pub point_to_evaluate: Point,
+    pub claimed_values: ClaimedValues,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct Point {
+    #[serde(with = "serde_ark")]
+    pub row: Vec<FieldElement>,
+    #[serde(with = "serde_ark")]
+    pub col: Vec<FieldElement>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct ClaimedValues {
+    #[serde(with = "serde_ark")]
+    pub a: FieldElement,
+    #[serde(with = "serde_ark")]
+    pub b: FieldElement,
+    #[serde(with = "serde_ark")]
+    pub c: FieldElement,
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct SPARKProof {
+    pub transcript: Vec<u8>,
+    pub io_pattern: String,
+    pub whir_params: SPARKWHIRConfigs,
+    pub matrix_dimensions: MatrixDimensions,
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct MatrixDimensions {
+    pub num_rows: usize,
+    pub num_cols: usize,
+    pub a_nonzero_terms: usize,
+    pub b_nonzero_terms: usize,
+    pub c_nonzero_terms: usize,
+}
+
+pub fn calculate_matrix_dimensions(r1cs: &R1CS) -> MatrixDimensions {
+    MatrixDimensions {
+        num_rows: r1cs.a.num_rows,
+        num_cols: r1cs.a.num_cols,
+        a_nonzero_terms: r1cs.a.num_entries(),
+        b_nonzero_terms: r1cs.b.num_entries(),
+        c_nonzero_terms: r1cs.c.num_entries(),
+    }
+}
diff --git a/spark-prover/src/whir.rs b/spark-prover/src/whir.rs
new file mode 100644
index 00000000..fd9d81d6
--- /dev/null
+++ b/spark-prover/src/whir.rs
@@ -0,0 +1,71 @@
+use {
+    anyhow::{Context, Result},
+    provekit_common::{
+        WhirR1CSScheme, utils::next_power_of_two, FieldElement, skyscraper::SkyscraperMerkleConfig,
+
skyscraper::SkyscraperPoW, skyscraper::SkyscraperSponge, WhirConfig, R1CS, + }, + provekit_r1cs_compiler::WhirR1CSSchemeBuilder, + serde::{Deserialize, Serialize}, + spongefish::ProverState, + whir::{ + poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, + whir::{ + committer::{CommitmentWriter, Witness}, + prover::Prover, + statement::{Statement, Weights}, + }, + }, +}; + +pub fn commit_to_vector( + committer: &CommitmentWriter, + merlin: &mut ProverState, + vector: Vec, +) -> Witness { + assert!( + vector.len().is_power_of_two(), + "Committed vector length must be a power of two" + ); + let evals = EvaluationsList::new(vector); + let coeffs = evals.to_coeffs(); + committer + .commit(merlin, coeffs) + .expect("WHIR prover failed to commit") +} + +#[derive(Serialize, Deserialize)] +pub struct SPARKWHIRConfigs { + pub row: WhirConfig, + pub col: WhirConfig, + pub a: WhirConfig, + pub b: WhirConfig, + pub c: WhirConfig, +} + +pub fn create_whir_configs(r1cs: &R1CS) -> SPARKWHIRConfigs { + SPARKWHIRConfigs { + row: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_rows), 1), + col: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_cols), 1), + a: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 1), + b: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.b.num_entries()), 1), + c: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.c.num_entries()), 1), + } +} + +pub fn produce_whir_proof( + merlin: &mut ProverState, + evaluation_point: MultilinearPoint, + evaluated_value: FieldElement, + config: WhirConfig, + witness: Witness, +) -> Result<()> { + let mut statement = Statement::::new(evaluation_point.num_variables()); + statement.add_constraint(Weights::evaluation(evaluation_point), evaluated_value); + let prover = Prover(config); + + prover + .prove(merlin, statement, witness) + .context("while generating WHIR proof")?; + + Ok(()) +} From 03e2726d6a548758faee4e03cb8df31c646717db Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Tue, 16 Sep 2025 16:58:49 +0800 Subject: [PATCH 02/34] Adds RS and WS --- provekit/common/src/lib.rs | 6 +- spark-prover/src/bin/generate_test_r1cs.rs | 4 +- spark-prover/src/bin/spark-verifier.rs | 75 ++++++++++++++++++- spark-prover/src/gpa.rs | 3 +- spark-prover/src/spark.rs | 82 +++++++++++++++++++-- spark-prover/src/utilities/iopattern/mod.rs | 18 +++++ spark-prover/src/whir.rs | 5 +- 7 files changed, 176 insertions(+), 17 deletions(-) diff --git a/provekit/common/src/lib.rs b/provekit/common/src/lib.rs index 0e9288f2..68efb571 100644 --- a/provekit/common/src/lib.rs +++ b/provekit/common/src/lib.rs @@ -8,14 +8,12 @@ pub mod utils; mod whir_r1cs; pub mod witness; -use crate::{ - interner::{InternedFieldElement, Interner}, -}; +use crate::interner::{InternedFieldElement, Interner}; pub use { - sparse_matrix::{HydratedSparseMatrix, SparseMatrix}, acir::FieldElement as NoirElement, noir_proof_scheme::{NoirProof, NoirProofScheme}, r1cs::R1CS, + sparse_matrix::{HydratedSparseMatrix, SparseMatrix}, whir::crypto::fields::Field256 as FieldElement, whir_r1cs::{IOPattern, WhirConfig, WhirR1CSProof, WhirR1CSScheme}, }; diff --git a/spark-prover/src/bin/generate_test_r1cs.rs b/spark-prover/src/bin/generate_test_r1cs.rs index 1a1cb7e0..5c0c3fc7 100644 --- a/spark-prover/src/bin/generate_test_r1cs.rs +++ b/spark-prover/src/bin/generate_test_r1cs.rs @@ -16,8 +16,8 @@ fn main() { let matrix_json = serde_json::to_string(&r1cs).expect("Error: Failed to serialize 
R1CS to JSON"); - let mut request_file = File::create("spark-prover/r1cs.json") - .expect("Error: Failed to create the r1cs.json file"); + let mut request_file = + File::create("spark-prover/r1cs.json").expect("Error: Failed to create the r1cs.json file"); request_file .write_all(matrix_json.as_bytes()) .expect("Error: Failed to write JSON data to r1cs.json"); diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index 9334ee82..f8fcc03a 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -2,11 +2,12 @@ use { anyhow::{ensure, Context, Result}, ark_std::{One, Zero}, provekit_common::{ + skyscraper::SkyscraperSponge, utils::{ next_power_of_two, sumcheck::{calculate_eq, eval_cubic_poly}, }, - FieldElement, IOPattern, skyscraper::SkyscraperSponge, + FieldElement, IOPattern, }, spark_prover::utilities::{SPARKProof, SPARKRequest}, spongefish::{ @@ -46,6 +47,8 @@ fn main() -> Result<()> { let e_rx_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); let e_ry_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); let final_row_commitment = commitment_reader_row.parse_commitment(&mut arthur).unwrap(); + let row_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); + let read_ts_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); let (randomness, last_sumcheck_value) = run_sumcheck_verifier_spark( &mut arthur, @@ -102,6 +105,9 @@ fn main() -> Result<()> { next_power_of_two(spark_proof.matrix_dimensions.num_rows) + 2, )?; + let claimed_init = gpa_result.claimed_values[0]; + let claimed_final = gpa_result.claimed_values[1]; + let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); let init_adr = calculate_adr(&evaluation_randomness.to_vec()); @@ -140,6 +146,73 @@ fn main() -> Result<()> { ensure!(evaluated_value == gpa_result.last_sumcheck_value); + // let mut rs_address: FieldElement = arthur.hint()?; + let gpa_result = gpa_sumcheck_verifier( + &mut arthur, + next_power_of_two(spark_proof.matrix_dimensions.a_nonzero_terms) + 2, + )?; + + let claimed_rs = gpa_result.claimed_values[0]; + let claimed_ws = gpa_result.claimed_values[1]; + + let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + + let rs_adr = arthur.hint()?; + + let mut rs_adr_statement = Statement::::new(next_power_of_two( + spark_proof.matrix_dimensions.a_nonzero_terms, + )); + rs_adr_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + rs_adr, + ); + + let rs_adr_verifier = Verifier::new(&spark_proof.whir_params.a); + rs_adr_verifier + .verify(&mut arthur, &row_commitment, &rs_adr_statement) + .context("while verifying WHIR")?; + + let rs_mem = arthur.hint()?; + + let mut rs_val_statement = Statement::::new(next_power_of_two( + spark_proof.matrix_dimensions.a_nonzero_terms, + )); + rs_val_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + rs_mem, + ); + + let rs_val_verifier = Verifier::new(&spark_proof.whir_params.a); + rs_val_verifier + .verify(&mut arthur, &e_rx_commitment, &rs_val_statement) + .context("while verifying WHIR")?; + + let rs_timestamp = arthur.hint()?; + + let mut rs_timestamp_statement = Statement::::new(next_power_of_two( + spark_proof.matrix_dimensions.a_nonzero_terms, + )); + rs_timestamp_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), 
+ rs_timestamp, + ); + + let rs_timestamp_verifier = Verifier::new(&spark_proof.whir_params.a); + rs_timestamp_verifier + .verify(&mut arthur, &read_ts_commitment, &rs_timestamp_statement) + .context("while verifying WHIR")?; + + let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; + let ws_opening = + rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; + + let evaluated_value = + rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; + + ensure!(evaluated_value == gpa_result.last_sumcheck_value); + + ensure!(claimed_init * claimed_ws == claimed_final * claimed_rs); + Ok(()) } diff --git a/spark-prover/src/gpa.rs b/spark-prover/src/gpa.rs index 6b8f6ebb..26423f1e 100644 --- a/spark-prover/src/gpa.rs +++ b/spark-prover/src/gpa.rs @@ -1,5 +1,6 @@ use { provekit_common::{ + skyscraper::SkyscraperSponge, utils::{ sumcheck::{ calculate_evaluations_over_boolean_hypercube_for_eq, eval_cubic_poly, @@ -7,7 +8,7 @@ use { }, HALF, }, - FieldElement, skyscraper::SkyscraperSponge, + FieldElement, }, spongefish::{ codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index e76f6a6f..6f240e2f 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -8,11 +8,12 @@ use { anyhow::Result, itertools::izip, provekit_common::{ + skyscraper::SkyscraperSponge, utils::{ sumcheck::{eval_cubic_poly, sumcheck_fold_map_reduce}, HALF, }, - FieldElement, skyscraper::SkyscraperSponge, + FieldElement, }, spongefish::{ codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, @@ -39,10 +40,17 @@ pub fn prove_spark_for_single_matrix( let e_rx_witness = commit_to_vector(&committer_a, merlin, e_values.e_rx.clone()); let e_ry_witness = commit_to_vector(&committer_a, merlin, e_values.e_ry.clone()); - let final_row_witness = + let final_row_ts_witness = commit_to_vector(&committer_row, merlin, matrix.timestamps.final_row.clone()); + let row_witness = commit_to_vector(&committer_a, merlin, matrix.coo.row.clone()); + let read_ts_witness = + commit_to_vector(&committer_a, merlin, matrix.timestamps.read_row.clone()); - let mles = [matrix.coo.val.clone(), e_values.e_rx, e_values.e_ry]; + let mles = [ + matrix.coo.val.clone(), + e_values.e_rx.clone(), + e_values.e_ry.clone(), + ]; let (sumcheck_final_folds, folding_randomness) = run_spark_sumcheck(merlin, mles, claimed_value)?; @@ -51,7 +59,7 @@ pub fn prove_spark_for_single_matrix( MultilinearPoint(folding_randomness.clone()), sumcheck_final_folds[0], whir_configs.a.clone(), - val_witness, + val_witness.clone(), )?; produce_whir_proof( @@ -59,7 +67,7 @@ pub fn prove_spark_for_single_matrix( MultilinearPoint(folding_randomness.clone()), sumcheck_final_folds[1], whir_configs.b.clone(), - e_rx_witness, + e_rx_witness.clone(), )?; produce_whir_proof( @@ -100,7 +108,7 @@ pub fn prove_spark_for_single_matrix( let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - + // TODO: Can I avoid evaluating here? 
let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone()) .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); @@ -111,7 +119,67 @@ pub fn prove_spark_for_single_matrix( MultilinearPoint(evaluation_randomness.to_vec()), final_row_eval, whir_configs.row.clone(), - final_row_witness, + final_row_ts_witness, + )?; + + let rs_address = matrix.coo.row.clone(); + let rs_value = e_values.e_rx.clone(); + let rs_timestamp = matrix.timestamps.read_row.clone(); + + let rs_vec: Vec = + izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); + + let ws_address = matrix.coo.row.clone(); + let ws_value = e_values.e_rx.clone(); + let ws_timestamp: Vec = matrix + .timestamps + .read_row + .into_iter() + .map(|a| a + FieldElement::from(1)) + .collect(); + + let ws_vec: Vec = + izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); + + let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); + + let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + + let rs_address_eval = EvaluationsList::new(rs_address) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_address_eval)?; + produce_whir_proof( + merlin, + MultilinearPoint(evaluation_randomness.to_vec()), + rs_address_eval, + whir_configs.a.clone(), + row_witness.clone(), + )?; + + let rs_value_eval = EvaluationsList::new(rs_value) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_value_eval)?; + produce_whir_proof( + merlin, + MultilinearPoint(evaluation_randomness.to_vec()), + rs_value_eval, + whir_configs.a.clone(), + e_rx_witness.clone(), + )?; + + let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_timestamp_eval)?; + produce_whir_proof( + merlin, + MultilinearPoint(evaluation_randomness.to_vec()), + rs_timestamp_eval, + whir_configs.a.clone(), + read_ts_witness.clone(), )?; Ok(()) diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs index 33d1f313..b071fbfc 100644 --- a/spark-prover/src/utilities/iopattern/mod.rs +++ b/spark-prover/src/utilities/iopattern/mod.rs @@ -34,6 +34,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { .commit_statement(&configs.a) .commit_statement(&configs.a) .commit_statement(&configs.row) + .commit_statement(&configs.a) + .commit_statement(&configs.a) .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) .hint("sumcheck_last_folds") .add_whir_proof(&configs.a) @@ -50,5 +52,21 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { .hint("Row final counter claimed evaluation") .add_whir_proof(&configs.row); + for i in 0..=next_power_of_two(r1cs.a.num_entries()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } + + io = io + .hint("RS address claimed evaluation") + .add_whir_proof(&configs.a); + + io = io + .hint("RS value claimed evaluation") + .add_whir_proof(&configs.a); + + io = io + .hint("RS timestamp claimed evaluation") + .add_whir_proof(&configs.a); io } diff --git a/spark-prover/src/whir.rs b/spark-prover/src/whir.rs index fd9d81d6..7fcbc3c0 100644 --- a/spark-prover/src/whir.rs +++ b/spark-prover/src/whir.rs @@ -1,8 +1,9 @@ use { anyhow::{Context, Result}, provekit_common::{ 
- WhirR1CSScheme, utils::next_power_of_two, FieldElement, skyscraper::SkyscraperMerkleConfig, - skyscraper::SkyscraperPoW, skyscraper::SkyscraperSponge, WhirConfig, R1CS, + skyscraper::{SkyscraperMerkleConfig, SkyscraperPoW, SkyscraperSponge}, + utils::next_power_of_two, + FieldElement, WhirConfig, WhirR1CSScheme, R1CS, }, provekit_r1cs_compiler::WhirR1CSSchemeBuilder, serde::{Deserialize, Serialize}, From dd1dd4216db90823efa0538250d7d936fee47c06 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Wed, 17 Sep 2025 14:24:13 +0800 Subject: [PATCH 03/34] wip: batch whir --- spark-prover/src/bin/spark-verifier.rs | 284 ++++++++++---------- spark-prover/src/spark.rs | 271 ++++++++++--------- spark-prover/src/utilities/iopattern/mod.rs | 50 ++-- spark-prover/src/whir.rs | 2 + 4 files changed, 296 insertions(+), 311 deletions(-) diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index f8fcc03a..33723ef2 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -42,10 +42,10 @@ fn main() -> Result<()> { let commitment_reader = CommitmentReader::new(&spark_proof.whir_params.a); let commitment_reader_row = CommitmentReader::new(&spark_proof.whir_params.row); + let a_spark_sumcheck_commitment_reader = CommitmentReader::new(&spark_proof.whir_params.a_spark_sumcheck); - let val_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); - let e_rx_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); - let e_ry_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); + let a_spark_sumcheck_commitment = a_spark_sumcheck_commitment_reader.parse_commitment(&mut arthur)?; + let final_row_commitment = commitment_reader_row.parse_commitment(&mut arthur).unwrap(); let row_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); let read_ts_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); @@ -59,159 +59,147 @@ fn main() -> Result<()> { let final_folds: Vec = arthur.hint()?; - let mut val_statement_verifier = Statement::::new(next_power_of_two( + let mut a_spark_sumcheck_statement_verifier = Statement::::new(next_power_of_two( spark_proof.matrix_dimensions.a_nonzero_terms, )); - val_statement_verifier.add_constraint( - Weights::evaluation(MultilinearPoint(randomness.clone())), - final_folds[0], - ); - let val_verifier = Verifier::new(&spark_proof.whir_params.a); - val_verifier - .verify(&mut arthur, &val_commitment, &val_statement_verifier) - .context("while verifying WHIR")?; - let mut e_rx_statement_verifier = Statement::::new(next_power_of_two( - spark_proof.matrix_dimensions.a_nonzero_terms, - )); - e_rx_statement_verifier.add_constraint( - Weights::evaluation(MultilinearPoint(randomness.clone())), - final_folds[1], - ); - let e_rx_verifier = Verifier::new(&spark_proof.whir_params.a); - e_rx_verifier - .verify(&mut arthur, &e_rx_commitment, &e_rx_statement_verifier) - .context("while verifying WHIR")?; + // a_spark_sumcheck_statement_verifier.add_constraint( + // Weights::evaluation(MultilinearPoint(randomness.clone())), + // final_folds[0] + + // final_folds[1] * a_spark_sumcheck_commitment.batching_randomness + + // final_folds[2] * a_spark_sumcheck_commitment.batching_randomness * a_spark_sumcheck_commitment.batching_randomness, + // ); + println!("{:?}", final_folds[0] + final_folds[1] * a_spark_sumcheck_commitment.batching_randomness); - let mut e_ry_statement_verifier = Statement::::new(next_power_of_two( - 
spark_proof.matrix_dimensions.a_nonzero_terms, - )); - e_ry_statement_verifier.add_constraint( + a_spark_sumcheck_statement_verifier.add_constraint( Weights::evaluation(MultilinearPoint(randomness.clone())), - final_folds[2], + final_folds[0] + final_folds[1] * a_spark_sumcheck_commitment.batching_randomness, ); - let e_ry_verifier = Verifier::new(&spark_proof.whir_params.a); - e_ry_verifier - .verify(&mut arthur, &e_ry_commitment, &e_ry_statement_verifier) - .context("while verifying WHIR")?; - - let mut tau_and_gamma = [FieldElement::from(0); 2]; - arthur.fill_challenge_scalars(&mut tau_and_gamma)?; - let tau = tau_and_gamma[0]; - let gamma = tau_and_gamma[1]; - - let gpa_result = gpa_sumcheck_verifier( - &mut arthur, - next_power_of_two(spark_proof.matrix_dimensions.num_rows) + 2, - )?; - - let claimed_init = gpa_result.claimed_values[0]; - let claimed_final = gpa_result.claimed_values[1]; - - let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - - let init_adr = calculate_adr(&evaluation_randomness.to_vec()); - let init_mem = calculate_eq( - &request.point_to_evaluate.row, - &evaluation_randomness.to_vec(), - ); - let init_cntr = FieldElement::from(0); - - let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; - - let mut final_cntr: FieldElement = arthur.hint()?; - - let mut final_cntr_statement = - Statement::::new(next_power_of_two(spark_proof.matrix_dimensions.num_rows)); - final_cntr_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - final_cntr, - ); - - let final_cntr_verifier = Verifier::new(&spark_proof.whir_params.row); - final_cntr_verifier - .verify(&mut arthur, &final_row_commitment, &final_cntr_statement) - .context("while verifying WHIR")?; - - let final_adr = calculate_adr(&evaluation_randomness.to_vec()); - let final_mem = calculate_eq( - &request.point_to_evaluate.row, - &evaluation_randomness.to_vec(), - ); - - let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; - - let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) - + final_opening * last_randomness[0]; - - ensure!(evaluated_value == gpa_result.last_sumcheck_value); - - // let mut rs_address: FieldElement = arthur.hint()?; - let gpa_result = gpa_sumcheck_verifier( - &mut arthur, - next_power_of_two(spark_proof.matrix_dimensions.a_nonzero_terms) + 2, - )?; - - let claimed_rs = gpa_result.claimed_values[0]; - let claimed_ws = gpa_result.claimed_values[1]; - - let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - - let rs_adr = arthur.hint()?; - - let mut rs_adr_statement = Statement::::new(next_power_of_two( - spark_proof.matrix_dimensions.a_nonzero_terms, - )); - rs_adr_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - rs_adr, - ); - - let rs_adr_verifier = Verifier::new(&spark_proof.whir_params.a); - rs_adr_verifier - .verify(&mut arthur, &row_commitment, &rs_adr_statement) - .context("while verifying WHIR")?; - - let rs_mem = arthur.hint()?; - - let mut rs_val_statement = Statement::::new(next_power_of_two( - spark_proof.matrix_dimensions.a_nonzero_terms, - )); - rs_val_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - rs_mem, - ); - - let rs_val_verifier = Verifier::new(&spark_proof.whir_params.a); - rs_val_verifier - .verify(&mut arthur, &e_rx_commitment, &rs_val_statement) - .context("while 
verifying WHIR")?; - - let rs_timestamp = arthur.hint()?; - - let mut rs_timestamp_statement = Statement::::new(next_power_of_two( - spark_proof.matrix_dimensions.a_nonzero_terms, - )); - rs_timestamp_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - rs_timestamp, - ); - - let rs_timestamp_verifier = Verifier::new(&spark_proof.whir_params.a); - rs_timestamp_verifier - .verify(&mut arthur, &read_ts_commitment, &rs_timestamp_statement) - .context("while verifying WHIR")?; - - let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; - let ws_opening = - rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; - - let evaluated_value = - rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; + // let + let a_spark_sumcheck_verifier = Verifier::new(&spark_proof.whir_params.a_spark_sumcheck); + a_spark_sumcheck_verifier.verify(&mut arthur, &a_spark_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?; + + // val_verifier + // .verify(&mut arthur, &val_commitment, &val_statement_verifier) + // .context("while verifying WHIR")?; + + // let mut tau_and_gamma = [FieldElement::from(0); 2]; + // arthur.fill_challenge_scalars(&mut tau_and_gamma)?; + // let tau = tau_and_gamma[0]; + // let gamma = tau_and_gamma[1]; + + // let gpa_result = gpa_sumcheck_verifier( + // &mut arthur, + // next_power_of_two(spark_proof.matrix_dimensions.num_rows) + 2, + // )?; + + // let claimed_init = gpa_result.claimed_values[0]; + // let claimed_final = gpa_result.claimed_values[1]; + + // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + + // let init_adr = calculate_adr(&evaluation_randomness.to_vec()); + // let init_mem = calculate_eq( + // &request.point_to_evaluate.row, + // &evaluation_randomness.to_vec(), + // ); + // let init_cntr = FieldElement::from(0); + + // let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; + + // let mut final_cntr: FieldElement = arthur.hint()?; + + // let mut final_cntr_statement = + // Statement::::new(next_power_of_two(spark_proof.matrix_dimensions.num_rows)); + // final_cntr_statement.add_constraint( + // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + // final_cntr, + // ); + + // let final_cntr_verifier = Verifier::new(&spark_proof.whir_params.row); + // final_cntr_verifier + // .verify(&mut arthur, &final_row_commitment, &final_cntr_statement) + // .context("while verifying WHIR")?; + + // let final_adr = calculate_adr(&evaluation_randomness.to_vec()); + // let final_mem = calculate_eq( + // &request.point_to_evaluate.row, + // &evaluation_randomness.to_vec(), + // ); + + // let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; + + // let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + // + final_opening * last_randomness[0]; + + // ensure!(evaluated_value == gpa_result.last_sumcheck_value); + + // // let mut rs_address: FieldElement = arthur.hint()?; + // let gpa_result = gpa_sumcheck_verifier( + // &mut arthur, + // next_power_of_two(spark_proof.matrix_dimensions.a_nonzero_terms) + 2, + // )?; + + // let claimed_rs = gpa_result.claimed_values[0]; + // let claimed_ws = gpa_result.claimed_values[1]; + + // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + + // let rs_adr = arthur.hint()?; + + // let mut rs_adr_statement = 
Statement::::new(next_power_of_two( + // spark_proof.matrix_dimensions.a_nonzero_terms, + // )); + // rs_adr_statement.add_constraint( + // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + // rs_adr, + // ); + + // let rs_adr_verifier = Verifier::new(&spark_proof.whir_params.a); + // rs_adr_verifier + // .verify(&mut arthur, &row_commitment, &rs_adr_statement) + // .context("while verifying WHIR")?; + + // let rs_mem = arthur.hint()?; + + // let mut rs_val_statement = Statement::::new(next_power_of_two( + // spark_proof.matrix_dimensions.a_nonzero_terms, + // )); + // rs_val_statement.add_constraint( + // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + // rs_mem, + // ); + + // let rs_val_verifier = Verifier::new(&spark_proof.whir_params.a); + // rs_val_verifier + // .verify(&mut arthur, &e_rx_commitment, &rs_val_statement) + // .context("while verifying WHIR")?; + + // let rs_timestamp = arthur.hint()?; + + // let mut rs_timestamp_statement = Statement::::new(next_power_of_two( + // spark_proof.matrix_dimensions.a_nonzero_terms, + // )); + // rs_timestamp_statement.add_constraint( + // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + // rs_timestamp, + // ); + + // let rs_timestamp_verifier = Verifier::new(&spark_proof.whir_params.a); + // rs_timestamp_verifier + // .verify(&mut arthur, &read_ts_commitment, &rs_timestamp_statement) + // .context("while verifying WHIR")?; - ensure!(evaluated_value == gpa_result.last_sumcheck_value); + // let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; + // let ws_opening = + // rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; - ensure!(claimed_init * claimed_ws == claimed_final * claimed_rs); + // let evaluated_value = + // rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; + + // ensure!(evaluated_value == gpa_result.last_sumcheck_value); + + // ensure!(claimed_init * claimed_ws == claimed_final * claimed_rs); Ok(()) } diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index 6f240e2f..15894b33 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -5,7 +5,7 @@ use { utilities::matrix::SparkMatrix, whir::{commit_to_vector, produce_whir_proof, SPARKWHIRConfigs}, }, - anyhow::Result, + anyhow::{ensure, Result}, itertools::izip, provekit_common::{ skyscraper::SkyscraperSponge, @@ -21,7 +21,7 @@ use { }, whir::{ poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, - whir::{committer::CommitmentWriter, utils::HintSerialize}, + whir::{committer::CommitmentWriter, prover::Prover, statement::{Statement, Weights}, utils::HintSerialize}, }, }; @@ -35,16 +35,18 @@ pub fn prove_spark_for_single_matrix( ) -> Result<()> { let committer_a = CommitmentWriter::new(whir_configs.a.clone()); let committer_row = CommitmentWriter::new(whir_configs.row.clone()); + let a_spark_sumcheck_committer = CommitmentWriter::new(whir_configs.a_spark_sumcheck.clone()); - let val_witness = commit_to_vector(&committer_a, merlin, matrix.coo.val.clone()); - let e_rx_witness = commit_to_vector(&committer_a, merlin, e_values.e_rx.clone()); - let e_ry_witness = commit_to_vector(&committer_a, merlin, e_values.e_ry.clone()); + let val_coeff = EvaluationsList::new(matrix.coo.val.clone()).to_coeffs(); + let e_rx_coeff = EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(); + let e_ry_coeff = EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(); 
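+    // Note: the commit_batch call below folds these coefficient lists into a
+    // single WHIR commitment using powers of one batching challenge rho, so a
+    // single opening proof at the sumcheck point binds every claimed fold at
+    // once; the verifier side mirrors this by combining final_folds with the
+    // same powers of batching_randomness.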
- let final_row_ts_witness = - commit_to_vector(&committer_row, merlin, matrix.timestamps.final_row.clone()); + // let spark_sumcheck_witness = a_spark_sumcheck_committer.commit_batch(merlin, &[val_coeff, e_rx_coeff, e_ry_coeff])?; + let spark_sumcheck_witness = a_spark_sumcheck_committer.commit_batch(merlin, &[val_coeff, e_rx_coeff]).expect("Failed to commit"); + + let final_row_ts_witness = commit_to_vector(&committer_row, merlin, matrix.timestamps.final_row.clone()); let row_witness = commit_to_vector(&committer_a, merlin, matrix.coo.row.clone()); - let read_ts_witness = - commit_to_vector(&committer_a, merlin, matrix.timestamps.read_row.clone()); + let read_ts_witness = commit_to_vector(&committer_a, merlin, matrix.timestamps.read_row.clone()); let mles = [ matrix.coo.val.clone(), @@ -54,133 +56,130 @@ pub fn prove_spark_for_single_matrix( let (sumcheck_final_folds, folding_randomness) = run_spark_sumcheck(merlin, mles, claimed_value)?; - produce_whir_proof( - merlin, - MultilinearPoint(folding_randomness.clone()), - sumcheck_final_folds[0], - whir_configs.a.clone(), - val_witness.clone(), - )?; - - produce_whir_proof( - merlin, - MultilinearPoint(folding_randomness.clone()), - sumcheck_final_folds[1], - whir_configs.b.clone(), - e_rx_witness.clone(), - )?; - - produce_whir_proof( - merlin, - MultilinearPoint(folding_randomness.clone()), - sumcheck_final_folds[2], - whir_configs.c.clone(), - e_ry_witness, - )?; - - // Rowwise - - let mut tau_and_gamma = [FieldElement::from(0); 2]; - merlin.fill_challenge_scalars(&mut tau_and_gamma)?; - let tau = tau_and_gamma[0]; - let gamma = tau_and_gamma[1]; - - let init_address: Vec = (0..memory.eq_rx.len() as u64) - .map(FieldElement::from) - .collect(); - let init_value = memory.eq_rx.clone(); - let init_timestamp = vec![FieldElement::from(0); memory.eq_rx.len()]; - - let init_vec: Vec = izip!(init_address, init_value, init_timestamp) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let final_address: Vec = (0..memory.eq_rx.len() as u64) - .map(FieldElement::from) - .collect(); - let final_value = memory.eq_rx.clone(); - let final_timestamp = matrix.timestamps.final_row.clone(); - - let final_vec: Vec = izip!(final_address, final_value, final_timestamp) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); - - let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - - // TODO: Can I avoid evaluating here? 
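    // The (address, value, timestamp) tuples here are compressed into single
    // field elements with the fingerprint a * gamma^2 + v * gamma + t - tau;
    // for random gamma and tau, two multisets of tuples agree exactly when the
    // grand products of their fingerprints agree (up to a small soundness
    // error), which the run_gpa grand-product arguments are used to check.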
- let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone()) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&final_row_eval)?; - - produce_whir_proof( - merlin, - MultilinearPoint(evaluation_randomness.to_vec()), - final_row_eval, - whir_configs.row.clone(), - final_row_ts_witness, - )?; - - let rs_address = matrix.coo.row.clone(); - let rs_value = e_values.e_rx.clone(); - let rs_timestamp = matrix.timestamps.read_row.clone(); - - let rs_vec: Vec = - izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let ws_address = matrix.coo.row.clone(); - let ws_value = e_values.e_rx.clone(); - let ws_timestamp: Vec = matrix - .timestamps - .read_row - .into_iter() - .map(|a| a + FieldElement::from(1)) - .collect(); - - let ws_vec: Vec = - izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); - - let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - - let rs_address_eval = EvaluationsList::new(rs_address) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&rs_address_eval)?; - produce_whir_proof( - merlin, - MultilinearPoint(evaluation_randomness.to_vec()), - rs_address_eval, - whir_configs.a.clone(), - row_witness.clone(), - )?; - - let rs_value_eval = EvaluationsList::new(rs_value) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&rs_value_eval)?; - produce_whir_proof( - merlin, - MultilinearPoint(evaluation_randomness.to_vec()), - rs_value_eval, - whir_configs.a.clone(), - e_rx_witness.clone(), - )?; - - let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&rs_timestamp_eval)?; - produce_whir_proof( - merlin, - MultilinearPoint(evaluation_randomness.to_vec()), - rs_timestamp_eval, - whir_configs.a.clone(), - read_ts_witness.clone(), - )?; + let mut spark_sumcheck_statement = Statement::::new(folding_randomness.len()); + // let claimed_batched_value = + // sumcheck_final_folds[0] + + // sumcheck_final_folds[1] * spark_sumcheck_witness.batching_randomness + + // sumcheck_final_folds[2] * spark_sumcheck_witness.batching_randomness * spark_sumcheck_witness.batching_randomness; + + let claimed_batched_value = + sumcheck_final_folds[0] + + sumcheck_final_folds[1] * spark_sumcheck_witness.batching_randomness; + + let actual_val = spark_sumcheck_witness.batched_poly().evaluate(&MultilinearPoint(folding_randomness.clone())); + ensure!(actual_val == claimed_batched_value); + println!("{:?}", actual_val); + println!("{:?}", claimed_batched_value); + + spark_sumcheck_statement.add_constraint( + Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value); + + let prover = Prover(whir_configs.a_spark_sumcheck.clone()); + prover.prove(merlin, spark_sumcheck_statement, spark_sumcheck_witness)?; + + // // Rowwise + + // let mut tau_and_gamma = [FieldElement::from(0); 2]; + // merlin.fill_challenge_scalars(&mut tau_and_gamma)?; + // let tau = tau_and_gamma[0]; + // let gamma = tau_and_gamma[1]; + + // let init_address: Vec = (0..memory.eq_rx.len() as u64) + // .map(FieldElement::from) + // .collect(); + // let init_value = memory.eq_rx.clone(); + // let init_timestamp = vec![FieldElement::from(0); 
memory.eq_rx.len()]; + + // let init_vec: Vec = izip!(init_address, init_value, init_timestamp) + // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + // .collect(); + + // let final_address: Vec = (0..memory.eq_rx.len() as u64) + // .map(FieldElement::from) + // .collect(); + // let final_value = memory.eq_rx.clone(); + // let final_timestamp = matrix.timestamps.final_row.clone(); + + // let final_vec: Vec = izip!(final_address, final_value, final_timestamp) + // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + // .collect(); + + // let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); + + // let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + + // // TODO: Can I avoid evaluating here? + // let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone()) + // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + // merlin.hint(&final_row_eval)?; + + // produce_whir_proof( + // merlin, + // MultilinearPoint(evaluation_randomness.to_vec()), + // final_row_eval, + // whir_configs.row.clone(), + // final_row_ts_witness, + // )?; + + // let rs_address = matrix.coo.row.clone(); + // let rs_value = e_values.e_rx.clone(); + // let rs_timestamp = matrix.timestamps.read_row.clone(); + + // let rs_vec: Vec = + // izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) + // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + // .collect(); + + // let ws_address = matrix.coo.row.clone(); + // let ws_value = e_values.e_rx.clone(); + // let ws_timestamp: Vec = matrix + // .timestamps + // .read_row + // .into_iter() + // .map(|a| a + FieldElement::from(1)) + // .collect(); + + // let ws_vec: Vec = + // izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) + // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + // .collect(); + + // let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); + + // let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + + // let rs_address_eval = EvaluationsList::new(rs_address) + // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + // merlin.hint(&rs_address_eval)?; + // produce_whir_proof( + // merlin, + // MultilinearPoint(evaluation_randomness.to_vec()), + // rs_address_eval, + // whir_configs.a.clone(), + // row_witness.clone(), + // )?; + + // let rs_value_eval = EvaluationsList::new(rs_value) + // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + // merlin.hint(&rs_value_eval)?; + // produce_whir_proof( + // merlin, + // MultilinearPoint(evaluation_randomness.to_vec()), + // rs_value_eval, + // whir_configs.a.clone(), + // e_rx_witness.clone(), + // )?; + + // let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) + // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + // merlin.hint(&rs_timestamp_eval)?; + // produce_whir_proof( + // merlin, + // MultilinearPoint(evaluation_randomness.to_vec()), + // rs_timestamp_eval, + // whir_configs.a.clone(), + // read_ts_witness.clone(), + // )?; Ok(()) } diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs index b071fbfc..ac05f7ab 100644 --- a/spark-prover/src/utilities/iopattern/mod.rs +++ b/spark-prover/src/utilities/iopattern/mod.rs @@ -30,43 +30,39 @@ where pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { let mut io = IOPattern::new("💥") - .commit_statement(&configs.a) - .commit_statement(&configs.a) - 
.commit_statement(&configs.a) + .commit_statement(&configs.a_spark_sumcheck) .commit_statement(&configs.row) .commit_statement(&configs.a) .commit_statement(&configs.a) .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) .hint("sumcheck_last_folds") - .add_whir_proof(&configs.a) - .add_whir_proof(&configs.a) - .add_whir_proof(&configs.a) - .add_tau_and_gamma(); + .add_whir_proof(&configs.a_spark_sumcheck); + // .add_tau_and_gamma(); - for i in 0..=next_power_of_two(r1cs.a.num_rows) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } + // for i in 0..=next_power_of_two(r1cs.a.num_rows) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } - io = io - .hint("Row final counter claimed evaluation") - .add_whir_proof(&configs.row); + // io = io + // .hint("Row final counter claimed evaluation") + // .add_whir_proof(&configs.row); - for i in 0..=next_power_of_two(r1cs.a.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } + // for i in 0..=next_power_of_two(r1cs.a.num_entries()) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } - io = io - .hint("RS address claimed evaluation") - .add_whir_proof(&configs.a); + // io = io + // .hint("RS address claimed evaluation") + // .add_whir_proof(&configs.a); - io = io - .hint("RS value claimed evaluation") - .add_whir_proof(&configs.a); + // io = io + // .hint("RS value claimed evaluation") + // .add_whir_proof(&configs.a); - io = io - .hint("RS timestamp claimed evaluation") - .add_whir_proof(&configs.a); + // io = io + // .hint("RS timestamp claimed evaluation") + // .add_whir_proof(&configs.a); io } diff --git a/spark-prover/src/whir.rs b/spark-prover/src/whir.rs index 7fcbc3c0..100a7547 100644 --- a/spark-prover/src/whir.rs +++ b/spark-prover/src/whir.rs @@ -41,6 +41,7 @@ pub struct SPARKWHIRConfigs { pub a: WhirConfig, pub b: WhirConfig, pub c: WhirConfig, + pub a_spark_sumcheck: WhirConfig, } pub fn create_whir_configs(r1cs: &R1CS) -> SPARKWHIRConfigs { @@ -50,6 +51,7 @@ pub fn create_whir_configs(r1cs: &R1CS) -> SPARKWHIRConfigs { a: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 1), b: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.b.num_entries()), 1), c: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.c.num_entries()), 1), + a_spark_sumcheck: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 2), } } From f5ca88d17e5654a747c1abd84ee715287ca6efef Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Wed, 17 Sep 2025 16:10:06 +0800 Subject: [PATCH 04/34] Adds test --- spark-prover/src/bin/test-batched-whir.rs | 35 +++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 spark-prover/src/bin/test-batched-whir.rs diff --git a/spark-prover/src/bin/test-batched-whir.rs b/spark-prover/src/bin/test-batched-whir.rs new file mode 100644 index 00000000..056b60af --- /dev/null +++ b/spark-prover/src/bin/test-batched-whir.rs @@ -0,0 +1,35 @@ +use provekit_common::{FieldElement, IOPattern, WhirR1CSScheme}; +use provekit_r1cs_compiler::WhirR1CSSchemeBuilder; +use whir::{poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, whir::{committer::{CommitmentReader, CommitmentWriter}, domainsep::WhirDomainSeparator, prover::Prover, statement::{Statement, Weights}, verifier::Verifier}}; +use anyhow::Result; + +fn main() -> Result<()> { + let whir_config = WhirR1CSScheme::new_whir_config_for_size(6, 2); + let mut io = 
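+    // A minimal smoke test for batched WHIR: commit two constant multilinear
+    // polynomials in one batch, prove a single evaluation claim against the
+    // batched polynomial, then replay the transcript and verify. The batched
+    // value at any point is poly1 + rho * poly2, where rho is the batching
+    // randomness (computed below as claimed_ans).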
IOPattern::new("💥") + .commit_statement(&whir_config) + .add_whir_proof(&whir_config); + let mut merlin = io.to_prover_state(); + + let poly1 = EvaluationsList::new([FieldElement::from(1); 64].to_vec()).to_coeffs(); + let poly2 = EvaluationsList::new([FieldElement::from(2); 64].to_vec()).to_coeffs(); + let committer = CommitmentWriter::new(whir_config.clone()); + let witness = committer.commit_batch(&mut merlin, &[poly1, poly2]).expect("Failed to commit"); + + let mut statement = Statement::::new(6); + statement.add_constraint(Weights::evaluation(MultilinearPoint([FieldElement::from(0); 6].to_vec())), FieldElement::from(3)); + let prover = Prover(whir_config.clone()); + let proof = prover.prove(&mut merlin, statement.clone(), witness.clone())?; + + + let mut arthur = io.to_verifier_state(merlin.narg_string()); + let commitment_reader = CommitmentReader::new(&whir_config); + let commitment = commitment_reader.parse_commitment(&mut arthur)?; + + let claimed_ans = FieldElement::from(1) + FieldElement::from(2) * commitment.batching_randomness; + let actual_ans = witness.batched_poly().evaluate(&MultilinearPoint([FieldElement::from(0); 6].to_vec())); + + let verifier = Verifier::new(&whir_config); + verifier.verify(&mut arthur, &commitment, &statement)?; + + Ok(()) +} \ No newline at end of file From eb30b9f36018aac1646a5969131ffe75081df6d0 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Wed, 17 Sep 2025 17:38:16 +0800 Subject: [PATCH 05/34] Creates variable for number of variables --- spark-prover/src/bin/test-batched-whir.rs | 30 ++++++++++++++++++----- 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/spark-prover/src/bin/test-batched-whir.rs b/spark-prover/src/bin/test-batched-whir.rs index 056b60af..05e5d2f2 100644 --- a/spark-prover/src/bin/test-batched-whir.rs +++ b/spark-prover/src/bin/test-batched-whir.rs @@ -4,19 +4,35 @@ use whir::{poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, use anyhow::Result; fn main() -> Result<()> { - let whir_config = WhirR1CSScheme::new_whir_config_for_size(6, 2); + const NUM_VARIABLES: usize = 5; // Change this + + let whir_config = WhirR1CSScheme::new_whir_config_for_size(NUM_VARIABLES, 2); let mut io = IOPattern::new("💥") .commit_statement(&whir_config) .add_whir_proof(&whir_config); let mut merlin = io.to_prover_state(); - let poly1 = EvaluationsList::new([FieldElement::from(1); 64].to_vec()).to_coeffs(); - let poly2 = EvaluationsList::new([FieldElement::from(2); 64].to_vec()).to_coeffs(); + let poly1 = EvaluationsList::new([FieldElement::from(1); 1<::new(6); - statement.add_constraint(Weights::evaluation(MultilinearPoint([FieldElement::from(0); 6].to_vec())), FieldElement::from(3)); + println!("{:?}", witness.batched_poly()); + + // let actual_ans = witness.batched_poly().evaluate(&MultilinearPoint([FieldElement::from(0); 7].to_vec())); + + let mut statement = Statement::::new(NUM_VARIABLES); + + let weight = Weights::linear(EvaluationsList::new([FieldElement::from(0); 1< Result<()> { let commitment = commitment_reader.parse_commitment(&mut arthur)?; let claimed_ans = FieldElement::from(1) + FieldElement::from(2) * commitment.batching_randomness; - let actual_ans = witness.batched_poly().evaluate(&MultilinearPoint([FieldElement::from(0); 6].to_vec())); + + // println!("{:?}", claimed_ans); + // println!("{:?}", actual_ans); let verifier = Verifier::new(&whir_config); verifier.verify(&mut arthur, &commitment, &statement)?; From a9bd8f96a65712dc0f093b644d2139c354f41570 Mon Sep 17 00:00:00 2001 From: Batmend 
Batsaikhan Date: Thu, 18 Sep 2025 10:32:50 +0800 Subject: [PATCH 06/34] Adds batched WHIR to sumcheck+rowwise --- spark-prover/src/bin/generate_test_r1cs.rs | 2 +- spark-prover/src/bin/spark-verifier.rs | 188 ++++++++---------- spark-prover/src/spark.rs | 200 +++++++++++--------- spark-prover/src/utilities/iopattern/mod.rs | 41 ++-- spark-prover/src/whir.rs | 2 +- 5 files changed, 205 insertions(+), 228 deletions(-) diff --git a/spark-prover/src/bin/generate_test_r1cs.rs b/spark-prover/src/bin/generate_test_r1cs.rs index 5c0c3fc7..298b489f 100644 --- a/spark-prover/src/bin/generate_test_r1cs.rs +++ b/spark-prover/src/bin/generate_test_r1cs.rs @@ -8,7 +8,7 @@ fn main() { r1cs.grow_matrices(1024, 512); let interned_1 = r1cs.interner.intern(FieldElement::from(1)); - for i in 0..64 { + for i in 0..256 { r1cs.a.set(i, i, interned_1); r1cs.b.set(i, i, interned_1); r1cs.c.set(i, i, interned_1); diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index 33723ef2..90cb0d0a 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -40,15 +40,13 @@ fn main() -> Result<()> { let io = IOPattern::from_string(spark_proof.io_pattern); let mut arthur = io.to_verifier_state(&spark_proof.transcript); - let commitment_reader = CommitmentReader::new(&spark_proof.whir_params.a); let commitment_reader_row = CommitmentReader::new(&spark_proof.whir_params.row); let a_spark_sumcheck_commitment_reader = CommitmentReader::new(&spark_proof.whir_params.a_spark_sumcheck); let a_spark_sumcheck_commitment = a_spark_sumcheck_commitment_reader.parse_commitment(&mut arthur)?; + let a_spark_rowwise_commitment = a_spark_sumcheck_commitment_reader.parse_commitment(&mut arthur)?; let final_row_commitment = commitment_reader_row.parse_commitment(&mut arthur).unwrap(); - let row_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); - let read_ts_commitment = commitment_reader.parse_commitment(&mut arthur).unwrap(); let (randomness, last_sumcheck_value) = run_sumcheck_verifier_spark( &mut arthur, @@ -59,147 +57,111 @@ fn main() -> Result<()> { let final_folds: Vec = arthur.hint()?; + assert!(last_sumcheck_value == final_folds[0] * final_folds[1] * final_folds[2]); + let mut a_spark_sumcheck_statement_verifier = Statement::::new(next_power_of_two( spark_proof.matrix_dimensions.a_nonzero_terms, )); - // a_spark_sumcheck_statement_verifier.add_constraint( - // Weights::evaluation(MultilinearPoint(randomness.clone())), - // final_folds[0] + - // final_folds[1] * a_spark_sumcheck_commitment.batching_randomness + - // final_folds[2] * a_spark_sumcheck_commitment.batching_randomness * a_spark_sumcheck_commitment.batching_randomness, - // ); - println!("{:?}", final_folds[0] + final_folds[1] * a_spark_sumcheck_commitment.batching_randomness); - a_spark_sumcheck_statement_verifier.add_constraint( Weights::evaluation(MultilinearPoint(randomness.clone())), - final_folds[0] + final_folds[1] * a_spark_sumcheck_commitment.batching_randomness, + final_folds[0] + + final_folds[1] * a_spark_sumcheck_commitment.batching_randomness + + final_folds[2] * a_spark_sumcheck_commitment.batching_randomness * a_spark_sumcheck_commitment.batching_randomness, ); - // let + let a_spark_sumcheck_verifier = Verifier::new(&spark_proof.whir_params.a_spark_sumcheck); a_spark_sumcheck_verifier.verify(&mut arthur, &a_spark_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?; - // val_verifier - // .verify(&mut arthur, &val_commitment, &val_statement_verifier) - 
// .context("while verifying WHIR")?; - - // let mut tau_and_gamma = [FieldElement::from(0); 2]; - // arthur.fill_challenge_scalars(&mut tau_and_gamma)?; - // let tau = tau_and_gamma[0]; - // let gamma = tau_and_gamma[1]; - - // let gpa_result = gpa_sumcheck_verifier( - // &mut arthur, - // next_power_of_two(spark_proof.matrix_dimensions.num_rows) + 2, - // )?; - - // let claimed_init = gpa_result.claimed_values[0]; - // let claimed_final = gpa_result.claimed_values[1]; - - // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - - // let init_adr = calculate_adr(&evaluation_randomness.to_vec()); - // let init_mem = calculate_eq( - // &request.point_to_evaluate.row, - // &evaluation_randomness.to_vec(), - // ); - // let init_cntr = FieldElement::from(0); + // Rowwise - // let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; + let mut tau_and_gamma = [FieldElement::from(0); 2]; + arthur.fill_challenge_scalars(&mut tau_and_gamma)?; + let tau = tau_and_gamma[0]; + let gamma = tau_and_gamma[1]; - // let mut final_cntr: FieldElement = arthur.hint()?; + let gpa_result = gpa_sumcheck_verifier( + &mut arthur, + next_power_of_two(spark_proof.matrix_dimensions.num_rows) + 2, + )?; - // let mut final_cntr_statement = - // Statement::::new(next_power_of_two(spark_proof.matrix_dimensions.num_rows)); - // final_cntr_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - // final_cntr, - // ); + let claimed_init = gpa_result.claimed_values[0]; + let claimed_final = gpa_result.claimed_values[1]; + + let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + + let init_adr = calculate_adr(&evaluation_randomness.to_vec()); + let init_mem = calculate_eq( + &request.point_to_evaluate.row, + &evaluation_randomness.to_vec(), + ); + let init_cntr = FieldElement::from(0); - // let final_cntr_verifier = Verifier::new(&spark_proof.whir_params.row); - // final_cntr_verifier - // .verify(&mut arthur, &final_row_commitment, &final_cntr_statement) - // .context("while verifying WHIR")?; + let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; - // let final_adr = calculate_adr(&evaluation_randomness.to_vec()); - // let final_mem = calculate_eq( - // &request.point_to_evaluate.row, - // &evaluation_randomness.to_vec(), - // ); + let final_cntr: FieldElement = arthur.hint()?; - // let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; + let mut final_cntr_statement = + Statement::::new(next_power_of_two(spark_proof.matrix_dimensions.num_rows)); + final_cntr_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + final_cntr, + ); - // let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) - // + final_opening * last_randomness[0]; + let final_cntr_verifier = Verifier::new(&spark_proof.whir_params.row); + final_cntr_verifier + .verify(&mut arthur, &final_row_commitment, &final_cntr_statement) + .context("while verifying WHIR")?; - // ensure!(evaluated_value == gpa_result.last_sumcheck_value); + let final_adr = calculate_adr(&evaluation_randomness.to_vec()); + let final_mem = calculate_eq( + &request.point_to_evaluate.row, + &evaluation_randomness.to_vec(), + ); - // // let mut rs_address: FieldElement = arthur.hint()?; - // let gpa_result = gpa_sumcheck_verifier( - // &mut arthur, - // next_power_of_two(spark_proof.matrix_dimensions.a_nonzero_terms) + 
2, - // )?; + let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; - // let claimed_rs = gpa_result.claimed_values[0]; - // let claimed_ws = gpa_result.claimed_values[1]; + let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + + final_opening * last_randomness[0]; - // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + ensure!(evaluated_value == gpa_result.last_sumcheck_value); - // let rs_adr = arthur.hint()?; + let gpa_result = gpa_sumcheck_verifier( + &mut arthur, + next_power_of_two(spark_proof.matrix_dimensions.a_nonzero_terms) + 2, + )?; - // let mut rs_adr_statement = Statement::::new(next_power_of_two( - // spark_proof.matrix_dimensions.a_nonzero_terms, - // )); - // rs_adr_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - // rs_adr, - // ); + let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - // let rs_adr_verifier = Verifier::new(&spark_proof.whir_params.a); - // rs_adr_verifier - // .verify(&mut arthur, &row_commitment, &rs_adr_statement) - // .context("while verifying WHIR")?; + let claimed_rs = gpa_result.claimed_values[0]; + let claimed_ws = gpa_result.claimed_values[1]; - // let rs_mem = arthur.hint()?; + let rs_adr: FieldElement = arthur.hint()?; + let rs_mem: FieldElement = arthur.hint()?; + let rs_timestamp: FieldElement = arthur.hint()?; - // let mut rs_val_statement = Statement::::new(next_power_of_two( - // spark_proof.matrix_dimensions.a_nonzero_terms, - // )); - // rs_val_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - // rs_mem, - // ); + let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; + let ws_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; + + let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) + + ws_opening * last_randomness[0]; - // let rs_val_verifier = Verifier::new(&spark_proof.whir_params.a); - // rs_val_verifier - // .verify(&mut arthur, &e_rx_commitment, &rs_val_statement) - // .context("while verifying WHIR")?; + ensure!(evaluated_value == gpa_result.last_sumcheck_value); - // let rs_timestamp = arthur.hint()?; + let mut a_spark_rowwise_statement_verifier = Statement::::new(next_power_of_two( + spark_proof.matrix_dimensions.a_nonzero_terms, + )); - // let mut rs_timestamp_statement = Statement::::new(next_power_of_two( - // spark_proof.matrix_dimensions.a_nonzero_terms, - // )); - // rs_timestamp_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - // rs_timestamp, - // ); - - // let rs_timestamp_verifier = Verifier::new(&spark_proof.whir_params.a); - // rs_timestamp_verifier - // .verify(&mut arthur, &read_ts_commitment, &rs_timestamp_statement) - // .context("while verifying WHIR")?; + a_spark_rowwise_statement_verifier.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + rs_adr + + rs_mem * a_spark_rowwise_commitment.batching_randomness + + rs_timestamp * a_spark_rowwise_commitment.batching_randomness * a_spark_rowwise_commitment.batching_randomness, + ); - // let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; - // let ws_opening = - // rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; + a_spark_sumcheck_verifier.verify(&mut arthur, 
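+        // the sumcheck and rowwise commitments share the same WhirConfig, so
+        // one Verifier instance can check both batched openings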
&a_spark_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; - // let evaluated_value = - // rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; - - // ensure!(evaluated_value == gpa_result.last_sumcheck_value); - - // ensure!(claimed_init * claimed_ws == claimed_final * claimed_rs); + ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); Ok(()) } diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index 15894b33..bc4c88e5 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -41,116 +41,136 @@ pub fn prove_spark_for_single_matrix( let e_rx_coeff = EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(); let e_ry_coeff = EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(); - // let spark_sumcheck_witness = a_spark_sumcheck_committer.commit_batch(merlin, &[val_coeff, e_rx_coeff, e_ry_coeff])?; - let spark_sumcheck_witness = a_spark_sumcheck_committer.commit_batch(merlin, &[val_coeff, e_rx_coeff]).expect("Failed to commit"); + let spark_sumcheck_witness = a_spark_sumcheck_committer.commit_batch(merlin, &[val_coeff, e_rx_coeff, e_ry_coeff])?; + + let row_addr_coeff = EvaluationsList::new(matrix.coo.row.clone()).to_coeffs(); + let row_val_coeff = EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(); + let row_timestamp_coeff = EvaluationsList::new(matrix.timestamps.read_row.clone()).to_coeffs(); + + let spark_rowwise_witness = a_spark_sumcheck_committer.commit_batch(merlin, &[row_addr_coeff, row_val_coeff, row_timestamp_coeff])?; let final_row_ts_witness = commit_to_vector(&committer_row, merlin, matrix.timestamps.final_row.clone()); - let row_witness = commit_to_vector(&committer_a, merlin, matrix.coo.row.clone()); - let read_ts_witness = commit_to_vector(&committer_a, merlin, matrix.timestamps.read_row.clone()); let mles = [ matrix.coo.val.clone(), e_values.e_rx.clone(), e_values.e_ry.clone(), ]; + let (sumcheck_final_folds, folding_randomness) = run_spark_sumcheck(merlin, mles, claimed_value)?; let mut spark_sumcheck_statement = Statement::::new(folding_randomness.len()); - // let claimed_batched_value = - // sumcheck_final_folds[0] + - // sumcheck_final_folds[1] * spark_sumcheck_witness.batching_randomness + - // sumcheck_final_folds[2] * spark_sumcheck_witness.batching_randomness * spark_sumcheck_witness.batching_randomness; - + let claimed_batched_value = sumcheck_final_folds[0] + - sumcheck_final_folds[1] * spark_sumcheck_witness.batching_randomness; - - let actual_val = spark_sumcheck_witness.batched_poly().evaluate(&MultilinearPoint(folding_randomness.clone())); - ensure!(actual_val == claimed_batched_value); - println!("{:?}", actual_val); - println!("{:?}", claimed_batched_value); + sumcheck_final_folds[1] * spark_sumcheck_witness.batching_randomness + + sumcheck_final_folds[2] * spark_sumcheck_witness.batching_randomness * spark_sumcheck_witness.batching_randomness; spark_sumcheck_statement.add_constraint( Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value); - let prover = Prover(whir_configs.a_spark_sumcheck.clone()); - prover.prove(merlin, spark_sumcheck_statement, spark_sumcheck_witness)?; - - // // Rowwise - - // let mut tau_and_gamma = [FieldElement::from(0); 2]; - // merlin.fill_challenge_scalars(&mut tau_and_gamma)?; - // let tau = tau_and_gamma[0]; - // let gamma = tau_and_gamma[1]; - - // let init_address: Vec = (0..memory.eq_rx.len() as u64) - // .map(FieldElement::from) - // .collect(); - // let init_value = memory.eq_rx.clone(); - // 
let init_timestamp = vec![FieldElement::from(0); memory.eq_rx.len()]; - - // let init_vec: Vec = izip!(init_address, init_value, init_timestamp) - // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - // .collect(); - - // let final_address: Vec = (0..memory.eq_rx.len() as u64) - // .map(FieldElement::from) - // .collect(); - // let final_value = memory.eq_rx.clone(); - // let final_timestamp = matrix.timestamps.final_row.clone(); - - // let final_vec: Vec = izip!(final_address, final_value, final_timestamp) - // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - // .collect(); - - // let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); - - // let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - - // // TODO: Can I avoid evaluating here? - // let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone()) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&final_row_eval)?; - - // produce_whir_proof( - // merlin, - // MultilinearPoint(evaluation_randomness.to_vec()), - // final_row_eval, - // whir_configs.row.clone(), - // final_row_ts_witness, - // )?; - - // let rs_address = matrix.coo.row.clone(); - // let rs_value = e_values.e_rx.clone(); - // let rs_timestamp = matrix.timestamps.read_row.clone(); - - // let rs_vec: Vec = - // izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) - // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - // .collect(); + let sumcheck_prover = Prover(whir_configs.a_spark_sumcheck.clone()); + sumcheck_prover.prove(merlin, spark_sumcheck_statement, spark_sumcheck_witness)?; + + // Rowwise + + let mut tau_and_gamma = [FieldElement::from(0); 2]; + merlin.fill_challenge_scalars(&mut tau_and_gamma)?; + let tau = tau_and_gamma[0]; + let gamma = tau_and_gamma[1]; + + let init_address: Vec = (0..memory.eq_rx.len() as u64) + .map(FieldElement::from) + .collect(); + let init_value = memory.eq_rx.clone(); + let init_timestamp = vec![FieldElement::from(0); memory.eq_rx.len()]; + + let init_vec: Vec = izip!(init_address, init_value, init_timestamp) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); + + let final_address: Vec = (0..memory.eq_rx.len() as u64) + .map(FieldElement::from) + .collect(); + let final_value = memory.eq_rx.clone(); + let final_timestamp = matrix.timestamps.final_row.clone(); + + let final_vec: Vec = izip!(final_address, final_value, final_timestamp) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); + + let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); + + let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + + // TODO: Can I avoid evaluating here? 
+ let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone()) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&final_row_eval)?; + + produce_whir_proof( + merlin, + MultilinearPoint(evaluation_randomness.to_vec()), + final_row_eval, + whir_configs.row.clone(), + final_row_ts_witness, + )?; + + let rs_address = matrix.coo.row.clone(); + let rs_value = e_values.e_rx.clone(); + let rs_timestamp = matrix.timestamps.read_row.clone(); + + let rs_vec: Vec = + izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); + + let ws_address = matrix.coo.row.clone(); + let ws_value = e_values.e_rx.clone(); + let ws_timestamp: Vec = matrix + .timestamps + .read_row + .into_iter() + .map(|a| a + FieldElement::from(1)) + .collect(); + + let ws_vec: Vec = + izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); + + let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); + + let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + + let rs_address_eval = EvaluationsList::new(rs_address) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_address_eval)?; + + let rs_value_eval = EvaluationsList::new(rs_value) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_value_eval)?; - // let ws_address = matrix.coo.row.clone(); - // let ws_value = e_values.e_rx.clone(); - // let ws_timestamp: Vec = matrix - // .timestamps - // .read_row - // .into_iter() - // .map(|a| a + FieldElement::from(1)) - // .collect(); + let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_timestamp_eval)?; - // let ws_vec: Vec = - // izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) - // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - // .collect(); + let mut spark_rowwise_statement = Statement::::new(evaluation_randomness.len()); - // let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); + let claimed_rowwise_eval = + rs_address_eval + + rs_value_eval * spark_rowwise_witness.batching_randomness + + rs_timestamp_eval * spark_rowwise_witness.batching_randomness * spark_rowwise_witness.batching_randomness; - // let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + assert!(claimed_rowwise_eval == spark_rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); - // let rs_address_eval = EvaluationsList::new(rs_address) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&rs_address_eval)?; + spark_rowwise_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); + + let sumcheck_prover = Prover(whir_configs.a_spark_sumcheck.clone()); + sumcheck_prover.prove(merlin, spark_rowwise_statement, spark_rowwise_witness)?; + // produce_whir_proof( // merlin, // MultilinearPoint(evaluation_randomness.to_vec()), @@ -159,9 +179,6 @@ pub fn prove_spark_for_single_matrix( // row_witness.clone(), // )?; - // let rs_value_eval = EvaluationsList::new(rs_value) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&rs_value_eval)?; // produce_whir_proof( // merlin, // 
MultilinearPoint(evaluation_randomness.to_vec()), @@ -170,9 +187,6 @@ pub fn prove_spark_for_single_matrix( // e_rx_witness.clone(), // )?; - // let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&rs_timestamp_eval)?; // produce_whir_proof( // merlin, // MultilinearPoint(evaluation_randomness.to_vec()), diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs index ac05f7ab..6235957c 100644 --- a/spark-prover/src/utilities/iopattern/mod.rs +++ b/spark-prover/src/utilities/iopattern/mod.rs @@ -30,39 +30,40 @@ where pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { let mut io = IOPattern::new("💥") + .commit_statement(&configs.a_spark_sumcheck) .commit_statement(&configs.a_spark_sumcheck) .commit_statement(&configs.row) - .commit_statement(&configs.a) - .commit_statement(&configs.a) .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) .hint("sumcheck_last_folds") .add_whir_proof(&configs.a_spark_sumcheck); - // .add_tau_and_gamma(); + + io = io.add_tau_and_gamma(); - // for i in 0..=next_power_of_two(r1cs.a.num_rows) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.a.num_rows) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("Row final counter claimed evaluation") - // .add_whir_proof(&configs.row); + io = io + .hint("Row final counter claimed evaluation") + .add_whir_proof(&configs.row); - // for i in 0..=next_power_of_two(r1cs.a.num_entries()) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.a.num_entries()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("RS address claimed evaluation") - // .add_whir_proof(&configs.a); + io = io + .hint("RS address claimed evaluation") + .hint("RS value claimed evaluation") + .hint("RS timestamp claimed evaluation") + .add_whir_proof(&configs.a_spark_sumcheck); // io = io - // .hint("RS value claimed evaluation") + // .add_whir_proof(&configs.a); + // .add_whir_proof(&configs.a); // .add_whir_proof(&configs.a); // io = io - // .hint("RS timestamp claimed evaluation") - // .add_whir_proof(&configs.a); io } diff --git a/spark-prover/src/whir.rs b/spark-prover/src/whir.rs index 100a7547..5580ae80 100644 --- a/spark-prover/src/whir.rs +++ b/spark-prover/src/whir.rs @@ -51,7 +51,7 @@ pub fn create_whir_configs(r1cs: &R1CS) -> SPARKWHIRConfigs { a: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 1), b: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.b.num_entries()), 1), c: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.c.num_entries()), 1), - a_spark_sumcheck: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 2), + a_spark_sumcheck: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 3), } } From 6a67ee07e11b76dc5d8adbc32c2b5a5c189e2084 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Thu, 18 Sep 2025 16:37:38 +0800 Subject: [PATCH 07/34] Adds B and C matrices --- spark-prover/src/bin/spark-verifier.rs | 224 ++++++++++++++++---- spark-prover/src/bin/test-batched-whir.rs | 53 ----- spark-prover/src/main.rs | 20 +- spark-prover/src/spark.rs | 203 +++++++++++++----- spark-prover/src/utilities/iopattern/mod.rs | 165 +++++++++++++- 
spark-prover/src/whir.rs | 8 +- 6 files changed, 512 insertions(+), 161 deletions(-) delete mode 100644 spark-prover/src/bin/test-batched-whir.rs diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index 90cb0d0a..e64c4a4f 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -7,7 +7,7 @@ use { next_power_of_two, sumcheck::{calculate_eq, eval_cubic_poly}, }, - FieldElement, IOPattern, + FieldElement, IOPattern, WhirConfig, }, spark_prover::utilities::{SPARKProof, SPARKRequest}, spongefish::{ @@ -37,43 +37,101 @@ fn main() -> Result<()> { let request: SPARKRequest = serde_json::from_str(&request_json_str) .context("Error: Failed to deserialize JSON to R1CS")?; - let io = IOPattern::from_string(spark_proof.io_pattern); + let io = IOPattern::from_string(spark_proof.io_pattern.clone()); let mut arthur = io.to_verifier_state(&spark_proof.transcript); - let commitment_reader_row = CommitmentReader::new(&spark_proof.whir_params.row); - let a_spark_sumcheck_commitment_reader = CommitmentReader::new(&spark_proof.whir_params.a_spark_sumcheck); + verify_spark_single_matrix( + &spark_proof.whir_params.row, + &spark_proof.whir_params.col, + &spark_proof.whir_params.a_3batched, + spark_proof.matrix_dimensions.num_rows, + spark_proof.matrix_dimensions.num_cols, + spark_proof.matrix_dimensions.a_nonzero_terms, + &mut arthur, + &request, + &request.claimed_values.a, + )?; + + verify_spark_single_matrix( + &spark_proof.whir_params.row, + &spark_proof.whir_params.col, + &spark_proof.whir_params.b_3batched, + spark_proof.matrix_dimensions.num_rows, + spark_proof.matrix_dimensions.num_cols, + spark_proof.matrix_dimensions.b_nonzero_terms, + &mut arthur, + &request, + &request.claimed_values.b, + )?; + + verify_spark_single_matrix( + &spark_proof.whir_params.row, + &spark_proof.whir_params.col, + &spark_proof.whir_params.c_3batched, + spark_proof.matrix_dimensions.num_rows, + spark_proof.matrix_dimensions.num_cols, + spark_proof.matrix_dimensions.c_nonzero_terms, + &mut arthur, + &request, + &request.claimed_values.c, + )?; + + Ok(()) +} + +pub fn verify_spark_single_matrix( + row_config: &WhirConfig, + col_config: &WhirConfig, + num_nonzero_term_batched3_config: &WhirConfig, + num_rows: usize, + num_cols: usize, + num_nonzero_terms: usize, + arthur: &mut VerifierState, + request: &SPARKRequest, + claimed_value: &FieldElement, +) -> Result<()> { + let commitment_reader_row = CommitmentReader::new(row_config); + let commitment_reader_col = CommitmentReader::new(col_config); + + // Matrix A - let a_spark_sumcheck_commitment = a_spark_sumcheck_commitment_reader.parse_commitment(&mut arthur)?; - let a_spark_rowwise_commitment = a_spark_sumcheck_commitment_reader.parse_commitment(&mut arthur)?; + let a_3batched_commitment_reader = CommitmentReader::new(num_nonzero_term_batched3_config); + + let a_sumcheck_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; + let a_rowwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; + let a_colwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; - let final_row_commitment = commitment_reader_row.parse_commitment(&mut arthur).unwrap(); + let a_row_finalts_commitment = commitment_reader_row.parse_commitment(arthur).unwrap(); + let a_col_finalts_commitment = commitment_reader_col.parse_commitment(arthur).unwrap(); + + // Matrix A - Sumcheck - let (randomness, last_sumcheck_value) = run_sumcheck_verifier_spark( - &mut arthur, - 
next_power_of_two(spark_proof.matrix_dimensions.a_nonzero_terms), - request.claimed_values.a, + let (randomness, a_last_sumcheck_value) = run_sumcheck_verifier_spark( + arthur, + next_power_of_two(num_nonzero_terms), + *claimed_value, ) .context("While verifying SPARK sumcheck")?; let final_folds: Vec = arthur.hint()?; - assert!(last_sumcheck_value == final_folds[0] * final_folds[1] * final_folds[2]); + assert!(a_last_sumcheck_value == final_folds[0] * final_folds[1] * final_folds[2]); let mut a_spark_sumcheck_statement_verifier = Statement::::new(next_power_of_two( - spark_proof.matrix_dimensions.a_nonzero_terms, + num_nonzero_terms, )); a_spark_sumcheck_statement_verifier.add_constraint( Weights::evaluation(MultilinearPoint(randomness.clone())), final_folds[0] + - final_folds[1] * a_spark_sumcheck_commitment.batching_randomness + - final_folds[2] * a_spark_sumcheck_commitment.batching_randomness * a_spark_sumcheck_commitment.batching_randomness, + final_folds[1] * a_sumcheck_commitment.batching_randomness + + final_folds[2] * a_sumcheck_commitment.batching_randomness * a_sumcheck_commitment.batching_randomness, ); - let a_spark_sumcheck_verifier = Verifier::new(&spark_proof.whir_params.a_spark_sumcheck); - a_spark_sumcheck_verifier.verify(&mut arthur, &a_spark_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?; + let a_spark_sumcheck_verifier = Verifier::new(num_nonzero_term_batched3_config); + a_spark_sumcheck_verifier.verify(arthur, &a_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?; - // Rowwise + // Matrix A - Rowwise let mut tau_and_gamma = [FieldElement::from(0); 2]; arthur.fill_challenge_scalars(&mut tau_and_gamma)?; @@ -81,8 +139,8 @@ fn main() -> Result<()> { let gamma = tau_and_gamma[1]; let gpa_result = gpa_sumcheck_verifier( - &mut arthur, - next_power_of_two(spark_proof.matrix_dimensions.num_rows) + 2, + arthur, + next_power_of_two(num_rows) + 2, )?; let claimed_init = gpa_result.claimed_values[0]; @@ -102,15 +160,15 @@ fn main() -> Result<()> { let final_cntr: FieldElement = arthur.hint()?; let mut final_cntr_statement = - Statement::::new(next_power_of_two(spark_proof.matrix_dimensions.num_rows)); + Statement::::new(next_power_of_two(num_rows)); final_cntr_statement.add_constraint( Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), final_cntr, ); - let final_cntr_verifier = Verifier::new(&spark_proof.whir_params.row); + let final_cntr_verifier = Verifier::new(row_config); final_cntr_verifier - .verify(&mut arthur, &final_row_commitment, &final_cntr_statement) + .verify(arthur, &a_row_finalts_commitment, &final_cntr_statement) .context("while verifying WHIR")?; let final_adr = calculate_adr(&evaluation_randomness.to_vec()); @@ -124,11 +182,11 @@ fn main() -> Result<()> { let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + final_opening * last_randomness[0]; - ensure!(evaluated_value == gpa_result.last_sumcheck_value); + ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); let gpa_result = gpa_sumcheck_verifier( - &mut arthur, - next_power_of_two(spark_proof.matrix_dimensions.a_nonzero_terms) + 2, + arthur, + next_power_of_two(num_nonzero_terms) + 2, )?; let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); @@ -146,20 +204,110 @@ fn main() -> Result<()> { let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; - ensure!(evaluated_value == gpa_result.last_sumcheck_value); + ensure!(evaluated_value == 
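+        // the grand-product sumcheck must terminate at the random combination
+        // of the read-set and write-set leaf fingerprints, with the final
+        // round's challenge selecting between the two branches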
gpa_result.a_last_sumcheck_value);
+
+    let mut a_spark_colwise_statement_verifier = Statement::<FieldElement>::new(next_power_of_two(
+        num_nonzero_terms,
+    ));
+
+    a_spark_colwise_statement_verifier.add_constraint(
+
Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + rs_adr + + rs_mem * a_colwise_commitment.batching_randomness + + rs_timestamp * a_colwise_commitment.batching_randomness * a_colwise_commitment.batching_randomness, ); - a_spark_sumcheck_verifier.verify(&mut arthur, &a_spark_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; + a_spark_sumcheck_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?; ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); @@ -212,7 +360,7 @@ pub fn gpa_sumcheck_verifier( arthur .fill_challenge_scalars(&mut r) .expect("Failed to fill next scalars"); - let mut last_sumcheck_value = eval_linear_poly(&claimed_values, &r[0]); + let mut a_last_sumcheck_value = eval_linear_poly(&claimed_values, &r[0]); rand.push(r[0]); prev_rand = rand; @@ -229,10 +377,10 @@ pub fn gpa_sumcheck_verifier( assert_eq!( eval_cubic_poly(&h, &FieldElement::from(0)) + eval_cubic_poly(&h, &FieldElement::from(1)), - last_sumcheck_value + a_last_sumcheck_value ); rand.push(alpha[0]); - last_sumcheck_value = eval_cubic_poly(&h, &alpha[0]); + a_last_sumcheck_value = eval_cubic_poly(&h, &alpha[0]); } arthur .fill_next_scalars(&mut l) @@ -243,23 +391,23 @@ pub fn gpa_sumcheck_verifier( let claimed_last_sch = calculate_eq(&prev_rand, &rand) * eval_linear_poly(&l, &FieldElement::from(0)) * eval_linear_poly(&l, &FieldElement::from(1)); - assert_eq!(claimed_last_sch, last_sumcheck_value); + assert_eq!(claimed_last_sch, a_last_sumcheck_value); rand.push(r[0]); prev_rand = rand; rand = Vec::::new(); - last_sumcheck_value = eval_linear_poly(&l, &r[0]); + a_last_sumcheck_value = eval_linear_poly(&l, &r[0]); } Ok(GPASumcheckResult { claimed_values: claimed_values.to_vec(), - last_sumcheck_value, + a_last_sumcheck_value, randomness: prev_rand, }) } pub struct GPASumcheckResult { pub claimed_values: Vec, - pub last_sumcheck_value: FieldElement, + pub a_last_sumcheck_value: FieldElement, pub randomness: Vec, } diff --git a/spark-prover/src/bin/test-batched-whir.rs b/spark-prover/src/bin/test-batched-whir.rs deleted file mode 100644 index 05e5d2f2..00000000 --- a/spark-prover/src/bin/test-batched-whir.rs +++ /dev/null @@ -1,53 +0,0 @@ -use provekit_common::{FieldElement, IOPattern, WhirR1CSScheme}; -use provekit_r1cs_compiler::WhirR1CSSchemeBuilder; -use whir::{poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, whir::{committer::{CommitmentReader, CommitmentWriter}, domainsep::WhirDomainSeparator, prover::Prover, statement::{Statement, Weights}, verifier::Verifier}}; -use anyhow::Result; - -fn main() -> Result<()> { - const NUM_VARIABLES: usize = 5; // Change this - - let whir_config = WhirR1CSScheme::new_whir_config_for_size(NUM_VARIABLES, 2); - let mut io = IOPattern::new("💥") - .commit_statement(&whir_config) - .add_whir_proof(&whir_config); - let mut merlin = io.to_prover_state(); - - let poly1 = EvaluationsList::new([FieldElement::from(1); 1<::new(NUM_VARIABLES); - - let weight = Weights::linear(EvaluationsList::new([FieldElement::from(0); 1< Result<()> { prove_spark_for_single_matrix( &mut merlin, spark_r1cs.a, - memory, + &memory, e_values.a, request.claimed_values.a, &spark_whir_configs, )?; + prove_spark_for_single_matrix( + &mut merlin, + spark_r1cs.b, + &memory, + e_values.b, + request.claimed_values.b, + &spark_whir_configs, + )?; + + prove_spark_for_single_matrix( + &mut merlin, + spark_r1cs.c, + &memory, + e_values.c, + request.claimed_values.c, + &spark_whir_configs, + )?; + let spark_proof = 
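    // The proof object carries the raw Fiat-Shamir transcript together with
    // the IO pattern string, so the reference verifier binary can rebuild the
    // same transcript state and replay every challenge.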
     let spark_proof = SPARKProof {
         transcript: merlin.narg_string().to_vec(),
         io_pattern: String::from_utf8(io_pattern.as_bytes().to_vec()).unwrap(),
diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs
index bc4c88e5..a5c45dc6 100644
--- a/spark-prover/src/spark.rs
+++ b/spark-prover/src/spark.rs
@@ -28,28 +28,37 @@ use {
 pub fn prove_spark_for_single_matrix(
     merlin: &mut ProverState,
     matrix: SparkMatrix,
-    memory: Memory,
+    memory: &Memory,
     e_values: EValuesForMatrix,
     claimed_value: FieldElement,
     whir_configs: &SPARKWHIRConfigs,
 ) -> Result<()> {
-    let committer_a = CommitmentWriter::new(whir_configs.a.clone());
-    let committer_row = CommitmentWriter::new(whir_configs.row.clone());
-    let a_spark_sumcheck_committer = CommitmentWriter::new(whir_configs.a_spark_sumcheck.clone());
-
-    let val_coeff = EvaluationsList::new(matrix.coo.val.clone()).to_coeffs();
-    let e_rx_coeff = EvaluationsList::new(e_values.e_rx.clone()).to_coeffs();
-    let e_ry_coeff = EvaluationsList::new(e_values.e_ry.clone()).to_coeffs();
-
-    let spark_sumcheck_witness = a_spark_sumcheck_committer.commit_batch(merlin, &[val_coeff, e_rx_coeff, e_ry_coeff])?;
-
-    let row_addr_coeff = EvaluationsList::new(matrix.coo.row.clone()).to_coeffs();
-    let row_val_coeff = EvaluationsList::new(e_values.e_rx.clone()).to_coeffs();
-    let row_timestamp_coeff = EvaluationsList::new(matrix.timestamps.read_row.clone()).to_coeffs();
-
-    let spark_rowwise_witness = a_spark_sumcheck_committer.commit_batch(merlin, &[row_addr_coeff, row_val_coeff, row_timestamp_coeff])?;
-
-    let final_row_ts_witness = commit_to_vector(&committer_row, merlin, matrix.timestamps.final_row.clone());
+    let row_committer = CommitmentWriter::new(whir_configs.row.clone());
+    let col_committer = CommitmentWriter::new(whir_configs.col.clone());
+    let a_3batched_committer = CommitmentWriter::new(whir_configs.a_3batched.clone());
+
+    let sumcheck_witness = a_3batched_committer.commit_batch(merlin, &[
+        EvaluationsList::new(matrix.coo.val.clone()).to_coeffs(),
+        EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(),
+        EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(),
+    ])?;
+
+    let rowwise_witness = a_3batched_committer.commit_batch(merlin, &[
+        EvaluationsList::new(matrix.coo.row.clone()).to_coeffs(),
+        EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(),
+        EvaluationsList::new(matrix.timestamps.read_row.clone()).to_coeffs(),
+    ])?;
+
+    let colwise_witness = a_3batched_committer.commit_batch(merlin, &[
+        EvaluationsList::new(matrix.coo.col.clone()).to_coeffs(),
+        EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(),
+        EvaluationsList::new(matrix.timestamps.read_col.clone()).to_coeffs(),
+    ])?;
+
+    let final_row_ts_witness = commit_to_vector(&row_committer, merlin, matrix.timestamps.final_row.clone());
+    let final_col_ts_witness = commit_to_vector(&col_committer, merlin, matrix.timestamps.final_col.clone());
+
+    // Sumcheck
 
     let mles = [
         matrix.coo.val.clone(),
@@ -60,21 +69,23 @@ pub fn prove_spark_for_single_matrix(
     let (sumcheck_final_folds, folding_randomness) =
         run_spark_sumcheck(merlin, mles, claimed_value)?;
 
-    let mut spark_sumcheck_statement = Statement::<FieldElement>::new(folding_randomness.len());
+    let mut sumcheck_statement = Statement::<FieldElement>::new(folding_randomness.len());
 
     let claimed_batched_value = sumcheck_final_folds[0] +
-        sumcheck_final_folds[1] * spark_sumcheck_witness.batching_randomness +
-        sumcheck_final_folds[2] * spark_sumcheck_witness.batching_randomness * spark_sumcheck_witness.batching_randomness;
+        sumcheck_final_folds[1] * sumcheck_witness.batching_randomness +
+        sumcheck_final_folds[2] * sumcheck_witness.batching_randomness * sumcheck_witness.batching_randomness;
 
-    spark_sumcheck_statement.add_constraint(
+    sumcheck_statement.add_constraint(
         Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value);
 
-    let sumcheck_prover = Prover(whir_configs.a_spark_sumcheck.clone());
-    sumcheck_prover.prove(merlin, spark_sumcheck_statement, spark_sumcheck_witness)?;
+    let sumcheck_prover = Prover(whir_configs.a_3batched.clone());
+    sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?;
 
     // Rowwise
 
+    // Rowwise Init Final GPA
+
     let mut tau_and_gamma = [FieldElement::from(0); 2];
     merlin.fill_challenge_scalars(&mut tau_and_gamma)?;
     let tau = tau_and_gamma[0];
@@ -102,9 +113,8 @@ pub fn prove_spark_for_single_matrix(
 
     let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
 
-    let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
+    let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
 
-    // TODO: Can I avoid evaluating here?
     let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone())
         .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
     merlin.hint(&final_row_eval)?;
@@ -117,6 +127,8 @@ pub fn prove_spark_for_single_matrix(
         final_row_ts_witness,
     )?;
 
+    // Rowwise RS WS GPA
+
     let rs_address = matrix.coo.row.clone();
     let rs_value = e_values.e_rx.clone();
     let rs_timestamp = matrix.timestamps.read_row.clone();
@@ -142,7 +154,7 @@ pub fn prove_spark_for_single_matrix(
 
     let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec);
 
-    let (combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
+    let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
 
     let rs_address_eval = EvaluationsList::new(rs_address)
         .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
@@ -156,44 +168,121 @@ pub fn prove_spark_for_single_matrix(
         .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
     merlin.hint(&rs_timestamp_eval)?;
 
-    let mut spark_rowwise_statement = Statement::<FieldElement>::new(evaluation_randomness.len());
+    let mut rowwise_statement = Statement::<FieldElement>::new(evaluation_randomness.len());
 
     let claimed_rowwise_eval =
         rs_address_eval +
-        rs_value_eval * spark_rowwise_witness.batching_randomness +
-        rs_timestamp_eval * spark_rowwise_witness.batching_randomness * spark_rowwise_witness.batching_randomness;
+        rs_value_eval * rowwise_witness.batching_randomness +
+        rs_timestamp_eval * rowwise_witness.batching_randomness * rowwise_witness.batching_randomness;
 
-    assert!(claimed_rowwise_eval == spark_rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec())));
+    assert!(claimed_rowwise_eval == rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec())));
 
-    spark_rowwise_statement.add_constraint(
+    rowwise_statement.add_constraint(
         Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval);
 
-    let sumcheck_prover = Prover(whir_configs.a_spark_sumcheck.clone());
-    sumcheck_prover.prove(merlin, spark_rowwise_statement, spark_rowwise_witness)?;
+    let sumcheck_prover = Prover(whir_configs.a_3batched.clone());
+    sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?;
+
+    // Colwise
+
+    // Colwise Init Final GPA
+
+    let mut tau_and_gamma = [FieldElement::from(0); 2];
+    merlin.fill_challenge_scalars(&mut tau_and_gamma)?;
+    let tau = tau_and_gamma[0];
+    let gamma = tau_and_gamma[1];
+
+    let init_address: Vec<FieldElement> = (0..memory.eq_ry.len() as u64)
+        .map(FieldElement::from)
+        .collect();
+    let init_value = memory.eq_ry.clone();
+    let init_timestamp = vec![FieldElement::from(0); memory.eq_ry.len()];
+
+    let init_vec: Vec<FieldElement> = izip!(init_address, init_value, init_timestamp)
+        .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+        .collect();
+
+    let final_address: Vec<FieldElement> = (0..memory.eq_ry.len() as u64)
+        .map(FieldElement::from)
+        .collect();
+    let final_value = memory.eq_ry.clone();
+    let final_timestamp = matrix.timestamps.final_col.clone();
+
+    let final_vec: Vec<FieldElement> = izip!(final_address, final_value, final_timestamp)
+        .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+        .collect();
+
+    let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
+
+    let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
+
+    let final_col_eval = EvaluationsList::new(matrix.timestamps.final_col.clone())
+        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    merlin.hint(&final_col_eval)?;
+
+    produce_whir_proof(
+        merlin,
+        MultilinearPoint(evaluation_randomness.to_vec()),
+        final_col_eval,
+        whir_configs.col.clone(),
+        final_col_ts_witness,
+    )?;
+
+    // Colwise RS WS GPA
+
+    let rs_address = matrix.coo.col.clone();
+    let rs_value = e_values.e_ry.clone();
+    let rs_timestamp = matrix.timestamps.read_col.clone();
+
+    let rs_vec: Vec<FieldElement> =
+        izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone())
+            .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+            .collect();
+
+    let ws_address = matrix.coo.col.clone();
+    let ws_value = e_values.e_ry.clone();
+    let ws_timestamp: Vec<FieldElement> = matrix
+        .timestamps
+        .read_col
+        .into_iter()
+        .map(|a| a + FieldElement::from(1))
+        .collect();
+
+    let ws_vec: Vec<FieldElement> =
+        izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone())
+            .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+            .collect();
+
+    let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec);
+
+    let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
+
+    let rs_address_eval = EvaluationsList::new(rs_address)
+        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    merlin.hint(&rs_address_eval)?;
+
+    let rs_value_eval = EvaluationsList::new(rs_value)
+        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    merlin.hint(&rs_value_eval)?;
+
+    let rs_timestamp_eval = EvaluationsList::new(rs_timestamp)
+        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    merlin.hint(&rs_timestamp_eval)?;
+
+    let mut colwise_statement = Statement::<FieldElement>::new(evaluation_randomness.len());
+
+    let claimed_colwise_eval =
+        rs_address_eval +
+        rs_value_eval * colwise_witness.batching_randomness +
+        rs_timestamp_eval * colwise_witness.batching_randomness * colwise_witness.batching_randomness;
+
+    assert!(claimed_colwise_eval == colwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec())));
+
+    colwise_statement.add_constraint(
+        Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval);
 
-    // produce_whir_proof(
-    //     merlin,
-    //     MultilinearPoint(evaluation_randomness.to_vec()),
-    //     rs_address_eval,
-    //     whir_configs.a.clone(),
-    //     row_witness.clone(),
-    // )?;
-
-    // produce_whir_proof(
-    //     merlin,
-    //     MultilinearPoint(evaluation_randomness.to_vec()),
-    //     rs_value_eval,
-    //     whir_configs.a.clone(),
-    //     e_rx_witness.clone(),
-    // )?;
-
-    // produce_whir_proof(
-    //     merlin,
-    //     MultilinearPoint(evaluation_randomness.to_vec()),
-    //     rs_timestamp_eval,
-    //     whir_configs.a.clone(),
-    //     read_ts_witness.clone(),
-    // )?;
+    let sumcheck_prover = Prover(whir_configs.a_3batched.clone());
+    sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?;
 
     Ok(())
 }
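[Editor's note — illustration, not part of the patch series] The init/final and RS/WS vectors built in spark.rs above are offline-memory-checking fingerprints: each (address, value, timestamp) tuple is compressed to a·γ² + v·γ + t − τ, and the verifier accepts only when the grand products satisfy init · ws == rs · final. A toy sketch of that identity, with plain integers standing in for FieldElement (all names here are illustrative, not from the codebase):

```rust
// Fingerprint one (address, value, timestamp) tuple.
fn fp(a: i128, v: i128, t: i128, gamma: i128, tau: i128) -> i128 {
    a * gamma * gamma + v * gamma + t - tau
}

fn main() {
    let (gamma, tau) = (13, 7); // stand-ins for the transcript challenges
    // One memory cell at address 0 holding value 9, read once:
    let init = fp(0, 9, 0, gamma, tau); // initial state, counter 0
    let rs = fp(0, 9, 0, gamma, tau); // the read observes counter 0
    let ws = fp(0, 9, 1, gamma, tau); // the write-back bumps the counter
    let fin = fp(0, 9, 1, gamma, tau); // final state, counter 1
    // {init, ws} and {rs, fin} are equal as multisets, so the products agree.
    assert_eq!(init * ws, rs * fin);
}
```

The same check is run twice per matrix (rowwise and colwise), once against the row memory eq_rx and once against the column memory eq_ry.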
diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs
index 6235957c..78480f42 100644
--- a/spark-prover/src/utilities/iopattern/mod.rs
+++ b/spark-prover/src/utilities/iopattern/mod.rs
@@ -29,14 +29,22 @@ where
 }
 
 pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern {
-    let mut io = IOPattern::new("💥")
-        .commit_statement(&configs.a_spark_sumcheck)
-        .commit_statement(&configs.a_spark_sumcheck)
+    let mut io = IOPattern::new("💥");
+
+    // Matrix A
+
+    io = io
+        .commit_statement(&configs.a_3batched)
+        .commit_statement(&configs.a_3batched)
+        .commit_statement(&configs.a_3batched)
         .commit_statement(&configs.row)
+        .commit_statement(&configs.col)
         .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries()))
         .hint("sumcheck_last_folds")
-        .add_whir_proof(&configs.a_spark_sumcheck);
+        .add_whir_proof(&configs.a_3batched);
+
+    // Rowwise
 
     io = io.add_tau_and_gamma();
 
     for i in 0..=next_power_of_two(r1cs.a.num_rows) {
@@ -57,13 +65,150 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern {
         .hint("RS address claimed evaluation")
         .hint("RS value claimed evaluation")
         .hint("RS timestamp claimed evaluation")
-        .add_whir_proof(&configs.a_spark_sumcheck);
+        .add_whir_proof(&configs.a_3batched);
+
+    // Colwise
+
+    io = io.add_tau_and_gamma();
+
+    for i in 0..=next_power_of_two(r1cs.a.num_cols) {
+        io = io.add_sumcheck_polynomials(i);
+        io = io.add_line();
+    }
+
+    io = io
+        .hint("Col final counter claimed evaluation")
+        .add_whir_proof(&configs.col);
+
+    for i in 0..=next_power_of_two(r1cs.a.num_entries()) {
+        io = io.add_sumcheck_polynomials(i);
+        io = io.add_line();
+    }
+
+    io = io
+        .hint("RS address claimed evaluation")
+        .hint("RS value claimed evaluation")
+        .hint("RS timestamp claimed evaluation")
+        .add_whir_proof(&configs.a_3batched);
+
+    // Matrix B
+
+    io = io
+        .commit_statement(&configs.b_3batched)
+        .commit_statement(&configs.b_3batched)
+        .commit_statement(&configs.b_3batched)
+        .commit_statement(&configs.row)
+        .commit_statement(&configs.col)
+        .add_sumcheck_polynomials(next_power_of_two(r1cs.b.num_entries()))
+        .hint("sumcheck_last_folds")
+        .add_whir_proof(&configs.b_3batched);
+
+    // Rowwise
+
+    io = io.add_tau_and_gamma();
+
+    for i in 0..=next_power_of_two(r1cs.b.num_rows) {
+        io = io.add_sumcheck_polynomials(i);
+        io = io.add_line();
+    }
+
+    io = io
+        .hint("Row final counter claimed evaluation")
+        .add_whir_proof(&configs.row);
+
+    for i in 0..=next_power_of_two(r1cs.b.num_entries()) {
+        io = io.add_sumcheck_polynomials(i);
+        io = io.add_line();
+    }
+
+    io = io
+        .hint("RS address claimed evaluation")
+        .hint("RS value claimed evaluation")
+        .hint("RS timestamp claimed evaluation")
+        .add_whir_proof(&configs.b_3batched);
+
+    // Colwise
+
+    io = io.add_tau_and_gamma();
+
+    for i in 0..=next_power_of_two(r1cs.b.num_cols) {
+        io = io.add_sumcheck_polynomials(i);
+        io = io.add_line();
+    }
+
+    io = io
+        .hint("Col final counter claimed evaluation")
+        .add_whir_proof(&configs.col);
+
+    for i in 0..=next_power_of_two(r1cs.b.num_entries()) {
+        io = io.add_sumcheck_polynomials(i);
+        io = io.add_line();
+    }
+
+    io = io
+        .hint("RS address claimed evaluation")
+        .hint("RS value claimed evaluation")
+
.hint("RS timestamp claimed evaluation") + .add_whir_proof(&configs.b_3batched); + + // Matrix C + + io = io + .commit_statement(&configs.c_3batched) + .commit_statement(&configs.c_3batched) + .commit_statement(&configs.c_3batched) + .commit_statement(&configs.row) + .commit_statement(&configs.col) + .add_sumcheck_polynomials(next_power_of_two(r1cs.c.num_entries())) + .hint("sumcheck_last_folds") + .add_whir_proof(&configs.c_3batched); + + // Rowwise + + io = io.add_tau_and_gamma(); + + for i in 0..=next_power_of_two(r1cs.c.num_rows) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } + + io = io + .hint("Row final counter claimed evaluation") + .add_whir_proof(&configs.row); + + for i in 0..=next_power_of_two(r1cs.c.num_entries()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } + + io = io + .hint("RS address claimed evaluation") + .hint("RS value claimed evaluation") + .hint("RS timestamp claimed evaluation") + .add_whir_proof(&configs.c_3batched); - // io = io - // .add_whir_proof(&configs.a); - // .add_whir_proof(&configs.a); - // .add_whir_proof(&configs.a); + // Colwise - // io = io + io = io.add_tau_and_gamma(); + + for i in 0..=next_power_of_two(r1cs.c.num_cols) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } + + io = io + .hint("Col final counter claimed evaluation") + .add_whir_proof(&configs.col); + + for i in 0..=next_power_of_two(r1cs.c.num_entries()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } + + io = io + .hint("RS address claimed evaluation") + .hint("RS value claimed evaluation") + .hint("RS timestamp claimed evaluation") + .add_whir_proof(&configs.c_3batched); io } diff --git a/spark-prover/src/whir.rs b/spark-prover/src/whir.rs index 5580ae80..2d684cb6 100644 --- a/spark-prover/src/whir.rs +++ b/spark-prover/src/whir.rs @@ -41,7 +41,9 @@ pub struct SPARKWHIRConfigs { pub a: WhirConfig, pub b: WhirConfig, pub c: WhirConfig, - pub a_spark_sumcheck: WhirConfig, + pub a_3batched: WhirConfig, + pub b_3batched: WhirConfig, + pub c_3batched: WhirConfig, } pub fn create_whir_configs(r1cs: &R1CS) -> SPARKWHIRConfigs { @@ -51,7 +53,9 @@ pub fn create_whir_configs(r1cs: &R1CS) -> SPARKWHIRConfigs { a: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 1), b: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.b.num_entries()), 1), c: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.c.num_entries()), 1), - a_spark_sumcheck: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 3), + a_3batched: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 3), + b_3batched: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.b.num_entries()), 3), + c_3batched: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.c.num_entries()), 3), } } From 204259e0a8efee0aba89d9fa1a4ef67229a75e7f Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Mon, 22 Sep 2025 17:04:42 +0800 Subject: [PATCH 08/34] Writes request to file --- provekit/common/src/lib.rs | 1 + provekit/common/src/spark.rs | 28 ++++++++++++++ provekit/prover/Cargo.toml | 1 + provekit/prover/src/whir_r1cs.rs | 37 +++++++++++-------- spark-prover/src/bin/generate_test_request.rs | 3 +- spark-prover/src/bin/spark-verifier.rs | 8 ++-- spark-prover/src/memory.rs | 4 +- spark-prover/src/spark.rs | 6 +-- spark-prover/src/utilities/mod.rs | 27 +------------- spark-prover/src/whir.rs | 2 +- 10 files changed, 62 insertions(+), 55 
deletions(-) create mode 100644 provekit/common/src/spark.rs diff --git a/provekit/common/src/lib.rs b/provekit/common/src/lib.rs index 68efb571..7eb976a2 100644 --- a/provekit/common/src/lib.rs +++ b/provekit/common/src/lib.rs @@ -7,6 +7,7 @@ mod sparse_matrix; pub mod utils; mod whir_r1cs; pub mod witness; +pub mod spark; use crate::interner::{InternedFieldElement, Interner}; pub use { diff --git a/provekit/common/src/spark.rs b/provekit/common/src/spark.rs new file mode 100644 index 00000000..1921a7af --- /dev/null +++ b/provekit/common/src/spark.rs @@ -0,0 +1,28 @@ +use serde::{Deserialize, Serialize}; +use crate::FieldElement; +use crate::utils::serde_ark; + + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct Point { + #[serde(with = "serde_ark")] + pub row: Vec, + #[serde(with = "serde_ark")] + pub col: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct ClaimedValues { + #[serde(with = "serde_ark")] + pub a: FieldElement, + #[serde(with = "serde_ark")] + pub b: FieldElement, + #[serde(with = "serde_ark")] + pub c: FieldElement, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct SPARKRequest { + pub point_to_evaluate: Point, + pub claimed_values: ClaimedValues, +} diff --git a/provekit/prover/Cargo.toml b/provekit/prover/Cargo.toml index 5584465d..4faf7522 100644 --- a/provekit/prover/Cargo.toml +++ b/provekit/prover/Cargo.toml @@ -12,6 +12,7 @@ repository.workspace = true # Workspace crates provekit-common.workspace = true skyscraper.workspace = true +serde_json.workspace = true # Noir language acir.workspace = true diff --git a/provekit/prover/src/whir_r1cs.rs b/provekit/prover/src/whir_r1cs.rs index 26bfc1a7..5356e7e4 100644 --- a/provekit/prover/src/whir_r1cs.rs +++ b/provekit/prover/src/whir_r1cs.rs @@ -1,10 +1,6 @@ use { - anyhow::{ensure, Result}, - ark_ff::UniformRand, - ark_std::{One, Zero}, - provekit_common::{ - skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge}, - utils::{ + anyhow::{ensure, Result}, ark_ff::UniformRand, ark_std::{One, Zero}, provekit_common::{ + file::write, skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge}, spark::{self, ClaimedValues, Point, SPARKRequest}, utils::{ pad_to_power_of_two, sumcheck::{ calculate_evaluations_over_boolean_hypercube_for_eq, @@ -13,15 +9,11 @@ use { }, zk_utils::{create_masked_polynomial, generate_random_multilinear_polynomial}, HALF, - }, - FieldElement, IOPattern, WhirConfig, WhirR1CSProof, WhirR1CSScheme, R1CS, - }, - spongefish::{ + }, FieldElement, IOPattern, SparseMatrix, WhirConfig, WhirR1CSProof, WhirR1CSScheme, R1CS + }, spongefish::{ codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, ProverState, - }, - tracing::{info, instrument, warn}, - whir::{ + }, std::{fs::File, io::Write}, tracing::{info, instrument, warn}, whir::{ poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, whir::{ committer::{CommitmentWriter, Witness}, @@ -30,7 +22,7 @@ use { statement::{Statement, Weights}, utils::HintSerialize, }, - }, + } }; pub trait WhirR1CSProver { @@ -91,11 +83,26 @@ impl WhirR1CSProver for WhirR1CSScheme { .hint::<(Vec, Vec)>(&(f_sums.to_vec(), g_sums.to_vec())); // Compute WHIR weighted batch opening proof - let (merlin, ..) 
= + let (merlin, whir_randomness, deferred_evaluations) = run_zk_whir_pcs_prover(commitment_to_witness, statement, &self.whir_witness, merlin); let transcript = merlin.narg_string().to_vec(); + let spark_request: SPARKRequest = SPARKRequest { + point_to_evaluate: Point { + row: alpha, + col: whir_randomness.0, + }, + claimed_values: ClaimedValues { + a: deferred_evaluations[0], + b: deferred_evaluations[1], + c: deferred_evaluations[2], + } + }; + + let mut spark_request_file = File::create("spark_request.json")?; // Creates or truncates the spark_request_file + spark_request_file.write_all(serde_json::to_string(&spark_request).unwrap().as_bytes())?; // Writes bytes to the file + Ok(WhirR1CSProof { transcript }) } } diff --git a/spark-prover/src/bin/generate_test_request.rs b/spark-prover/src/bin/generate_test_request.rs index 4cc4f7f5..37e1aaaf 100644 --- a/spark-prover/src/bin/generate_test_request.rs +++ b/spark-prover/src/bin/generate_test_request.rs @@ -1,6 +1,5 @@ use { - provekit_common::FieldElement, - spark_prover::utilities::{ClaimedValues, Point, SPARKRequest}, + provekit_common::{spark::{ClaimedValues, Point, SPARKRequest}, FieldElement}, std::{fs::File, io::Write}, }; diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index e64c4a4f..425e2e3f 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -2,14 +2,12 @@ use { anyhow::{ensure, Context, Result}, ark_std::{One, Zero}, provekit_common::{ - skyscraper::SkyscraperSponge, - utils::{ + skyscraper::SkyscraperSponge, spark::SPARKRequest, utils::{ next_power_of_two, sumcheck::{calculate_eq, eval_cubic_poly}, - }, - FieldElement, IOPattern, WhirConfig, + }, FieldElement, IOPattern, WhirConfig }, - spark_prover::utilities::{SPARKProof, SPARKRequest}, + spark_prover::utilities::SPARKProof, spongefish::{ codecs::arkworks_algebra::{FieldToUnitDeserialize, UnitToField}, VerifierState, diff --git a/spark-prover/src/memory.rs b/spark-prover/src/memory.rs index 76016fe6..4b954f60 100644 --- a/spark-prover/src/memory.rs +++ b/spark-prover/src/memory.rs @@ -1,8 +1,6 @@ use { - crate::utilities::Point, provekit_common::{ - utils::sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq, FieldElement, - HydratedSparseMatrix, R1CS, + spark::Point, utils::sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq, FieldElement, HydratedSparseMatrix, R1CS }, }; diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index a5c45dc6..58cc846a 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -79,7 +79,7 @@ pub fn prove_spark_for_single_matrix( sumcheck_statement.add_constraint( Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value); - let sumcheck_prover = Prover(whir_configs.a_3batched.clone()); + let sumcheck_prover = Prover::new(whir_configs.a_3batched.clone()); sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?; // Rowwise @@ -180,7 +180,7 @@ pub fn prove_spark_for_single_matrix( rowwise_statement.add_constraint( Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); - let sumcheck_prover = Prover(whir_configs.a_3batched.clone()); + let sumcheck_prover = Prover::new(whir_configs.a_3batched.clone()); sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?; // Colwise @@ -281,7 +281,7 @@ pub fn prove_spark_for_single_matrix( colwise_statement.add_constraint( 
Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval); - let sumcheck_prover = Prover(whir_configs.a_3batched.clone()); + let sumcheck_prover = Prover::new(whir_configs.a_3batched.clone()); sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; Ok(()) diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index b967aef1..79c39202 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -4,8 +4,7 @@ use { crate::whir::SPARKWHIRConfigs, anyhow::{Context, Result}, provekit_common::{ - utils::{serde_ark, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq}, - FieldElement, HydratedSparseMatrix, WhirConfig, R1CS, + spark::SPARKRequest, utils::{next_power_of_two, serde_ark, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq}, FieldElement, HydratedSparseMatrix, WhirConfig, R1CS }, serde::{Deserialize, Serialize}, std::fs, @@ -24,30 +23,6 @@ pub fn deserialize_request(path_str: &str) -> Result { serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS") } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct SPARKRequest { - pub point_to_evaluate: Point, - pub claimed_values: ClaimedValues, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct Point { - #[serde(with = "serde_ark")] - pub row: Vec, - #[serde(with = "serde_ark")] - pub col: Vec, -} - -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct ClaimedValues { - #[serde(with = "serde_ark")] - pub a: FieldElement, - #[serde(with = "serde_ark")] - pub b: FieldElement, - #[serde(with = "serde_ark")] - pub c: FieldElement, -} - #[derive(Serialize, Deserialize)] pub struct SPARKProof { pub transcript: Vec, diff --git a/spark-prover/src/whir.rs b/spark-prover/src/whir.rs index 2d684cb6..d6dbad23 100644 --- a/spark-prover/src/whir.rs +++ b/spark-prover/src/whir.rs @@ -68,7 +68,7 @@ pub fn produce_whir_proof( ) -> Result<()> { let mut statement = Statement::::new(evaluation_point.num_variables()); statement.add_constraint(Weights::evaluation(evaluation_point), evaluated_value); - let prover = Prover(config); + let prover = Prover::new(config); prover .prove(merlin, statement, witness) From 1f7a0eba9120b7315b924ee1adb069e6e97530ac Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Tue, 23 Sep 2025 10:58:10 +0800 Subject: [PATCH 09/34] Pads matrices --- .../app/circuit/matrix_evaluation.go | 1 + spark-prover/src/bin/spark-verifier.rs | 8 +++--- spark-prover/src/gpa.rs | 7 +++-- spark-prover/src/main.rs | 6 +++- spark-prover/src/memory.rs | 12 ++++++-- spark-prover/src/spark.rs | 28 +++++++++++++------ spark-prover/src/utilities/matrix/mod.rs | 19 ++++++++++++- spark-prover/src/utilities/mod.rs | 7 ++++- 8 files changed, 67 insertions(+), 21 deletions(-) diff --git a/recursive-verifier/app/circuit/matrix_evaluation.go b/recursive-verifier/app/circuit/matrix_evaluation.go index adaa8466..affb4d8d 100644 --- a/recursive-verifier/app/circuit/matrix_evaluation.go +++ b/recursive-verifier/app/circuit/matrix_evaluation.go @@ -1,6 +1,7 @@ package circuit import ( + "fmt" "math/big" "github.com/consensys/gnark/frontend" diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index 425e2e3f..2770800f 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -238,9 +238,9 @@ pub fn verify_spark_single_matrix( let init_adr = 
calculate_adr(&evaluation_randomness.to_vec());
     let init_mem = calculate_eq(
-        &request.point_to_evaluate.col,
+        &request.point_to_evaluate.col[1..],
         &evaluation_randomness.to_vec(),
-    );
+    ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]);
     let init_cntr = FieldElement::from(0);
 
     let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau;
@@ -261,9 +261,9 @@ pub fn verify_spark_single_matrix(
     let final_adr = calculate_adr(&evaluation_randomness.to_vec());
     let final_mem = calculate_eq(
-        &request.point_to_evaluate.col,
+        &request.point_to_evaluate.col[1..],
         &evaluation_randomness.to_vec(),
-    );
+    ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]);
 
     let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau;
 
diff --git a/spark-prover/src/gpa.rs b/spark-prover/src/gpa.rs
index 26423f1e..cfa1273e 100644
--- a/spark-prover/src/gpa.rs
+++ b/spark-prover/src/gpa.rs
@@ -2,11 +2,10 @@ use {
     provekit_common::{
         skyscraper::SkyscraperSponge,
         utils::{
-            sumcheck::{
+            next_power_of_two, sumcheck::{
                 calculate_evaluations_over_boolean_hypercube_for_eq, eval_cubic_poly,
                 sumcheck_fold_map_reduce,
-            },
-            HALF,
+            }, HALF
         },
         FieldElement,
     },
@@ -55,6 +54,8 @@ pub fn run_gpa(
 fn calculate_binary_multiplication_tree(
     array_to_prove: Vec<FieldElement>,
 ) -> Vec<Vec<FieldElement>> {
+    println!("{:?}", array_to_prove.len());
+    assert!(array_to_prove.len() == 1 << next_power_of_two(array_to_prove.len()));
     let mut layers = vec![];
     let mut current_layer = array_to_prove;
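[Editor's note — illustration, not part of the patch series] The new assert in gpa.rs above enforces that the grand-product argument only ever sees power-of-two inputs: the prover builds a binary multiplication tree over the array, halving the layer size at each level, so the tree only closes at a single root when the leaf count is 2^k. A minimal sketch of that construction (illustrative, over u64 instead of FieldElement):

```rust
// Layer 0 is the input; each subsequent layer multiplies adjacent pairs.
fn product_tree(mut layer: Vec<u64>) -> Vec<Vec<u64>> {
    assert!(layer.len().is_power_of_two());
    let mut layers = vec![layer.clone()];
    while layer.len() > 1 {
        layer = layer.chunks(2).map(|pair| pair[0] * pair[1]).collect();
        layers.push(layer.clone());
    }
    layers // the last layer holds the grand product
}

fn main() {
    let layers = product_tree(vec![2, 3, 4, 5]);
    assert_eq!(layers.last().unwrap()[0], 120);
}
```

This is why the padding added elsewhere in this patch matters: an odd-length layer would leave an element without a partner.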
diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs
index 4d4ee216..d52567b5 100644
--- a/spark-prover/src/main.rs
+++ b/spark-prover/src/main.rs
@@ -10,7 +10,7 @@ use {
         },
         whir::create_whir_configs,
     },
-    std::{fs::File, io::Write},
+    std::{fs::File, io::Write, mem},
 };
 
 fn main() -> Result<()> {
@@ -23,6 +23,7 @@ fn main() -> Result<()> {
     // Run for each request
     let request = deserialize_request("spark-prover/request.json")
         .context("Error: Failed to deserialize the request object")?;
+
     let memory = calculate_memory(request.point_to_evaluate);
     let e_values = calculate_e_values_for_r1cs(&memory, &r1cs);
     let io_pattern = create_io_pattern(&r1cs, &spark_whir_configs);
@@ -35,6 +36,7 @@ fn main() -> Result<()> {
         e_values.a,
         request.claimed_values.a,
         &spark_whir_configs,
+        &spark_whir_configs.a_3batched,
     )?;
 
     prove_spark_for_single_matrix(
@@ -44,6 +46,7 @@ fn main() -> Result<()> {
         e_values.b,
         request.claimed_values.b,
         &spark_whir_configs,
+        &spark_whir_configs.b_3batched,
     )?;
 
     prove_spark_for_single_matrix(
@@ -53,6 +56,7 @@ fn main() -> Result<()> {
         e_values.c,
         request.claimed_values.c,
         &spark_whir_configs,
+        &spark_whir_configs.c_3batched,
     )?;
 
     let spark_proof = SPARKProof {
diff --git a/spark-prover/src/memory.rs b/spark-prover/src/memory.rs
index 4b954f60..9d7ebab0 100644
--- a/spark-prover/src/memory.rs
+++ b/spark-prover/src/memory.rs
@@ -1,6 +1,6 @@
 use {
     provekit_common::{
-        spark::Point, utils::sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq, FieldElement, HydratedSparseMatrix, R1CS
+        spark::Point, utils::{next_power_of_two, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq}, FieldElement, HydratedSparseMatrix, R1CS
     },
 };
 
@@ -26,7 +26,7 @@ pub struct EValues {
 pub fn calculate_memory(point_to_evaluate: Point) -> Memory {
     Memory {
         eq_rx: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.row),
-        eq_ry: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.col),
+        eq_ry: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.col[1..]).iter().map(|x| *x * (FieldElement::from(1) - point_to_evaluate.col[0])).collect(),
     }
 }
 
@@ -49,5 +49,13 @@ pub fn calculate_e_values_for_matrix(
         e_rx.push(memory.eq_rx[r]);
         e_ry.push(memory.eq_ry[c]);
     }
+
+    let to_pad = (1 << next_power_of_two(e_rx.len())) - e_rx.len();
+    for _ in 0..to_pad {
+        e_rx.push(FieldElement::from(0));
+        e_ry.push(FieldElement::from(0));
+    }
+
diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs
--- a/spark-prover/src/spark.rs
+++ b/spark-prover/src/spark.rs
@@ -28,37 +28,38 @@ pub fn prove_spark_for_single_matrix(
     e_values: EValuesForMatrix,
     claimed_value: FieldElement,
     whir_configs: &SPARKWHIRConfigs,
+    batched_config: &WhirConfig,
 ) -> Result<()> {
     let row_committer = CommitmentWriter::new(whir_configs.row.clone());
     let col_committer = CommitmentWriter::new(whir_configs.col.clone());
-    let a_3batched_committer = CommitmentWriter::new(whir_configs.a_3batched.clone());
+    let batched_committer = CommitmentWriter::new(batched_config.clone());
 
-    let sumcheck_witness = a_3batched_committer.commit_batch(merlin, &[
+    let sumcheck_witness = batched_committer.commit_batch(merlin, &[
         EvaluationsList::new(matrix.coo.val.clone()).to_coeffs(),
         EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(),
         EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(),
     ])?;
 
-    let rowwise_witness = a_3batched_committer.commit_batch(merlin, &[
+    let rowwise_witness = batched_committer.commit_batch(merlin, &[
         EvaluationsList::new(matrix.coo.row.clone()).to_coeffs(),
         EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(),
         EvaluationsList::new(matrix.timestamps.read_row.clone()).to_coeffs(),
     ])?;
 
-    let colwise_witness = a_3batched_committer.commit_batch(merlin, &[
+    let colwise_witness = batched_committer.commit_batch(merlin, &[
         EvaluationsList::new(matrix.coo.col.clone()).to_coeffs(),
         EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(),
         EvaluationsList::new(matrix.timestamps.read_col.clone()).to_coeffs(),
@@ -79,7 +80,7 @@ pub fn prove_spark_for_single_matrix(
     sumcheck_statement.add_constraint(
         Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value);
 
-    let sumcheck_prover = Prover::new(whir_configs.a_3batched.clone());
+    let sumcheck_prover = Prover::new(batched_config.clone());
     sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?;
 
     // Rowwise
@@ -110,7 +111,8 @@ pub fn prove_spark_for_single_matrix(
     let final_vec: Vec<FieldElement> = izip!(final_address, final_value, final_timestamp)
         .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
         .collect();
-
+
+    println!("Rowwise init final gpa");
     let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
@@ -152,6 +154,7 @@ pub fn prove_spark_for_single_matrix(
         .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
         .collect();
 
+    println!("Rowwise init final gpa");
     let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec);
@@ -180,7 +183,7 @@ pub fn prove_spark_for_single_matrix(
     rowwise_statement.add_constraint(
         Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval);
 
-    let sumcheck_prover = Prover::new(whir_configs.a_3batched.clone());
+    let sumcheck_prover = Prover::new(batched_config.clone());
     sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?;
 
     // Colwise
@@ -198,6 +201,11 @@ pub fn prove_spark_for_single_matrix(
     let init_value = memory.eq_ry.clone();
     let init_timestamp = vec![FieldElement::from(0); memory.eq_ry.len()];
 
+    println!("{:?}", memory.eq_ry.len());
+    println!("{:?}", init_address.len());
+    println!("{:?}", init_value.len());
+    println!("{:?}", init_timestamp.len());
+
     let init_vec: Vec<FieldElement> = izip!(init_address, init_value, init_timestamp)
         .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
         .collect();
@@ -212,6 +220,8 @@ pub fn prove_spark_for_single_matrix(
         .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
         .collect();
 
+    println!("Init vec{:?}", init_vec.len());
+    println!("Final vec{:?}", final_vec.len());
     let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
 
     let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
@@ -281,7 +291,7 @@ pub fn prove_spark_for_single_matrix(
     colwise_statement.add_constraint(
         Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval);
 
-    let sumcheck_prover = Prover::new(whir_configs.a_3batched.clone());
+    let sumcheck_prover = Prover::new(batched_config.clone());
     sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?;
 
     Ok(())
diff --git a/spark-prover/src/utilities/matrix/mod.rs b/spark-prover/src/utilities/matrix/mod.rs
index 5cba9545..2950b718 100644
--- a/spark-prover/src/utilities/matrix/mod.rs
+++ b/spark-prover/src/utilities/matrix/mod.rs
@@ -1,4 +1,5 @@
-use provekit_common::{FieldElement, HydratedSparseMatrix, SparseMatrix, R1CS};
+use ark_ff::Field;
+use provekit_common::{utils::next_power_of_two, FieldElement, HydratedSparseMatrix, SparseMatrix, R1CS};
 
 #[derive(Debug)]
 pub struct SparkR1CS {
@@ -51,6 +52,13 @@ pub fn get_coordinate_rep_of_a_matrix(matrix: &HydratedSparseMatrix) -> COOMatri
         val.push(value.clone());
     }
 
+    let to_pad = (1 << next_power_of_two(val.len())) - val.len();
+    for _ in 0..to_pad {
+        row.push(FieldElement::ZERO);
+        col.push(FieldElement::ZERO);
+        val.push(FieldElement::ZERO);
+    }
+
@@ -70,6 +78,12 @@ pub fn get_timestamps(matrix: &HydratedSparseMatrix) -> TimeStamps {
         read_col_counters[c] += 1;
     }
 
+    let to_pad = (1 << next_power_of_two(read_row.len())) - read_row.len();
+    for _ in 0..to_pad {
+        read_row.push(FieldElement::ZERO);
+        read_col.push(FieldElement::ZERO);
+    }
+
diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs
--- a/spark-prover/src/utilities/mod.rs
+++ b/spark-prover/src/utilities/mod.rs
@@ -13,7 +13,13 @@ pub fn deserialize_r1cs(path_str: &str) -> Result<R1CS> {
     let json_str =
         fs::read_to_string(path_str).context("Error: Failed to open the r1cs.json file")?;
-    serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS")
+    let mut r1cs: R1CS =
+        serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS")?;
+    r1cs.grow_matrices(
+        1 << next_power_of_two(r1cs.a.num_rows),
+        1 << next_power_of_two(r1cs.a.num_cols),
+    );
+    Ok(r1cs)
 }
 
 pub fn deserialize_request(path_str: &str) -> Result<SPARKRequest> {
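[Editor's note — illustration, not part of the patch series] PATCH 09 pads every committed column (COO entries, e-values, timestamps) to a power-of-two length so each one is a well-formed multilinear evaluation table. The recurring idiom, sketched on its own; the helper below mirrors provekit_common::utils::next_power_of_two, which (as used throughout these patches) returns the exponent k with 2^k >= n, and the pad value stands in for the field's zero:

```rust
// Exponent k such that 2^k >= n, i.e. the number of MLE variables for n entries.
fn next_power_of_two(n: usize) -> usize {
    n.next_power_of_two().trailing_zeros() as usize
}

// Extend a column to length 2^k with a chosen padding element.
fn pad_to_power_of_two(mut v: Vec<u64>, pad: u64) -> Vec<u64> {
    let target = 1usize << next_power_of_two(v.len());
    v.resize(target, pad);
    v
}

fn main() {
    assert_eq!(pad_to_power_of_two(vec![1, 2, 3], 0), vec![1, 2, 3, 0]);
}
```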
c.String("spark_proof") configFile, err := os.ReadFile(configFilePath) if err != nil { @@ -101,6 +120,7 @@ func main() { } } + // Parse only if we use direct evaluation var r1cs circuit.R1CS if err = json.Unmarshal(r1csFile, &r1cs); err != nil { return fmt.Errorf("failed to unmarshal r1cs JSON: %w", err) diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index 98d41037..708d7b9c 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -112,7 +112,6 @@ pub fn prove_spark_for_single_matrix( .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) .collect(); - println!("Rowwise init final gpa"); let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); @@ -154,7 +153,6 @@ pub fn prove_spark_for_single_matrix( .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) .collect(); - println!("Rowwise init final gpa"); let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); @@ -201,11 +199,6 @@ pub fn prove_spark_for_single_matrix( let init_value = memory.eq_ry.clone(); let init_timestamp = vec![FieldElement::from(0); memory.eq_ry.len()]; - println!("{:?}", memory.eq_ry.len()); - println!("{:?}", init_address.len()); - println!("{:?}", init_value.len()); - println!("{:?}", init_timestamp.len()); - let init_vec: Vec = izip!(init_address, init_value, init_timestamp) .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) .collect(); @@ -220,8 +213,6 @@ pub fn prove_spark_for_single_matrix( .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) .collect(); - println!("Init vec{:?}", init_vec.len()); - println!("Final vec{:?}", final_vec.len()); let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); From 36598236594d9f2c8d0e35977353fb5df2ddf386 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Wed, 24 Sep 2025 08:34:59 +0800 Subject: [PATCH 11/34] Buggy: update --- recursive-verifier/cmd/cli/main.go | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/recursive-verifier/cmd/cli/main.go b/recursive-verifier/cmd/cli/main.go index b44dcf32..212aa775 100644 --- a/recursive-verifier/cmd/cli/main.go +++ b/recursive-verifier/cmd/cli/main.go @@ -69,7 +69,7 @@ func main() { Value: "", }, &cli.StringFlag{ - Name: "spark_proof", + Name: "spark_config", Usage: "Path to the spark SPARK proof file", Required: false, Value: "", @@ -95,7 +95,8 @@ func main() { pkUrl := c.String("pk_url") vkUrl := c.String("vk_url") r1csUrl := c.String("r1cs_url") - // sparkPath := c.String("spark_proof") + sparkConfigFilePath := c.String("spark_config") + evaluation := c.String("evaluation") configFile, err := os.ReadFile(configFilePath) if err != nil { @@ -107,6 +108,17 @@ func main() { return fmt.Errorf("failed to unmarshal config JSON: %w", err) } + // TODO: Only parse SPARK file if evaluation flag is set to spark + sparkConfigFile, err := os.ReadFile(sparkConfigFilePath) + if err != nil { + return fmt.Errorf("failed to read spark config file: %w", err) + } + + var sparkConfig circuit.SparkConfig + if err := json.Unmarshal(sparkConfigFile, &sparkConfig); err != nil { + return fmt.Errorf("failed to unmarshal spark config JSON: %w", err) + } + var r1csFile []byte if r1csFilePath != "" { r1csFile, err = os.ReadFile(r1csFilePath) @@ -143,7 +155,7 @@ func main() { log.Printf("No valid PK/VK url or file 
From 745faf45e76bdfcf47e085868d874c8ec0a47031 Mon Sep 17 00:00:00 2001
From: Batmend Batsaikhan
Date: Wed, 24 Sep 2025 14:57:24 +0800
Subject: [PATCH 12/34] Parse initial commitments

---
 provekit/common/Cargo.toml                    |   1 +
 provekit/common/src/gnark.rs                  |  74 ++++++++++
 provekit/common/src/lib.rs                    |   1 +
 recursive-verifier/app/circuit/circuit.go     | 128 ++++++++++++++----
 recursive-verifier/app/circuit/common.go      |   4 +-
 recursive-verifier/app/circuit/types.go       |  19 +++
 spark-prover/src/bin/generate_test_request.rs |   2 +-
 spark-prover/src/main.rs                      |  20 ++-
 spark-prover/src/utilities/mod.rs             |  11 +-
 tooling/provekit-gnark/src/gnark_config.rs    |  74 +----------
 10 files changed, 227 insertions(+), 107 deletions(-)
 create mode 100644 provekit/common/src/gnark.rs

diff --git a/provekit/common/Cargo.toml b/provekit/common/Cargo.toml
index 26866f14..67c51b3b 100644
--- a/provekit/common/Cargo.toml
+++ b/provekit/common/Cargo.toml
@@ -23,6 +23,7 @@ ark-crypto-primitives.workspace = true
 ark-ff.workspace = true
 ark-serialize.workspace = true
 ark-std.workspace = true
+ark-poly.workspace = true
 spongefish.workspace = true
 spongefish-pow.workspace = true
 whir.workspace = true
diff --git a/provekit/common/src/gnark.rs b/provekit/common/src/gnark.rs
new file mode 100644
index 00000000..4e3540eb
--- /dev/null
+++ b/provekit/common/src/gnark.rs
@@ -0,0 +1,74 @@
+use serde::{Deserialize, Serialize};
+use crate::WhirConfig;
+use ark_poly::EvaluationDomain;
+
+#[derive(Debug, Serialize, Deserialize)]
+
+pub struct WHIRConfigGnark {
+    /// number of rounds
+    pub n_rounds: usize,
+    /// rate
+    pub rate: usize,
+    /// number of variables
+    pub n_vars: usize,
+    /// folding factor
+    pub folding_factor: Vec<usize>,
+    /// out of domain samples
+    pub ood_samples: Vec<usize>,
+    /// number of queries
+    pub num_queries: Vec<usize>,
+    /// proof of work bits
+    pub pow_bits: Vec<i32>,
+    /// final queries
+    pub final_queries: usize,
+    /// final proof of work bits
+    pub final_pow_bits: i32,
+    /// final folding proof of work bits
+    pub final_folding_pow_bits: i32,
+    /// domain generator string
+    pub domain_generator: String,
+    /// batch size
+    pub batch_size: usize,
+}
+
+impl WHIRConfigGnark {
+    pub fn new(whir_params: &WhirConfig) -> Self {
+        WHIRConfigGnark {
+            n_rounds: whir_params
+                .folding_factor
+                .compute_number_of_rounds(whir_params.mv_parameters.num_variables)
+                .0,
+            rate: whir_params.starting_log_inv_rate,
+            n_vars: whir_params.mv_parameters.num_variables,
+            folding_factor: (0..(whir_params
+                .folding_factor
+                .compute_number_of_rounds(whir_params.mv_parameters.num_variables)
+                .0))
+                .map(|round| whir_params.folding_factor.at_round(round))
+                .collect(),
+            ood_samples: whir_params
+                .round_parameters
+                .iter()
+                .map(|x| x.ood_samples)
+                .collect(),
+            num_queries: whir_params
+                .round_parameters
+                .iter()
+                .map(|x| x.num_queries)
+                .collect(),
+            pow_bits: whir_params
+                .round_parameters
+                .iter()
+                .map(|x| x.pow_bits as i32)
+                .collect(),
+            final_queries: whir_params.final_queries,
+            final_pow_bits: whir_params.final_pow_bits as i32,
+            final_folding_pow_bits: whir_params.final_folding_pow_bits as i32,
+            domain_generator: format!(
+                "{}",
+                whir_params.starting_domain.backing_domain.group_gen()
+            ),
+            batch_size: 
whir_params.batch_size, + } + } +} \ No newline at end of file diff --git a/provekit/common/src/lib.rs b/provekit/common/src/lib.rs index 7eb976a2..e8358c81 100644 --- a/provekit/common/src/lib.rs +++ b/provekit/common/src/lib.rs @@ -8,6 +8,7 @@ pub mod utils; mod whir_r1cs; pub mod witness; pub mod spark; +pub mod gnark; use crate::interner::{InternedFieldElement, Interner}; pub use { diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go index 35bd1f9c..40157801 100644 --- a/recursive-verifier/app/circuit/circuit.go +++ b/recursive-verifier/app/circuit/circuit.go @@ -14,6 +14,8 @@ import ( "github.com/consensys/gnark/frontend" "github.com/consensys/gnark/frontend/cs/r1cs" "github.com/consensys/gnark/std/math/uints" + gnarkNimue "github.com/reilabs/gnark-nimue" + skyscraper "github.com/reilabs/gnark-skyscraper" ) type Circuit struct { @@ -36,53 +38,94 @@ type Circuit struct { MatrixB []MatrixCell MatrixC []MatrixCell // Public Input - IO []byte + + IO []byte + UseSpark bool + SPARKTranscript []uints.U8 `gnark:",public"` + + SPARKIO []byte Transcript []uints.U8 `gnark:",public"` + WHIRA3 WHIRParams + WHIRRow WHIRParams + WHIRCol WHIRParams } func (circuit *Circuit) Define(api frontend.API) error { - sc, arthur, uapi, err := initializeComponents(api, circuit) - if err != nil { - return err - } + // sc, arthur, uapi, err := initializeComponents(api, circuit) + // if err != nil { + // return err + // } - rootHash, batchingRandomness, initialOODQueries, initialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRParamsWitness) + // rootHash, batchingRandomness, initialOODQueries, initialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRParamsWitness) - if err != nil { - return err - } + // if err != nil { + // return err + // } - tRand := make([]frontend.Variable, circuit.LogNumConstraints) - err = arthur.FillChallengeScalars(tRand) - if err != nil { - return err - } + // tRand := make([]frontend.Variable, circuit.LogNumConstraints) + // err = arthur.FillChallengeScalars(tRand) + // if err != nil { + // return err + // } - spartanSumcheckRand, spartanSumcheckLastValue, err := runZKSumcheck(api, sc, uapi, circuit, arthur, frontend.Variable(0), circuit.LogNumConstraints, 4, circuit.WHIRParamsHidingSpartan) - if err != nil { - return err - } + // spartanSumcheckRand, spartanSumcheckLastValue, err := runZKSumcheck(api, sc, uapi, circuit, arthur, frontend.Variable(0), circuit.LogNumConstraints, 4, circuit.WHIRParamsHidingSpartan) + // if err != nil { + // return err + // } - whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.WitnessMerkle, circuit.WitnessFirstRound, circuit.WHIRParamsWitness, [][]frontend.Variable{circuit.WitnessClaimedEvaluations, circuit.WitnessBlindingEvaluations}, circuit.WitnessLinearStatementEvaluations, batchingRandomness, initialOODQueries, initialOODAnswers, rootHash) + // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.WitnessMerkle, circuit.WitnessFirstRound, circuit.WHIRParamsWitness, [][]frontend.Variable{circuit.WitnessClaimedEvaluations, circuit.WitnessBlindingEvaluations}, circuit.WitnessLinearStatementEvaluations, batchingRandomness, initialOODQueries, initialOODAnswers, rootHash) - if err != nil { - return err - } + // if err != nil { + // return err + // } - x := api.Mul(api.Sub(api.Mul(circuit.WitnessClaimedEvaluations[0], circuit.WitnessClaimedEvaluations[1]), circuit.WitnessClaimedEvaluations[2]), calculateEQ(api, spartanSumcheckRand, tRand)) - 
api.AssertIsEqual(spartanSumcheckLastValue, x) + // x := api.Mul(api.Sub(api.Mul(circuit.WitnessClaimedEvaluations[0], circuit.WitnessClaimedEvaluations[1]), circuit.WitnessClaimedEvaluations[2]), calculateEQ(api, spartanSumcheckRand, tRand)) + // api.AssertIsEqual(spartanSumcheckLastValue, x) - matrixExtensionEvals := evaluateR1CSMatrixExtension(api, circuit, spartanSumcheckRand, whirFoldingRandomness) + if circuit.UseSpark { + sc := skyscraper.NewSkyscraper(api, 2) + arthur, err := gnarkNimue.NewSkyscraperArthur(api, sc, circuit.SPARKIO, circuit.SPARKTranscript[:], true) + if err != nil { + return err + } + uapi, err := uints.New[uints.U64](api) + if err != nil { + return err + } + + sumcheckRootHash, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRA3) + rowwiseRootHash, rowwiseBatchingRandomness, rowwiseInitialOODQueries, rowwiseInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRRow) + colwiseRootHash, colwiseBatchingRandomness, colwiseInitialOODQueries, colwiseInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRCol) + + _ = sumcheckRootHash + _ = sumcheckBatchingRandomness + _ = sumcheckInitialOODAnswers + _ = sumcheckInitialOODQueries + + _ = rowwiseRootHash + _ = rowwiseBatchingRandomness + _ = rowwiseInitialOODAnswers + _ = rowwiseInitialOODQueries + + _ = colwiseRootHash + _ = colwiseBatchingRandomness + _ = colwiseInitialOODAnswers + _ = colwiseInitialOODQueries + + _ = uapi + } else { + // matrixExtensionEvals := evaluateR1CSMatrixExtension(api, circuit, spartanSumcheckRand, whirFoldingRandomness) - for i := 0; i < 3; i++ { - api.AssertIsEqual(matrixExtensionEvals[i], circuit.WitnessLinearStatementEvaluations[i]) + // for i := range 3 { + // api.AssertIsEqual(matrixExtensionEvals[i], circuit.WitnessLinearStatementEvaluations[i]) + // } } return nil } func verifyCircuit( - deferred []Fp256, cfg Config, hints Hints, pk *groth16.ProvingKey, vk *groth16.VerifyingKey, outputCcsPath string, claimedEvaluations ClaimedEvaluations, internedR1CS R1CS, interner Interner, + deferred []Fp256, cfg Config, sparkConfig SparkConfig, hints Hints, pk *groth16.ProvingKey, vk *groth16.VerifyingKey, outputCcsPath string, claimedEvaluations ClaimedEvaluations, internedR1CS R1CS, interner Interner, evaluation string, ) error { transcriptT := make([]uints.U8, cfg.TranscriptLen) contTranscript := make([]uints.U8, cfg.TranscriptLen) @@ -91,6 +134,13 @@ func verifyCircuit( transcriptT[i] = uints.NewU8(cfg.Transcript[i]) } + sparkTranscriptT := make([]uints.U8, len(sparkConfig.Transcript)) + sparkContTranscript := make([]uints.U8, len(sparkConfig.Transcript)) + + for i := range sparkConfig.Transcript { + sparkTranscriptT[i] = uints.NewU8(sparkConfig.Transcript[i]) + } + witnessLinearStatementEvaluations := make([]frontend.Variable, 3) hidingSpartanLinearStatementEvaluations := make([]frontend.Variable, 1) contWitnessLinearStatementEvaluations := make([]frontend.Variable, 3) @@ -148,6 +198,12 @@ func verifyCircuit( } } + useSpark := evaluation == "spark" + + // Dev + contTranscript = []uints.U8{} + transcriptT = []uints.U8{} + // var circuit = Circuit{ IO: []byte(cfg.IOPattern), Transcript: contTranscript, @@ -169,6 +225,14 @@ func verifyCircuit( MatrixA: matrixA, MatrixB: matrixB, MatrixC: matrixC, + + SPARKIO: []byte(sparkConfig.IOPattern), + SPARKTranscript: sparkContTranscript, + WHIRA3: NewWhirParams(sparkConfig.WHIRA3), + WHIRRow: NewWhirParams(sparkConfig.WHIRRow), + WHIRCol: 
NewWhirParams(sparkConfig.WHIRCol), + + UseSpark: useSpark, } ccs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &circuit) @@ -221,6 +285,14 @@ func verifyCircuit( MatrixA: matrixA, MatrixB: matrixB, MatrixC: matrixC, + + SPARKIO: []byte(sparkConfig.IOPattern), + SPARKTranscript: sparkTranscriptT, + WHIRA3: NewWhirParams(sparkConfig.WHIRA3), + WHIRRow: NewWhirParams(sparkConfig.WHIRRow), + WHIRCol: NewWhirParams(sparkConfig.WHIRCol), + + UseSpark: useSpark, } witness, _ := frontend.NewWitness(&assignment, ecc.BN254.ScalarField()) diff --git a/recursive-verifier/app/circuit/common.go b/recursive-verifier/app/circuit/common.go index 5e681dc1..75f5e596 100644 --- a/recursive-verifier/app/circuit/common.go +++ b/recursive-verifier/app/circuit/common.go @@ -12,7 +12,7 @@ import ( arkSerialize "github.com/reilabs/go-ark-serialize" ) -func PrepareAndVerifyCircuit(config Config, r1cs R1CS, pk *groth16.ProvingKey, vk *groth16.VerifyingKey, outputCcsPath string) error { +func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, pk *groth16.ProvingKey, vk *groth16.VerifyingKey, outputCcsPath string, evaluation string) error { io := gnarkNimue.IOPattern{} err := io.Parse([]byte(config.IOPattern)) if err != nil { @@ -127,7 +127,7 @@ func PrepareAndVerifyCircuit(config Config, r1cs R1CS, pk *groth16.ProvingKey, v witnessHints: witnessData, spartanHidingHint: hidingSpartanData, } - err = verifyCircuit(deferred, config, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner) + err = verifyCircuit(deferred, config, sparkConfig, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner, evaluation) if err != nil { return fmt.Errorf("verification failed: %w", err) } diff --git a/recursive-verifier/app/circuit/types.go b/recursive-verifier/app/circuit/types.go index 67bc53b4..16cb2d81 100644 --- a/recursive-verifier/app/circuit/types.go +++ b/recursive-verifier/app/circuit/types.go @@ -125,3 +125,22 @@ type ClaimedEvaluations struct { FSums []Fp256 GSums []Fp256 } + +// type SPARKWhirParams struct { +// RowWitness WHIRConfig `json:"row"` +// ColWitness WHIRConfig `json:"col"` +// AWitness WHIRConfig `json:"a"` +// BWitness WHIRConfig `json:"b"` +// CWitness WHIRConfig `json:"c"` +// A3BatchedWitness WHIRConfig `json:"a_3batched"` +// B3BatchedWitness WHIRConfig `json:"b_3batched"` +// C3BatchedWitness WHIRConfig `json:"c_3batched"` +// } + +type SparkConfig struct { + IOPattern string `json:"io_pattern"` + Transcript []byte `json:"transcript"` + WHIRA3 WHIRConfig `json:"whir_a3"` + WHIRRow WHIRConfig `json:"whir_row"` + WHIRCol WHIRConfig `json:"whir_col"` +} diff --git a/spark-prover/src/bin/generate_test_request.rs b/spark-prover/src/bin/generate_test_request.rs index 37e1aaaf..d812b56a 100644 --- a/spark-prover/src/bin/generate_test_request.rs +++ b/spark-prover/src/bin/generate_test_request.rs @@ -7,7 +7,7 @@ fn main() { let spark_request = SPARKRequest { point_to_evaluate: Point { row: vec![FieldElement::from(0); 10], - col: vec![FieldElement::from(0); 9], + col: vec![FieldElement::from(0); 10], }, claimed_values: ClaimedValues { a: FieldElement::from(1), diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index d52567b5..dd94a61d 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -1,12 +1,12 @@ use { anyhow::{Context, Result}, - provekit_common::utils::next_power_of_two, + provekit_common::{file::write, utils::next_power_of_two, gnark::WHIRConfigGnark}, spark_prover::{ memory::{calculate_e_values_for_r1cs, 
calculate_memory},
         spark::prove_spark_for_single_matrix,
         utilities::{
             calculate_matrix_dimensions, create_io_pattern, deserialize_r1cs, deserialize_request,
-            get_spark_r1cs, SPARKProof,
+            get_spark_r1cs, SPARKProof, SPARKProofGnark,
         },
         whir::create_whir_configs,
     },
     std::{fs::File, io::Write, mem},
 };
@@ -68,9 +68,25 @@ fn main() -> Result<()> {
 
     let mut spark_proof_file = File::create("spark-prover/spark_proof.json")
         .context("Error: Failed to create the spark proof file")?;
+
     spark_proof_file
         .write_all(serde_json::to_string(&spark_proof).unwrap().as_bytes())
         .expect("Writing gnark parameters to a file failed");
 
+    let spark_proof_gnark = SPARKProofGnark {
+        transcript: spark_proof.transcript,
+        io_pattern: spark_proof.io_pattern,
+        whir_row: WHIRConfigGnark::new(&spark_proof.whir_params.row),
+        whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col),
+        whir_a3: WHIRConfigGnark::new(&spark_proof.whir_params.a_3batched),
+    };
+
+    let mut gnark_spark_proof_file = File::create("spark-prover/gnark_spark_proof.json")
+        .context("Error: Failed to create the spark proof file")?;
+
+    gnark_spark_proof_file
+        .write_all(serde_json::to_string(&spark_proof_gnark).unwrap().as_bytes())
+        .expect("Writing spark gnark parameters to a file failed");
+
     Ok(())
 }
diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs
index fb79a399..ae16a621 100644
--- a/spark-prover/src/utilities/mod.rs
+++ b/spark-prover/src/utilities/mod.rs
@@ -4,7 +4,7 @@ use {
     crate::whir::SPARKWHIRConfigs,
     anyhow::{Context, Result},
     provekit_common::{
-        spark::SPARKRequest, utils::{next_power_of_two, serde_ark, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq}, FieldElement, HydratedSparseMatrix, WhirConfig, R1CS
+        gnark::WHIRConfigGnark, spark::SPARKRequest, utils::{next_power_of_two, serde_ark, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq}, FieldElement, HydratedSparseMatrix, WhirConfig, R1CS
     },
     serde::{Deserialize, Serialize},
     std::fs,
@@ -54,3 +54,12 @@ pub fn calculate_matrix_dimensions(r1cs: &R1CS) -> MatrixDimensions {
         c_nonzero_terms: r1cs.c.num_entries(),
     }
 }
+
+#[derive(Serialize, Deserialize)]
+pub struct SPARKProofGnark {
+    pub transcript: Vec<u8>,
+    pub io_pattern: String,
+    pub whir_row: WHIRConfigGnark,
+    pub whir_col: WHIRConfigGnark,
+    pub whir_a3: WHIRConfigGnark,
+}
\ No newline at end of file
diff --git a/tooling/provekit-gnark/src/gnark_config.rs b/tooling/provekit-gnark/src/gnark_config.rs
index 968ca283..74588a63 100644
--- a/tooling/provekit-gnark/src/gnark_config.rs
+++ b/tooling/provekit-gnark/src/gnark_config.rs
@@ -1,6 +1,5 @@
 use {
-    ark_poly::EvaluationDomain,
-    provekit_common::{IOPattern, WhirConfig},
+    provekit_common::{gnark::WHIRConfigGnark, IOPattern, WhirConfig},
     serde::{Deserialize, Serialize},
     std::{fs::File, io::Write},
     tracing::instrument,
@@ -27,77 +26,6 @@ pub struct GnarkConfig {
     pub transcript_len: usize,
 }
 
-#[derive(Debug, Serialize, Deserialize)]
-
-pub struct WHIRConfigGnark {
-    /// number of rounds
-    pub n_rounds: usize,
-    /// rate
-    pub rate: usize,
-    /// number of variables
-    pub n_vars: usize,
-    /// folding factor
-    pub folding_factor: Vec<usize>,
-    /// out of domain samples
-    pub ood_samples: Vec<usize>,
-    /// number of queries
-    pub num_queries: Vec<usize>,
-    /// proof of work bits
-    pub pow_bits: Vec<i32>,
-    /// final queries
-    pub final_queries: usize,
-    /// final proof of work bits
-    pub final_pow_bits: i32,
-    /// final folding proof of work bits
-    pub final_folding_pow_bits: i32,
-    /// domain generator string
-    pub domain_generator: String,
-    /// batch size
-    pub batch_size: usize,
-} - -impl WHIRConfigGnark { - pub fn new(whir_params: &WhirConfig) -> Self { - WHIRConfigGnark { - n_rounds: whir_params - .folding_factor - .compute_number_of_rounds(whir_params.mv_parameters.num_variables) - .0, - rate: whir_params.starting_log_inv_rate, - n_vars: whir_params.mv_parameters.num_variables, - folding_factor: (0..(whir_params - .folding_factor - .compute_number_of_rounds(whir_params.mv_parameters.num_variables) - .0)) - .map(|round| whir_params.folding_factor.at_round(round)) - .collect(), - ood_samples: whir_params - .round_parameters - .iter() - .map(|x| x.ood_samples) - .collect(), - num_queries: whir_params - .round_parameters - .iter() - .map(|x| x.num_queries) - .collect(), - pow_bits: whir_params - .round_parameters - .iter() - .map(|x| x.pow_bits as i32) - .collect(), - final_queries: whir_params.final_queries, - final_pow_bits: whir_params.final_pow_bits as i32, - final_folding_pow_bits: whir_params.final_folding_pow_bits as i32, - domain_generator: format!( - "{}", - whir_params.starting_domain.backing_domain.group_gen() - ), - batch_size: whir_params.batch_size, - } - } -} - /// Writes config used for Gnark circuit to a file #[instrument(skip_all)] pub fn gnark_parameters( From dd4ebe90f2bfa193d74be5cf84e00698bfe1eeb0 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Thu, 25 Sep 2025 08:24:41 +0800 Subject: [PATCH 13/34] Buggy: current progress --- recursive-verifier/app/circuit/circuit.go | 95 +++++++++++++++---- recursive-verifier/app/circuit/common.go | 3 + recursive-verifier/app/circuit/mtUtilities.go | 8 +- recursive-verifier/app/circuit/types.go | 12 ++- spark-prover/src/main.rs | 5 + spark-prover/src/utilities/mod.rs | 1 + 6 files changed, 95 insertions(+), 29 deletions(-) diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go index 40157801..05c7b5fd 100644 --- a/recursive-verifier/app/circuit/circuit.go +++ b/recursive-verifier/app/circuit/circuit.go @@ -32,7 +32,8 @@ type Circuit struct { WitnessMerkle Merkle WitnessFirstRound Merkle WHIRParamsWitness WHIRParams - WHIRParamsHidingSpartan WHIRParams + // Is this not used? 
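+	// It is still referenced by the (currently commented-out) runZKSumcheck
+	// call for the hiding Spartan sumcheck, so it is presumably kept until
+	// that path is deleted for good.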
+ WHIRParamsHidingSpartan WHIRParams MatrixA []MatrixCell MatrixB []MatrixCell @@ -43,11 +44,13 @@ type Circuit struct { UseSpark bool SPARKTranscript []uints.U8 `gnark:",public"` - SPARKIO []byte - Transcript []uints.U8 `gnark:",public"` - WHIRA3 WHIRParams - WHIRRow WHIRParams - WHIRCol WHIRParams + SPARKIO []byte + Transcript []uints.U8 `gnark:",public"` + WHIRA3 WHIRParams + WHIRRow WHIRParams + WHIRCol WHIRParams + SparkSumcheckFirstRound Merkle + SparkSumcheckMerkle Merkle } func (circuit *Circuit) Define(api frontend.API) error { @@ -68,7 +71,6 @@ func (circuit *Circuit) Define(api frontend.API) error { // return err // } - // spartanSumcheckRand, spartanSumcheckLastValue, err := runZKSumcheck(api, sc, uapi, circuit, arthur, frontend.Variable(0), circuit.LogNumConstraints, 4, circuit.WHIRParamsHidingSpartan) // if err != nil { // return err // } @@ -94,8 +96,45 @@ func (circuit *Circuit) Define(api frontend.API) error { } sumcheckRootHash, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRA3) - rowwiseRootHash, rowwiseBatchingRandomness, rowwiseInitialOODQueries, rowwiseInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRRow) - colwiseRootHash, colwiseBatchingRandomness, colwiseInitialOODQueries, colwiseInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRCol) + if err != nil { + return err + } + rowwiseRootHash, rowwiseBatchingRandomness, rowwiseInitialOODQueries, rowwiseInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRA3) + if err != nil { + return err + } + colwiseRootHash, colwiseBatchingRandomness, colwiseInitialOODQueries, colwiseInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRA3) + if err != nil { + return err + } + + rowFinaltsRootHash, rowFinaltsBatchingRandomness, rowFinaltsInitialOODQueries, rowFinaltsInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRRow) + if err != nil { + return err + } + colFinaltsRootHash, colFinaltsBatchingRandomness, colFinaltsInitialOODQueries, colFinaltsInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRCol) + if err != nil { + return err + } + + api.Println(circuit.WitnessLinearStatementEvaluations[0]) + api.Println(circuit.WitnessLinearStatementEvaluations[1]) + api.Println(circuit.WitnessLinearStatementEvaluations[2]) + sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, circuit.WitnessLinearStatementEvaluations[0], circuit.LogANumTerms, 4) + if err != nil { + return err + } + + _ = sparkSumcheckFoldingRandomness + _ = sparkSumcheckLastEval + + // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, circuit.WHIRA3, [][]frontend.Variable{}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash) + // if err != nil { + // return err + // } + // _ = whirFoldingRandomness + + // circuit.WitnessLinearStatementEvaluations[i] _ = sumcheckRootHash _ = sumcheckBatchingRandomness @@ -112,6 +151,16 @@ func (circuit *Circuit) Define(api frontend.API) error { _ = colwiseInitialOODAnswers _ = colwiseInitialOODQueries + _ = rowFinaltsRootHash + _ = rowFinaltsBatchingRandomness + _ = rowFinaltsInitialOODAnswers + _ = rowFinaltsInitialOODQueries + + _ = colFinaltsRootHash + _ = colFinaltsBatchingRandomness + _ = colFinaltsInitialOODAnswers + _ = colFinaltsInitialOODQueries + _ = uapi } else { // 
matrixExtensionEvals := evaluateR1CSMatrixExtension(api, circuit, spartanSumcheckRand, whirFoldingRandomness) @@ -208,8 +257,6 @@ func verifyCircuit( IO: []byte(cfg.IOPattern), Transcript: contTranscript, LogNumConstraints: cfg.LogNumConstraints, - LogNumVariables: cfg.LogNumVariables, - LogANumTerms: cfg.LogANumTerms, WitnessClaimedEvaluations: fSums, WitnessBlindingEvaluations: gSums, WitnessLinearStatementEvaluations: contWitnessLinearStatementEvaluations, @@ -226,11 +273,14 @@ func verifyCircuit( MatrixB: matrixB, MatrixC: matrixC, - SPARKIO: []byte(sparkConfig.IOPattern), - SPARKTranscript: sparkContTranscript, - WHIRA3: NewWhirParams(sparkConfig.WHIRA3), - WHIRRow: NewWhirParams(sparkConfig.WHIRRow), - WHIRCol: NewWhirParams(sparkConfig.WHIRCol), + SPARKIO: []byte(sparkConfig.IOPattern), + SPARKTranscript: sparkContTranscript, + WHIRA3: NewWhirParams(sparkConfig.WHIRA3), + WHIRRow: NewWhirParams(sparkConfig.WHIRRow), + WHIRCol: NewWhirParams(sparkConfig.WHIRCol), + SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, true), + SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, true), + LogANumTerms: sparkConfig.LogANumTerms, UseSpark: useSpark, } @@ -286,11 +336,14 @@ func verifyCircuit( MatrixB: matrixB, MatrixC: matrixC, - SPARKIO: []byte(sparkConfig.IOPattern), - SPARKTranscript: sparkTranscriptT, - WHIRA3: NewWhirParams(sparkConfig.WHIRA3), - WHIRRow: NewWhirParams(sparkConfig.WHIRRow), - WHIRCol: NewWhirParams(sparkConfig.WHIRCol), + SPARKIO: []byte(sparkConfig.IOPattern), + SPARKTranscript: sparkTranscriptT, + WHIRA3: NewWhirParams(sparkConfig.WHIRA3), + WHIRRow: NewWhirParams(sparkConfig.WHIRRow), + WHIRCol: NewWhirParams(sparkConfig.WHIRCol), + SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, false), + SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, false), + LogANumTerms: sparkConfig.LogANumTerms, UseSpark: useSpark, } diff --git a/recursive-verifier/app/circuit/common.go b/recursive-verifier/app/circuit/common.go index 75f5e596..4b5a3b16 100644 --- a/recursive-verifier/app/circuit/common.go +++ b/recursive-verifier/app/circuit/common.go @@ -123,9 +123,12 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, var witnessData = consumeWhirData(config.WHIRConfigWitness, &merklePaths, &stirAnswers) + var sparkSumcheckData = consumeWhirData(sparkConfig.WHIRA3, &merklePaths, &stirAnswers) + hints := Hints{ witnessHints: witnessData, spartanHidingHint: hidingSpartanData, + sparkSumcheckData: sparkSumcheckData, } err = verifyCircuit(deferred, config, sparkConfig, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner, evaluation) if err != nil { diff --git a/recursive-verifier/app/circuit/mtUtilities.go b/recursive-verifier/app/circuit/mtUtilities.go index 264727b3..65e6eef5 100644 --- a/recursive-verifier/app/circuit/mtUtilities.go +++ b/recursive-verifier/app/circuit/mtUtilities.go @@ -68,9 +68,11 @@ func parseBatchedCommitment(arthur gnarkNimue.Arthur, whir_params WHIRParams) (f oodAnswers[i] = oodAnswer } - batchingRandomness := make([]frontend.Variable, 1) - if err := arthur.FillChallengeScalars(batchingRandomness); err != nil { - return nil, 0, nil, nil, err + batchingRandomness := []frontend.Variable{0} + if whir_params.BatchSize > 1 { + if err := arthur.FillChallengeScalars(batchingRandomness); err != nil { + return nil, 0, nil, nil, err + } } return rootHash[0], batchingRandomness[0], oodPoints, oodAnswers, nil } diff --git 
a/recursive-verifier/app/circuit/types.go b/recursive-verifier/app/circuit/types.go index 16cb2d81..9d604a99 100644 --- a/recursive-verifier/app/circuit/types.go +++ b/recursive-verifier/app/circuit/types.go @@ -104,6 +104,7 @@ type Config struct { type Hints struct { witnessHints ZKHint spartanHidingHint ZKHint + sparkSumcheckData ZKHint } type Hint struct { @@ -138,9 +139,10 @@ type ClaimedEvaluations struct { // } type SparkConfig struct { - IOPattern string `json:"io_pattern"` - Transcript []byte `json:"transcript"` - WHIRA3 WHIRConfig `json:"whir_a3"` - WHIRRow WHIRConfig `json:"whir_row"` - WHIRCol WHIRConfig `json:"whir_col"` + IOPattern string `json:"io_pattern"` + Transcript []byte `json:"transcript"` + WHIRA3 WHIRConfig `json:"whir_a3"` + WHIRRow WHIRConfig `json:"whir_row"` + WHIRCol WHIRConfig `json:"whir_col"` + LogANumTerms int `json:"log_a_num_terms"` } diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index dd94a61d..7c444d2a 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -79,6 +79,7 @@ fn main() -> Result<()> { whir_row: WHIRConfigGnark::new(&spark_proof.whir_params.row), whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col), whir_a3: WHIRConfigGnark::new(&spark_proof.whir_params.a_3batched), + log_a_num_terms: next_power_of_two(r1cs.a.num_entries()), }; let mut gnark_spark_proof_file = File::create("spark-prover/gnark_spark_proof.json") @@ -88,5 +89,9 @@ fn main() -> Result<()> { .write_all(serde_json::to_string(&spark_proof_gnark).unwrap().as_bytes()) .expect("Writing spark gnark parameters to a file failed"); + // println!("{:?}", request.claimed_values.a); + // println!("{:?}", request.claimed_values.b); + // println!("{:?}", request.claimed_values.c); + Ok(()) } diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index ae16a621..14b082f3 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -62,4 +62,5 @@ pub struct SPARKProofGnark { pub whir_row: WHIRConfigGnark, pub whir_col: WHIRConfigGnark, pub whir_a3: WHIRConfigGnark, + pub log_a_num_terms: usize, } \ No newline at end of file From d958f97fdac89cc8018c80a5bede736294177e99 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Thu, 25 Sep 2025 12:01:46 +0800 Subject: [PATCH 14/34] Buggy: Merkle parse fix --- recursive-verifier/app/circuit/circuit.go | 71 +++-- recursive-verifier/app/circuit/common.go | 91 +++++- recursive-verifier/app/circuit/mtUtilities.go | 24 +- recursive-verifier/app/circuit/types.go | 13 +- recursive-verifier/app/circuit/utilities.go | 5 +- recursive-verifier/app/circuit/whir.go | 280 +++++++++--------- .../app/circuit/whir_utilities.go | 11 + recursive-verifier/cmd/cli/main.go | 2 + spark-prover/src/bin/generate_test_r1cs.rs | 4 +- spark-prover/src/bin/generate_test_request.rs | 2 +- spark-prover/src/main.rs | 3 + spark-prover/src/utilities/mod.rs | 3 + 12 files changed, 338 insertions(+), 171 deletions(-) diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go index 05c7b5fd..d8838e3a 100644 --- a/recursive-verifier/app/circuit/circuit.go +++ b/recursive-verifier/app/circuit/circuit.go @@ -1,6 +1,7 @@ package circuit import ( + "fmt" "log" "os" @@ -51,6 +52,8 @@ type Circuit struct { WHIRCol WHIRParams SparkSumcheckFirstRound Merkle SparkSumcheckMerkle Merkle + AClaimed frontend.Variable + SparkSumcheckLast []frontend.Variable } func (circuit *Circuit) Define(api frontend.API) error { @@ -71,15 +74,23 @@ func (circuit *Circuit) 
Define(api frontend.API) error { // return err // } + // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.WitnessMerkle, circuit.WitnessFirstRound, circuit.WHIRParamsWitness, [][]frontend.Variable{circuit.WitnessClaimedEvaluations, circuit.WitnessBlindingEvaluations}, circuit.WitnessLinearStatementEvaluations, batchingRandomness, initialOODQueries, initialOODAnswers, rootHash, + // [][]frontend.Variable{{}, {}}, + // [][]frontend.Variable{}, + // ) + // if err != nil { // return err // } - // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.WitnessMerkle, circuit.WitnessFirstRound, circuit.WHIRParamsWitness, [][]frontend.Variable{circuit.WitnessClaimedEvaluations, circuit.WitnessBlindingEvaluations}, circuit.WitnessLinearStatementEvaluations, batchingRandomness, initialOODQueries, initialOODAnswers, rootHash) + // _ = whirFoldingRandomness - // if err != nil { - // return err - // } + // _ = rootHash + // _ = batchingRandomness + // _ = initialOODQueries + // _ = initialOODAnswers + // _ = sc + // _ = uapi // x := api.Mul(api.Sub(api.Mul(circuit.WitnessClaimedEvaluations[0], circuit.WitnessClaimedEvaluations[1]), circuit.WitnessClaimedEvaluations[2]), calculateEQ(api, spartanSumcheckRand, tRand)) // api.AssertIsEqual(spartanSumcheckLastValue, x) @@ -95,6 +106,7 @@ func (circuit *Circuit) Define(api frontend.API) error { return err } + // TODO: create a commitment struct sumcheckRootHash, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRA3) if err != nil { return err @@ -117,10 +129,8 @@ func (circuit *Circuit) Define(api frontend.API) error { return err } - api.Println(circuit.WitnessLinearStatementEvaluations[0]) - api.Println(circuit.WitnessLinearStatementEvaluations[1]) - api.Println(circuit.WitnessLinearStatementEvaluations[2]) - sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, circuit.WitnessLinearStatementEvaluations[0], circuit.LogANumTerms, 4) + // After debug: Change 1 to actual claimed value + sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, 1, circuit.LogANumTerms, 4) if err != nil { return err } @@ -128,11 +138,20 @@ func (circuit *Circuit) Define(api frontend.API) error { _ = sparkSumcheckFoldingRandomness _ = sparkSumcheckLastEval - // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, circuit.WHIRA3, [][]frontend.Variable{}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash) - // if err != nil { - // return err - // } - // _ = whirFoldingRandomness + whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, circuit.WHIRA3, [][]frontend.Variable{{}, {}}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash, + [][]frontend.Variable{{circuit.SparkSumcheckLast[0]}, {circuit.SparkSumcheckLast[1]}}, + [][]frontend.Variable{sparkSumcheckFoldingRandomness}, + ) + + // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, circuit.WHIRA3, [][]frontend.Variable{{}, {}}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash, + // [][]frontend.Variable{{1}, {1}}, + // 
[][]frontend.Variable{sparkSumcheckFoldingRandomness}, + // ) + + if err != nil { + return err + } + _ = whirFoldingRandomness // circuit.WitnessLinearStatementEvaluations[i] @@ -174,7 +193,7 @@ func (circuit *Circuit) Define(api frontend.API) error { } func verifyCircuit( - deferred []Fp256, cfg Config, sparkConfig SparkConfig, hints Hints, pk *groth16.ProvingKey, vk *groth16.VerifyingKey, outputCcsPath string, claimedEvaluations ClaimedEvaluations, internedR1CS R1CS, interner Interner, evaluation string, + deferred []Fp256, cfg Config, sparkConfig SparkConfig, hints Hints, pk *groth16.ProvingKey, vk *groth16.VerifyingKey, outputCcsPath string, claimedEvaluations ClaimedEvaluations, internedR1CS R1CS, interner Interner, evaluation string, sparkSumcheck []Fp256, ) error { transcriptT := make([]uints.U8, cfg.TranscriptLen) contTranscript := make([]uints.U8, cfg.TranscriptLen) @@ -200,6 +219,22 @@ func verifyCircuit( witnessLinearStatementEvaluations[1] = typeConverters.LimbsToBigIntMod(deferred[2].Limbs) witnessLinearStatementEvaluations[2] = typeConverters.LimbsToBigIntMod(deferred[3].Limbs) + contSparkSumcheckLast := make([]frontend.Variable, 3) + sparkSumcheckLast := make([]frontend.Variable, 3) + fmt.Println("Eh") + fmt.Println(len(sparkSumcheck)) + // fmt.Print("Spark sumcheck", sparkSumcheck) + sparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(sparkSumcheck[0].Limbs) + sparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(sparkSumcheck[1].Limbs) + sparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(sparkSumcheck[2].Limbs) + + // a := typeConverters.LimbsToBigIntMod(sparkSumcheck[0].Limbs) + // fmt.Print(a) + // b := typeConverters.LimbsToBigIntMod(sparkSumcheck[1].Limbs) + // fmt.Print(b) + // c := typeConverters.LimbsToBigIntMod(sparkSumcheck[2].Limbs) + // fmt.Print(c) + fSums, gSums := parseClaimedEvaluations(claimedEvaluations, true) matrixA := make([]MatrixCell, len(internedR1CS.A.Values)) @@ -248,10 +283,6 @@ func verifyCircuit( } useSpark := evaluation == "spark" - - // Dev - contTranscript = []uints.U8{} - transcriptT = []uints.U8{} // var circuit = Circuit{ IO: []byte(cfg.IOPattern), @@ -281,6 +312,8 @@ func verifyCircuit( SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, true), SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, true), LogANumTerms: sparkConfig.LogANumTerms, + AClaimed: sparkConfig.AClaimed, + SparkSumcheckLast: contSparkSumcheckLast, UseSpark: useSpark, } @@ -344,6 +377,8 @@ func verifyCircuit( SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, false), SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, false), LogANumTerms: sparkConfig.LogANumTerms, + AClaimed: sparkConfig.AClaimed, + SparkSumcheckLast: sparkSumcheckLast, UseSpark: useSpark, } diff --git a/recursive-verifier/app/circuit/common.go b/recursive-verifier/app/circuit/common.go index 4b5a3b16..d0ffb777 100644 --- a/recursive-verifier/app/circuit/common.go +++ b/recursive-verifier/app/circuit/common.go @@ -106,6 +106,88 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, config.Transcript = truncated + // Spark start + spark_io := gnarkNimue.IOPattern{} + err = spark_io.Parse([]byte(sparkConfig.IOPattern)) + if err != nil { + return fmt.Errorf("failed to parse IO pattern: %w", err) + } + + var spark_pointer uint64 + var spark_truncated_transcript []byte + + var sparkMerklePaths []FullMultiPath[KeccakDigest] + var sparkStirAnswers [][][]Fp256 + var 
sparkClaimedEvaluations []Fp256 + + for _, op := range spark_io.Ops { + switch op.Kind { + case gnarkNimue.Hint: + if spark_pointer+4 > uint64(len(sparkConfig.Transcript)) { + return fmt.Errorf("insufficient bytes for hint length") + } + hintLen := binary.LittleEndian.Uint32(sparkConfig.Transcript[spark_pointer : spark_pointer+4]) + start := spark_pointer + 4 + end := start + uint64(hintLen) + + if end > uint64(len(sparkConfig.Transcript)) { + return fmt.Errorf("insufficient bytes for merkle proof") + } + + switch string(op.Label) { + case "merkle_proof": + var path FullMultiPath[KeccakDigest] + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &path, + false, false, + ) + sparkMerklePaths = append(sparkMerklePaths, path) + + case "stir_answers": + var stirAnswersTemporary [][]Fp256 + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &stirAnswersTemporary, + false, false, + ) + sparkStirAnswers = append(sparkStirAnswers, stirAnswersTemporary) + case "sumcheck_last_folds": + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &sparkClaimedEvaluations, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize spark_last_folds: %w", err) + } + } + + if err != nil { + return fmt.Errorf("failed to deserialize merkle proof: %w", err) + } + + spark_pointer = end + + case gnarkNimue.Absorb: + start := spark_pointer + if string(op.Label) == "pow-nonce" { + spark_pointer += op.Size + } else { + spark_pointer += op.Size * 32 + } + + if spark_pointer > uint64(len(sparkConfig.Transcript)) { + return fmt.Errorf("absorb exceeds transcript length") + } + + spark_truncated_transcript = append(spark_truncated_transcript, sparkConfig.Transcript[start:spark_pointer]...) 
+ } + } + + sparkConfig.Transcript = spark_truncated_transcript + // Spark end + internerBytes, err := hex.DecodeString(r1cs.Interner.Values) if err != nil { return fmt.Errorf("failed to decode interner values: %w", err) @@ -123,14 +205,19 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, var witnessData = consumeWhirData(config.WHIRConfigWitness, &merklePaths, &stirAnswers) - var sparkSumcheckData = consumeWhirData(sparkConfig.WHIRA3, &merklePaths, &stirAnswers) + // Read from spark + var sparkSumcheckData = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) hints := Hints{ witnessHints: witnessData, spartanHidingHint: hidingSpartanData, sparkSumcheckData: sparkSumcheckData, } - err = verifyCircuit(deferred, config, sparkConfig, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner, evaluation) + + fmt.Print(len(hints.sparkSumcheckData.firstRoundMerklePaths.path.merklePaths)) + fmt.Print(len(hints.sparkSumcheckData.roundHints.merklePaths)) + err = verifyCircuit(deferred, config, sparkConfig, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner, evaluation, sparkClaimedEvaluations) + if err != nil { return fmt.Errorf("verification failed: %w", err) } diff --git a/recursive-verifier/app/circuit/mtUtilities.go b/recursive-verifier/app/circuit/mtUtilities.go index 65e6eef5..c7fe59d6 100644 --- a/recursive-verifier/app/circuit/mtUtilities.go +++ b/recursive-verifier/app/circuit/mtUtilities.go @@ -17,15 +17,18 @@ func initialSumcheck( initialOODAnswers []frontend.Variable, whirParams WHIRParams, linearStatementEvaluations [][]frontend.Variable, + evaluationStatementClaimedValues [][]frontend.Variable, ) (InitialSumcheckData, frontend.Variable, []frontend.Variable, error) { + lengthOfLinearStatementEvaluations := len(linearStatementEvaluations[0]) + lengthOfEvaluationStatement := len(evaluationStatementClaimedValues[0]) - initialCombinationRandomness, err := GenerateCombinationRandomness(api, arthur, len(initialOODAnswers)+len(linearStatementEvaluations[0])) + initialCombinationRandomness, err := GenerateCombinationRandomness(api, arthur, len(initialOODAnswers)+lengthOfLinearStatementEvaluations+lengthOfEvaluationStatement) if err != nil { return InitialSumcheckData{}, nil, nil, err } - combinedLinearStatementEvaluations := make([]frontend.Variable, len(linearStatementEvaluations[0])) //[0, 1, 2] - for evaluationIndex := range len(linearStatementEvaluations[0]) { + combinedLinearStatementEvaluations := make([]frontend.Variable, lengthOfLinearStatementEvaluations) //[0, 1, 2] + for evaluationIndex := range lengthOfLinearStatementEvaluations { sum := frontend.Variable(0) multiplier := frontend.Variable(1) for j := range len(linearStatementEvaluations) { @@ -34,7 +37,20 @@ func initialSumcheck( } combinedLinearStatementEvaluations[evaluationIndex] = sum } - OODAnswersAndStatmentEvaluations := append(initialOODAnswers, combinedLinearStatementEvaluations...) 
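+	// The claims of the batched polynomials are folded with powers of the
+	// batching randomness below, mirroring how the prover combines the
+	// committed polynomials into a single batched claim.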
+ + combinedEvaluationStatementEvaluations := make([]frontend.Variable, lengthOfEvaluationStatement) //[0, 1, 2] + for evaluationIndex := range lengthOfEvaluationStatement { + sum := frontend.Variable(0) + multiplier := frontend.Variable(1) + for j := range len(evaluationStatementClaimedValues) { + sum = api.Add(sum, api.Mul(evaluationStatementClaimedValues[j][evaluationIndex], multiplier)) + multiplier = api.Mul(multiplier, batchingRandomness) + } + combinedEvaluationStatementEvaluations[evaluationIndex] = sum + } + + OODAnswersAndStatmentEvaluations := append(append(initialOODAnswers, combinedLinearStatementEvaluations...), combinedEvaluationStatementEvaluations...) + lastEval := utilities.DotProduct(api, initialCombinationRandomness, OODAnswersAndStatmentEvaluations) initialSumcheckFoldingRandomness, lastEval, err := runWhirSumcheckRounds(api, lastEval, arthur, whirParams.FoldingFactorArray[0], 3) diff --git a/recursive-verifier/app/circuit/types.go b/recursive-verifier/app/circuit/types.go index 9d604a99..fc195c7a 100644 --- a/recursive-verifier/app/circuit/types.go +++ b/recursive-verifier/app/circuit/types.go @@ -139,10 +139,11 @@ type ClaimedEvaluations struct { // } type SparkConfig struct { - IOPattern string `json:"io_pattern"` - Transcript []byte `json:"transcript"` - WHIRA3 WHIRConfig `json:"whir_a3"` - WHIRRow WHIRConfig `json:"whir_row"` - WHIRCol WHIRConfig `json:"whir_col"` - LogANumTerms int `json:"log_a_num_terms"` + IOPattern string `json:"io_pattern"` + Transcript []byte `json:"transcript"` + WHIRA3 WHIRConfig `json:"whir_a3"` + WHIRRow WHIRConfig `json:"whir_row"` + WHIRCol WHIRConfig `json:"whir_col"` + LogANumTerms int `json:"log_a_num_terms"` + AClaimed frontend.Variable `json:"claimed_value_for_a"` } diff --git a/recursive-verifier/app/circuit/utilities.go b/recursive-verifier/app/circuit/utilities.go index 65f76d69..90198d22 100644 --- a/recursive-verifier/app/circuit/utilities.go +++ b/recursive-verifier/app/circuit/utilities.go @@ -195,7 +195,10 @@ func runZKSumcheck( lastEval, polynomialSums := unblindLastEval(api, arthur, lastEval, rhoRandomness) - _, err = RunZKWhir(api, arthur, uapi, sc, circuit.HidingSpartanMerkle, circuit.HidingSpartanFirstRound, whirParams, [][]frontend.Variable{{polynomialSums[0]}, {polynomialSums[1]}}, circuit.HidingSpartanLinearStatementEvaluations, batchingRandomness, initialOODQueries, initialOODAnswers, rootHash) + _, err = RunZKWhir(api, arthur, uapi, sc, circuit.HidingSpartanMerkle, circuit.HidingSpartanFirstRound, whirParams, [][]frontend.Variable{{polynomialSums[0]}, {polynomialSums[1]}}, circuit.HidingSpartanLinearStatementEvaluations, batchingRandomness, initialOODQueries, initialOODAnswers, rootHash, + [][]frontend.Variable{}, + [][]frontend.Variable{}, + ) if err != nil { return nil, nil, err } diff --git a/recursive-verifier/app/circuit/whir.go b/recursive-verifier/app/circuit/whir.go index b43ec3ad..9c62f756 100644 --- a/recursive-verifier/app/circuit/whir.go +++ b/recursive-verifier/app/circuit/whir.go @@ -62,12 +62,16 @@ func RunZKWhir( initialOODQueries []frontend.Variable, initialOODAnswers [][]frontend.Variable, rootHashes frontend.Variable, + + evaluationStatementClaimedValues [][]frontend.Variable, + evaluationPoints [][]frontend.Variable, + ) (totalFoldingRandomness []frontend.Variable, err error) { initialOODs := oodAnswers(api, initialOODAnswers, batchingRandomness) // batchSizeLen := whirParams.BatchSize - initialSumcheckData, lastEval, initialSumcheckFoldingRandomness, err := initialSumcheck(api, arthur, 
batchingRandomness, initialOODQueries, initialOODs, whirParams, linearStatementEvaluations) + initialSumcheckData, lastEval, initialSumcheckFoldingRandomness, err := initialSumcheck(api, arthur, batchingRandomness, initialOODQueries, initialOODs, whirParams, linearStatementEvaluations, evaluationStatementClaimedValues) if err != nil { return } @@ -207,6 +211,7 @@ func RunZKWhir( mainRoundData, totalFoldingRandomness, linearStatementValuesAtPoints, + evaluationPoints, ) api.AssertIsEqual( @@ -218,142 +223,143 @@ func RunZKWhir( } //nolint:unused -func runWhir( - api frontend.API, - arthur gnarkNimue.Arthur, - uapi *uints.BinaryField[uints.U64], - sc *skyscraper.Skyscraper, - circuit Merkle, - whirParams WHIRParams, - linearStatementEvaluations []frontend.Variable, - linearStatementValuesAtPoints []frontend.Variable, -) (totalFoldingRandomness []frontend.Variable, err error) { - if err = fillInAndVerifyRootHash(0, api, uapi, sc, circuit, arthur); err != nil { - return - } - - initialOODQueries, initialOODAnswers, tempErr := fillInOODPointsAndAnswers(whirParams.CommittmentOODSamples, arthur) - if tempErr != nil { - err = tempErr - return - } - - initialCombinationRandomness, tempErr := GenerateCombinationRandomness(api, arthur, whirParams.CommittmentOODSamples+len(linearStatementEvaluations)) - if tempErr != nil { - err = tempErr - return - } - - OODAnswersAndStatmentEvaluations := append(initialOODAnswers, linearStatementEvaluations...) - lastEval := utilities.DotProduct(api, initialCombinationRandomness, OODAnswersAndStatmentEvaluations) - - initialSumcheckFoldingRandomness, lastEval, tempErr := runWhirSumcheckRounds(api, lastEval, arthur, whirParams.FoldingFactorArray[0], 3) - if tempErr != nil { - err = tempErr - return - } - - initialData := InitialSumcheckData{ - InitialOODQueries: initialOODQueries, - InitialCombinationRandomness: initialCombinationRandomness, - } - - computedFold := computeFold(circuit.Leaves[0], initialSumcheckFoldingRandomness, api) - - mainRoundData := generateEmptyMainRoundData(whirParams) - - expDomainGenerator := utilities.Exponent(api, uapi, whirParams.StartingDomainBackingDomainGenerator, uints.NewU64(uint64(1< Result<()> { .write_all(serde_json::to_string(&spark_proof).unwrap().as_bytes()) .expect("Writing gnark parameters to a file failed"); + println!("Claimed value for A {:?}", request.claimed_values.a); //Reilabs Debug: + let spark_proof_gnark = SPARKProofGnark { transcript: spark_proof.transcript, io_pattern: spark_proof.io_pattern, @@ -80,6 +82,7 @@ fn main() -> Result<()> { whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col), whir_a3: WHIRConfigGnark::new(&spark_proof.whir_params.a_3batched), log_a_num_terms: next_power_of_two(r1cs.a.num_entries()), + claimed_value_for_a: request.claimed_values.a, }; let mut gnark_spark_proof_file = File::create("spark-prover/gnark_spark_proof.json") diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index 14b082f3..09e3e01b 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -63,4 +63,7 @@ pub struct SPARKProofGnark { pub whir_col: WHIRConfigGnark, pub whir_a3: WHIRConfigGnark, pub log_a_num_terms: usize, + // Remove once spark testing is completed + #[serde(with="serde_ark")] + pub claimed_value_for_a: FieldElement, } \ No newline at end of file From 0a690a99ba33224b9edbe08ca90186cc2626f529 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Fri, 26 Sep 2025 11:53:54 +0800 Subject: [PATCH 15/34] Fixes 3 poly --- 
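Notes: the a3 commitment batches three polynomials, so the circuit now takes
three final sumcheck folds and combines them with powers of the batching
randomness before the batched WHIR check. A minimal sketch of that
combination (assumed names only; gnark's frontend package is imported), not
the exact circuit code:

    // claimed = f0 + r*f1 + r^2*f2, where r is the batching randomness.
    func combineBatchedClaims(api frontend.API, folds [3]frontend.Variable, r frontend.Variable) frontend.Variable {
        claimed := api.Add(folds[0], api.Mul(folds[1], r))
        return api.Add(claimed, api.Mul(folds[2], api.Mul(r, r)))
    }

The same power-of-r combination shows up on the prover side when spark.rs
builds claimed_batched_value from sumcheck_final_folds.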
recursive-verifier/app/circuit/circuit.go | 42 +- recursive-verifier/app/circuit/common.go | 2 - recursive-verifier/app/circuit/mtUtilities.go | 19 +- recursive-verifier/app/circuit/utilities.go | 2 +- recursive-verifier/app/circuit/whir.go | 12 - recursive-verifier/cmd/cli/main.go | 2 - spark-prover/src/bin/generate_test_r1cs.rs | 4 +- spark-prover/src/bin/generate_test_request.rs | 4 +- spark-prover/src/bin/spark-verifier.rs | 296 +++++++------- spark-prover/src/main.rs | 42 +- spark-prover/src/spark.rs | 363 +++++++++--------- spark-prover/src/utilities/iopattern/mod.rs | 304 +++++++-------- 12 files changed, 543 insertions(+), 549 deletions(-) diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go index d8838e3a..8c28f9ba 100644 --- a/recursive-verifier/app/circuit/circuit.go +++ b/recursive-verifier/app/circuit/circuit.go @@ -1,7 +1,6 @@ package circuit import ( - "fmt" "log" "os" @@ -74,6 +73,14 @@ func (circuit *Circuit) Define(api frontend.API) error { // return err // } + // spartanSumcheckRand, spartanSumcheckLastValue, err := runZKSumcheck(api, sc, uapi, circuit, arthur, frontend.Variable(0), circuit.LogNumConstraints, 4, circuit.WHIRParamsHidingSpartan) + // if err != nil { + // return err + // } + + // _ = spartanSumcheckRand + // _ = spartanSumcheckLastValue + // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.WitnessMerkle, circuit.WitnessFirstRound, circuit.WHIRParamsWitness, [][]frontend.Variable{circuit.WitnessClaimedEvaluations, circuit.WitnessBlindingEvaluations}, circuit.WitnessLinearStatementEvaluations, batchingRandomness, initialOODQueries, initialOODAnswers, rootHash, // [][]frontend.Variable{{}, {}}, // [][]frontend.Variable{}, @@ -138,22 +145,25 @@ func (circuit *Circuit) Define(api frontend.API) error { _ = sparkSumcheckFoldingRandomness _ = sparkSumcheckLastEval - whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, circuit.WHIRA3, [][]frontend.Variable{{}, {}}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash, - [][]frontend.Variable{{circuit.SparkSumcheckLast[0]}, {circuit.SparkSumcheckLast[1]}}, + whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, circuit.WHIRA3, [][]frontend.Variable{{}, {}, {}}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash, + [][]frontend.Variable{{circuit.SparkSumcheckLast[0]}, {circuit.SparkSumcheckLast[1]}, {circuit.SparkSumcheckLast[2]}}, [][]frontend.Variable{sparkSumcheckFoldingRandomness}, ) - - // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, circuit.WHIRA3, [][]frontend.Variable{{}, {}}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash, - // [][]frontend.Variable{{1}, {1}}, - // [][]frontend.Variable{sparkSumcheckFoldingRandomness}, - // ) - if err != nil { return err } - _ = whirFoldingRandomness - // circuit.WitnessLinearStatementEvaluations[i] + tauGammaTemp := make([]frontend.Variable, 2) + if err := arthur.FillChallengeScalars(tauGammaTemp); err != nil { + return err + } + tau := tauGammaTemp[0] + gamma := tauGammaTemp[1] + + _ = tau + _ = gamma + + _ = whirFoldingRandomness _ = sumcheckRootHash _ = 
sumcheckBatchingRandomness @@ -221,20 +231,10 @@ func verifyCircuit( contSparkSumcheckLast := make([]frontend.Variable, 3) sparkSumcheckLast := make([]frontend.Variable, 3) - fmt.Println("Eh") - fmt.Println(len(sparkSumcheck)) - // fmt.Print("Spark sumcheck", sparkSumcheck) sparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(sparkSumcheck[0].Limbs) sparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(sparkSumcheck[1].Limbs) sparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(sparkSumcheck[2].Limbs) - // a := typeConverters.LimbsToBigIntMod(sparkSumcheck[0].Limbs) - // fmt.Print(a) - // b := typeConverters.LimbsToBigIntMod(sparkSumcheck[1].Limbs) - // fmt.Print(b) - // c := typeConverters.LimbsToBigIntMod(sparkSumcheck[2].Limbs) - // fmt.Print(c) - fSums, gSums := parseClaimedEvaluations(claimedEvaluations, true) matrixA := make([]MatrixCell, len(internedR1CS.A.Values)) diff --git a/recursive-verifier/app/circuit/common.go b/recursive-verifier/app/circuit/common.go index d0ffb777..9501ba6c 100644 --- a/recursive-verifier/app/circuit/common.go +++ b/recursive-verifier/app/circuit/common.go @@ -214,8 +214,6 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, sparkSumcheckData: sparkSumcheckData, } - fmt.Print(len(hints.sparkSumcheckData.firstRoundMerklePaths.path.merklePaths)) - fmt.Print(len(hints.sparkSumcheckData.roundHints.merklePaths)) err = verifyCircuit(deferred, config, sparkConfig, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner, evaluation, sparkClaimedEvaluations) if err != nil { diff --git a/recursive-verifier/app/circuit/mtUtilities.go b/recursive-verifier/app/circuit/mtUtilities.go index c7fe59d6..5f8a3f38 100644 --- a/recursive-verifier/app/circuit/mtUtilities.go +++ b/recursive-verifier/app/circuit/mtUtilities.go @@ -19,15 +19,21 @@ func initialSumcheck( linearStatementEvaluations [][]frontend.Variable, evaluationStatementClaimedValues [][]frontend.Variable, ) (InitialSumcheckData, frontend.Variable, []frontend.Variable, error) { + // ) error { lengthOfLinearStatementEvaluations := len(linearStatementEvaluations[0]) lengthOfEvaluationStatement := len(evaluationStatementClaimedValues[0]) initialCombinationRandomness, err := GenerateCombinationRandomness(api, arthur, len(initialOODAnswers)+lengthOfLinearStatementEvaluations+lengthOfEvaluationStatement) + // initialCombinationRandomness, err := GenerateCombinationRandomness(api, arthur, len(initialOODAnswers)+lengthOfLinearStatementEvaluations) if err != nil { + // return nil return InitialSumcheckData{}, nil, nil, err } - combinedLinearStatementEvaluations := make([]frontend.Variable, lengthOfLinearStatementEvaluations) //[0, 1, 2] + _ = initialCombinationRandomness + _ = lengthOfEvaluationStatement + + combinedLinearStatementEvaluations := make([]frontend.Variable, lengthOfLinearStatementEvaluations) for evaluationIndex := range lengthOfLinearStatementEvaluations { sum := frontend.Variable(0) multiplier := frontend.Variable(1) @@ -38,7 +44,7 @@ func initialSumcheck( combinedLinearStatementEvaluations[evaluationIndex] = sum } - combinedEvaluationStatementEvaluations := make([]frontend.Variable, lengthOfEvaluationStatement) //[0, 1, 2] + combinedEvaluationStatementEvaluations := make([]frontend.Variable, lengthOfEvaluationStatement) for evaluationIndex := range lengthOfEvaluationStatement { sum := frontend.Variable(0) multiplier := frontend.Variable(1) @@ -49,19 +55,28 @@ func initialSumcheck( combinedEvaluationStatementEvaluations[evaluationIndex] = sum } + 
api.Println(combinedEvaluationStatementEvaluations) + OODAnswersAndStatmentEvaluations := append(append(initialOODAnswers, combinedLinearStatementEvaluations...), combinedEvaluationStatementEvaluations...) + // OODAnswersAndStatmentEvaluations := append(initialOODAnswers, combinedLinearStatementEvaluations...) lastEval := utilities.DotProduct(api, initialCombinationRandomness, OODAnswersAndStatmentEvaluations) + // _ = lastEval + initialSumcheckFoldingRandomness, lastEval, err := runWhirSumcheckRounds(api, lastEval, arthur, whirParams.FoldingFactorArray[0], 3) if err != nil { + // return nil return InitialSumcheckData{}, nil, nil, err } + _ = initialSumcheckFoldingRandomness + return InitialSumcheckData{ InitialOODQueries: initialOODQueries, InitialCombinationRandomness: initialCombinationRandomness, }, lastEval, initialSumcheckFoldingRandomness, nil + } func parseBatchedCommitment(arthur gnarkNimue.Arthur, whir_params WHIRParams) (frontend.Variable, frontend.Variable, []frontend.Variable, [][]frontend.Variable, error) { diff --git a/recursive-verifier/app/circuit/utilities.go b/recursive-verifier/app/circuit/utilities.go index 90198d22..16ab8f28 100644 --- a/recursive-verifier/app/circuit/utilities.go +++ b/recursive-verifier/app/circuit/utilities.go @@ -196,7 +196,7 @@ func runZKSumcheck( lastEval, polynomialSums := unblindLastEval(api, arthur, lastEval, rhoRandomness) _, err = RunZKWhir(api, arthur, uapi, sc, circuit.HidingSpartanMerkle, circuit.HidingSpartanFirstRound, whirParams, [][]frontend.Variable{{polynomialSums[0]}, {polynomialSums[1]}}, circuit.HidingSpartanLinearStatementEvaluations, batchingRandomness, initialOODQueries, initialOODAnswers, rootHash, - [][]frontend.Variable{}, + [][]frontend.Variable{{}, {}}, [][]frontend.Variable{}, ) if err != nil { diff --git a/recursive-verifier/app/circuit/whir.go b/recursive-verifier/app/circuit/whir.go index 9c62f756..f5bbde77 100644 --- a/recursive-verifier/app/circuit/whir.go +++ b/recursive-verifier/app/circuit/whir.go @@ -69,24 +69,12 @@ func RunZKWhir( ) (totalFoldingRandomness []frontend.Variable, err error) { initialOODs := oodAnswers(api, initialOODAnswers, batchingRandomness) - // batchSizeLen := whirParams.BatchSize initialSumcheckData, lastEval, initialSumcheckFoldingRandomness, err := initialSumcheck(api, arthur, batchingRandomness, initialOODQueries, initialOODs, whirParams, linearStatementEvaluations, evaluationStatementClaimedValues) if err != nil { return } - copyOfFirstLeaves := make([][][]frontend.Variable, len(firstRound.Leaves)) - for i := range len(firstRound.Leaves) { - copyOfFirstLeaves[i] = make([][]frontend.Variable, len(firstRound.Leaves[i])) - for j := range len(firstRound.Leaves[i]) { - copyOfFirstLeaves[i][j] = make([]frontend.Variable, len(firstRound.Leaves[i][j])) - for k := range len(firstRound.Leaves[i][j]) { - copyOfFirstLeaves[i][j][k] = firstRound.Leaves[i][j][k] - } - } - } - roundAnswers := make([][][]frontend.Variable, len(circuit.Leaves)+1) foldSize := 1 << whirParams.FoldingFactorArray[0] diff --git a/recursive-verifier/cmd/cli/main.go b/recursive-verifier/cmd/cli/main.go index ee4368a3..212aa775 100644 --- a/recursive-verifier/cmd/cli/main.go +++ b/recursive-verifier/cmd/cli/main.go @@ -119,8 +119,6 @@ func main() { return fmt.Errorf("failed to unmarshal spark config JSON: %w", err) } - fmt.Print(sparkConfig.AClaimed) - var r1csFile []byte if r1csFilePath != "" { r1csFile, err = os.ReadFile(r1csFilePath) diff --git a/spark-prover/src/bin/generate_test_r1cs.rs 
b/spark-prover/src/bin/generate_test_r1cs.rs index 0edfa37b..41e402f9 100644 --- a/spark-prover/src/bin/generate_test_r1cs.rs +++ b/spark-prover/src/bin/generate_test_r1cs.rs @@ -5,10 +5,10 @@ use { fn main() { let mut r1cs = R1CS::new(); - r1cs.grow_matrices(512, 512); + r1cs.grow_matrices(256, 256); let interned_1 = r1cs.interner.intern(FieldElement::from(1)); - for i in 0..512 { + for i in 0..256 { r1cs.a.set(i, i, interned_1); r1cs.b.set(i, i, interned_1); r1cs.c.set(i, i, interned_1); diff --git a/spark-prover/src/bin/generate_test_request.rs b/spark-prover/src/bin/generate_test_request.rs index 35957a36..37ab5075 100644 --- a/spark-prover/src/bin/generate_test_request.rs +++ b/spark-prover/src/bin/generate_test_request.rs @@ -6,8 +6,8 @@ use { fn main() { let spark_request = SPARKRequest { point_to_evaluate: Point { - row: vec![FieldElement::from(0); 9], - col: vec![FieldElement::from(0); 10], + row: vec![FieldElement::from(0); 8], + col: vec![FieldElement::from(0); 9], }, claimed_values: ClaimedValues { a: FieldElement::from(1), diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index 2770800f..2e50d054 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -50,29 +50,29 @@ fn main() -> Result<()> { &request.claimed_values.a, )?; - verify_spark_single_matrix( - &spark_proof.whir_params.row, - &spark_proof.whir_params.col, - &spark_proof.whir_params.b_3batched, - spark_proof.matrix_dimensions.num_rows, - spark_proof.matrix_dimensions.num_cols, - spark_proof.matrix_dimensions.b_nonzero_terms, - &mut arthur, - &request, - &request.claimed_values.b, - )?; - - verify_spark_single_matrix( - &spark_proof.whir_params.row, - &spark_proof.whir_params.col, - &spark_proof.whir_params.c_3batched, - spark_proof.matrix_dimensions.num_rows, - spark_proof.matrix_dimensions.num_cols, - spark_proof.matrix_dimensions.c_nonzero_terms, - &mut arthur, - &request, - &request.claimed_values.c, - )?; + // verify_spark_single_matrix( + // &spark_proof.whir_params.row, + // &spark_proof.whir_params.col, + // &spark_proof.whir_params.b_3batched, + // spark_proof.matrix_dimensions.num_rows, + // spark_proof.matrix_dimensions.num_cols, + // spark_proof.matrix_dimensions.b_nonzero_terms, + // &mut arthur, + // &request, + // &request.claimed_values.b, + // )?; + + // verify_spark_single_matrix( + // &spark_proof.whir_params.row, + // &spark_proof.whir_params.col, + // &spark_proof.whir_params.c_3batched, + // spark_proof.matrix_dimensions.num_rows, + // spark_proof.matrix_dimensions.num_cols, + // spark_proof.matrix_dimensions.c_nonzero_terms, + // &mut arthur, + // &request, + // &request.claimed_values.c, + // )?; Ok(()) } @@ -136,178 +136,178 @@ pub fn verify_spark_single_matrix( let tau = tau_and_gamma[0]; let gamma = tau_and_gamma[1]; - let gpa_result = gpa_sumcheck_verifier( - arthur, - next_power_of_two(num_rows) + 2, - )?; + // let gpa_result = gpa_sumcheck_verifier( + // arthur, + // next_power_of_two(num_rows) + 2, + // )?; - let claimed_init = gpa_result.claimed_values[0]; - let claimed_final = gpa_result.claimed_values[1]; + // let claimed_init = gpa_result.claimed_values[0]; + // let claimed_final = gpa_result.claimed_values[1]; - let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - let init_adr = calculate_adr(&evaluation_randomness.to_vec()); - let init_mem = calculate_eq( - 
&request.point_to_evaluate.row, - &evaluation_randomness.to_vec(), - ); - let init_cntr = FieldElement::from(0); + // let init_adr = calculate_adr(&evaluation_randomness.to_vec()); + // let init_mem = calculate_eq( + // &request.point_to_evaluate.row, + // &evaluation_randomness.to_vec(), + // ); + // let init_cntr = FieldElement::from(0); - let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; + // let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; - let final_cntr: FieldElement = arthur.hint()?; + // let final_cntr: FieldElement = arthur.hint()?; - let mut final_cntr_statement = - Statement::::new(next_power_of_two(num_rows)); - final_cntr_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - final_cntr, - ); + // let mut final_cntr_statement = + // Statement::::new(next_power_of_two(num_rows)); + // final_cntr_statement.add_constraint( + // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + // final_cntr, + // ); - let final_cntr_verifier = Verifier::new(row_config); - final_cntr_verifier - .verify(arthur, &a_row_finalts_commitment, &final_cntr_statement) - .context("while verifying WHIR")?; + // let final_cntr_verifier = Verifier::new(row_config); + // final_cntr_verifier + // .verify(arthur, &a_row_finalts_commitment, &final_cntr_statement) + // .context("while verifying WHIR")?; - let final_adr = calculate_adr(&evaluation_randomness.to_vec()); - let final_mem = calculate_eq( - &request.point_to_evaluate.row, - &evaluation_randomness.to_vec(), - ); + // let final_adr = calculate_adr(&evaluation_randomness.to_vec()); + // let final_mem = calculate_eq( + // &request.point_to_evaluate.row, + // &evaluation_randomness.to_vec(), + // ); - let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; + // let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; - let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) - + final_opening * last_randomness[0]; + // let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + // + final_opening * last_randomness[0]; - ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + // ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - let gpa_result = gpa_sumcheck_verifier( - arthur, - next_power_of_two(num_nonzero_terms) + 2, - )?; + // let gpa_result = gpa_sumcheck_verifier( + // arthur, + // next_power_of_two(num_nonzero_terms) + 2, + // )?; - let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - let claimed_rs = gpa_result.claimed_values[0]; - let claimed_ws = gpa_result.claimed_values[1]; + // let claimed_rs = gpa_result.claimed_values[0]; + // let claimed_ws = gpa_result.claimed_values[1]; - let rs_adr: FieldElement = arthur.hint()?; - let rs_mem: FieldElement = arthur.hint()?; - let rs_timestamp: FieldElement = arthur.hint()?; + // let rs_adr: FieldElement = arthur.hint()?; + // let rs_mem: FieldElement = arthur.hint()?; + // let rs_timestamp: FieldElement = arthur.hint()?; - let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; - let ws_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; + // let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; + // let ws_opening = rs_adr * 
gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; - let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) - + ws_opening * last_randomness[0]; + // let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) + // + ws_opening * last_randomness[0]; - ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + // ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - let mut a_spark_rowwise_statement_verifier = Statement::::new(next_power_of_two( - num_nonzero_terms, - )); + // let mut a_spark_rowwise_statement_verifier = Statement::::new(next_power_of_two( + // num_nonzero_terms, + // )); - a_spark_rowwise_statement_verifier.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - rs_adr + - rs_mem * a_rowwise_commitment.batching_randomness + - rs_timestamp * a_rowwise_commitment.batching_randomness * a_rowwise_commitment.batching_randomness, - ); + // a_spark_rowwise_statement_verifier.add_constraint( + // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + // rs_adr + + // rs_mem * a_rowwise_commitment.batching_randomness + + // rs_timestamp * a_rowwise_commitment.batching_randomness * a_rowwise_commitment.batching_randomness, + // ); - a_spark_sumcheck_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; + // a_spark_sumcheck_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; - ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); + // ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); - // Matrix A - Colwise + // // Matrix A - Colwise - let mut tau_and_gamma = [FieldElement::from(0); 2]; - arthur.fill_challenge_scalars(&mut tau_and_gamma)?; - let tau = tau_and_gamma[0]; - let gamma = tau_and_gamma[1]; + // let mut tau_and_gamma = [FieldElement::from(0); 2]; + // arthur.fill_challenge_scalars(&mut tau_and_gamma)?; + // let tau = tau_and_gamma[0]; + // let gamma = tau_and_gamma[1]; - let gpa_result = gpa_sumcheck_verifier( - arthur, - next_power_of_two(num_cols) + 2, - )?; + // let gpa_result = gpa_sumcheck_verifier( + // arthur, + // next_power_of_two(num_cols) + 2, + // )?; - let claimed_init = gpa_result.claimed_values[0]; - let claimed_final = gpa_result.claimed_values[1]; + // let claimed_init = gpa_result.claimed_values[0]; + // let claimed_final = gpa_result.claimed_values[1]; - let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - let init_adr = calculate_adr(&evaluation_randomness.to_vec()); - let init_mem = calculate_eq( - &request.point_to_evaluate.col[1..], - &evaluation_randomness.to_vec(), - ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]); - let init_cntr = FieldElement::from(0); + // let init_adr = calculate_adr(&evaluation_randomness.to_vec()); + // let init_mem = calculate_eq( + // &request.point_to_evaluate.col[1..], + // &evaluation_randomness.to_vec(), + // ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]); + // let init_cntr = FieldElement::from(0); - let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; + // let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; - let final_cntr: FieldElement = arthur.hint()?; + // let final_cntr: FieldElement = arthur.hint()?; - let mut final_cntr_statement = - 
Statement::::new(next_power_of_two(num_cols)); - final_cntr_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - final_cntr, - ); + // let mut final_cntr_statement = + // Statement::::new(next_power_of_two(num_cols)); + // final_cntr_statement.add_constraint( + // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + // final_cntr, + // ); - let final_cntr_verifier = Verifier::new(col_config); - final_cntr_verifier - .verify(arthur, &a_col_finalts_commitment, &final_cntr_statement) - .context("while verifying WHIR")?; + // let final_cntr_verifier = Verifier::new(col_config); + // final_cntr_verifier + // .verify(arthur, &a_col_finalts_commitment, &final_cntr_statement) + // .context("while verifying WHIR")?; - let final_adr = calculate_adr(&evaluation_randomness.to_vec()); - let final_mem = calculate_eq( - &request.point_to_evaluate.col[1..], - &evaluation_randomness.to_vec(), - ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]); + // let final_adr = calculate_adr(&evaluation_randomness.to_vec()); + // let final_mem = calculate_eq( + // &request.point_to_evaluate.col[1..], + // &evaluation_randomness.to_vec(), + // ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]); - let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; + // let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; - let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) - + final_opening * last_randomness[0]; + // let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + // + final_opening * last_randomness[0]; - ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + // ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - let gpa_result = gpa_sumcheck_verifier( - arthur, - next_power_of_two(num_nonzero_terms) + 2, - )?; + // let gpa_result = gpa_sumcheck_verifier( + // arthur, + // next_power_of_two(num_nonzero_terms) + 2, + // )?; - let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - let claimed_rs = gpa_result.claimed_values[0]; - let claimed_ws = gpa_result.claimed_values[1]; + // let claimed_rs = gpa_result.claimed_values[0]; + // let claimed_ws = gpa_result.claimed_values[1]; - let rs_adr: FieldElement = arthur.hint()?; - let rs_mem: FieldElement = arthur.hint()?; - let rs_timestamp: FieldElement = arthur.hint()?; + // let rs_adr: FieldElement = arthur.hint()?; + // let rs_mem: FieldElement = arthur.hint()?; + // let rs_timestamp: FieldElement = arthur.hint()?; - let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; - let ws_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; + // let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; + // let ws_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; - let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) - + ws_opening * last_randomness[0]; + // let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) + // + ws_opening * last_randomness[0]; - ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + // ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - let mut a_spark_colwise_statement_verifier = 
Statement::<FieldElement>::new(next_power_of_two(
-        num_nonzero_terms,
-    ));
+    // let mut a_spark_colwise_statement_verifier = Statement::<FieldElement>::new(next_power_of_two(
+    //     num_nonzero_terms,
+    // ));
 
-    a_spark_colwise_statement_verifier.add_constraint(
-        Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())),
-        rs_adr +
-        rs_mem * a_colwise_commitment.batching_randomness +
-        rs_timestamp * a_colwise_commitment.batching_randomness * a_colwise_commitment.batching_randomness,
-    );
+    // a_spark_colwise_statement_verifier.add_constraint(
+    //     Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())),
+    //     rs_adr +
+    //     rs_mem * a_colwise_commitment.batching_randomness +
+    //     rs_timestamp * a_colwise_commitment.batching_randomness * a_colwise_commitment.batching_randomness,
+    // );
 
-    a_spark_sumcheck_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?;
+    // a_spark_sumcheck_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?;
 
-    ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final);
+    // ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final);
 
     Ok(())
 }
diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs
index 21a737e2..8c83623c 100644
--- a/spark-prover/src/main.rs
+++ b/spark-prover/src/main.rs
@@ -39,25 +39,25 @@ fn main() -> Result<()> {
         &spark_whir_configs.a_3batched,
     )?;
 
-    prove_spark_for_single_matrix(
-        &mut merlin,
-        spark_r1cs.b,
-        &memory,
-        e_values.b,
-        request.claimed_values.b,
-        &spark_whir_configs,
-        &spark_whir_configs.b_3batched,
-    )?;
+    // prove_spark_for_single_matrix(
+    //     &mut merlin,
+    //     spark_r1cs.b,
+    //     &memory,
+    //     e_values.b,
+    //     request.claimed_values.b,
+    //     &spark_whir_configs,
+    //     &spark_whir_configs.b_3batched,
+    // )?;
 
-    prove_spark_for_single_matrix(
-        &mut merlin,
-        spark_r1cs.c,
-        &memory,
-        e_values.c,
-        request.claimed_values.c,
-        &spark_whir_configs,
-        &spark_whir_configs.c_3batched,
-    )?;
+    // prove_spark_for_single_matrix(
+    //     &mut merlin,
+    //     spark_r1cs.c,
+    //     &memory,
+    //     e_values.c,
+    //     request.claimed_values.c,
+    //     &spark_whir_configs,
+    //     &spark_whir_configs.c_3batched,
+    // )?;
 
     let spark_proof = SPARKProof {
         transcript: merlin.narg_string().to_vec(),
@@ -73,8 +73,6 @@ fn main() -> Result<()> {
         .write_all(serde_json::to_string(&spark_proof).unwrap().as_bytes())
         .expect("Writing gnark parameters to a file failed");
 
-    println!("Claimed value for A {:?}", request.claimed_values.a); //Reilabs Debug:
-
     let spark_proof_gnark = SPARKProofGnark {
         transcript: spark_proof.transcript,
         io_pattern: spark_proof.io_pattern,
@@ -92,9 +90,5 @@ fn main() -> Result<()> {
         .write_all(serde_json::to_string(&spark_proof_gnark).unwrap().as_bytes())
         .expect("Writing spark gnark parameters to a file failed");
 
-    // println!("{:?}", request.claimed_values.a);
-    // println!("{:?}", request.claimed_values.b);
-    // println!("{:?}", request.claimed_values.c);
-
     Ok(())
 }
diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs
index 708d7b9c..b51c545f 100644
--- a/spark-prover/src/spark.rs
+++ b/spark-prover/src/spark.rs
@@ -74,216 +74,217 @@ pub fn prove_spark_for_single_matrix(
     let claimed_batched_value =
         sumcheck_final_folds[0] +
-        sumcheck_final_folds[1] * sumcheck_witness.batching_randomness +
+        sumcheck_final_folds[1] * sumcheck_witness.batching_randomness +
         sumcheck_final_folds[2] * sumcheck_witness.batching_randomness * sumcheck_witness.batching_randomness;
+    println!("{:?}", claimed_batched_value);
     sumcheck_statement.add_constraint(
         Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value);
 
     let sumcheck_prover = Prover::new(batched_config.clone());
     sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?;
 
-    // Rowwise
+    // // Rowwise
 
-    // Rowwise Init Final GPA
+    // // Rowwise Init Final GPA
 
-    let mut tau_and_gamma = [FieldElement::from(0); 2];
-    merlin.fill_challenge_scalars(&mut tau_and_gamma)?;
-    let tau = tau_and_gamma[0];
-    let gamma = tau_and_gamma[1];
+    // let mut tau_and_gamma = [FieldElement::from(0); 2];
+    // merlin.fill_challenge_scalars(&mut tau_and_gamma)?;
+    // let tau = tau_and_gamma[0];
+    // let gamma = tau_and_gamma[1];
 
-    let init_address: Vec<FieldElement> = (0..memory.eq_rx.len() as u64)
-        .map(FieldElement::from)
-        .collect();
-    let init_value = memory.eq_rx.clone();
-    let init_timestamp = vec![FieldElement::from(0); memory.eq_rx.len()];
+    // let init_address: Vec<FieldElement> = (0..memory.eq_rx.len() as u64)
+    //     .map(FieldElement::from)
+    //     .collect();
+    // let init_value = memory.eq_rx.clone();
+    // let init_timestamp = vec![FieldElement::from(0); memory.eq_rx.len()];
 
-    let init_vec: Vec<FieldElement> = izip!(init_address, init_value, init_timestamp)
-        .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
-        .collect();
+    // let init_vec: Vec<FieldElement> = izip!(init_address, init_value, init_timestamp)
+    //     .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+    //     .collect();
 
-    let final_address: Vec<FieldElement> = (0..memory.eq_rx.len() as u64)
-        .map(FieldElement::from)
-        .collect();
-    let final_value = memory.eq_rx.clone();
-    let final_timestamp = matrix.timestamps.final_row.clone();
+    // let final_address: Vec<FieldElement> = (0..memory.eq_rx.len() as u64)
+    //     .map(FieldElement::from)
+    //     .collect();
+    // let final_value = memory.eq_rx.clone();
+    // let final_timestamp = matrix.timestamps.final_row.clone();
 
-    let final_vec: Vec<FieldElement> = izip!(final_address, final_value, final_timestamp)
-        .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
-        .collect();
+    // let final_vec: Vec<FieldElement> = izip!(final_address, final_value, final_timestamp)
+    //     .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+    //     .collect();
 
-    let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
-
-    let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
-
-    let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone())
-        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
-    merlin.hint(&final_row_eval)?;
-
-    produce_whir_proof(
-        merlin,
-        MultilinearPoint(evaluation_randomness.to_vec()),
-        final_row_eval,
-        whir_configs.row.clone(),
-        final_row_ts_witness,
-    )?;
-
-    // Rowwise RS WS GPA
-
-    let rs_address = matrix.coo.row.clone();
-    let rs_value = e_values.e_rx.clone();
-    let rs_timestamp = matrix.timestamps.read_row.clone();
-
-    let rs_vec: Vec<FieldElement> =
-        izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone())
-            .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
-            .collect();
-
-    let ws_address = matrix.coo.row.clone();
-    let ws_value = e_values.e_rx.clone();
-    let ws_timestamp: Vec<FieldElement> = matrix
-        .timestamps
-        .read_row
-        .into_iter()
-        .map(|a| a + FieldElement::from(1))
-        .collect();
-
-    let ws_vec: Vec<FieldElement> =
-        izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone())
-            .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
-            .collect();
-
-    let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec);
-
-    let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
-
-    let rs_address_eval = EvaluationsList::new(rs_address)
-        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
-    merlin.hint(&rs_address_eval)?;
+    // let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
+
+    // let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
+
+    // let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone())
+    //     .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    // merlin.hint(&final_row_eval)?;
+
+    // produce_whir_proof(
+    //     merlin,
+    //     MultilinearPoint(evaluation_randomness.to_vec()),
+    //     final_row_eval,
+    //     whir_configs.row.clone(),
+    //     final_row_ts_witness,
+    // )?;
+
+    // // Rowwise RS WS GPA
+
+    // let rs_address = matrix.coo.row.clone();
+    // let rs_value = e_values.e_rx.clone();
+    // let rs_timestamp = matrix.timestamps.read_row.clone();
+
+    // let rs_vec: Vec<FieldElement> =
+    //     izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone())
+    //         .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+    //         .collect();
+
+    // let ws_address = matrix.coo.row.clone();
+    // let ws_value = e_values.e_rx.clone();
+    // let ws_timestamp: Vec<FieldElement> = matrix
+    //     .timestamps
+    //     .read_row
+    //     .into_iter()
+    //     .map(|a| a + FieldElement::from(1))
+    //     .collect();
+
+    // let ws_vec: Vec<FieldElement> =
+    //     izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone())
+    //         .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+    //         .collect();
+
+    // let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec);
+
+    // let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
+
+    // let rs_address_eval = EvaluationsList::new(rs_address)
+    //     .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    // merlin.hint(&rs_address_eval)?;
 
-    let rs_value_eval = EvaluationsList::new(rs_value)
-        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
-    merlin.hint(&rs_value_eval)?;
+    // let rs_value_eval = EvaluationsList::new(rs_value)
+    //     .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    // merlin.hint(&rs_value_eval)?;
 
-    let rs_timestamp_eval = EvaluationsList::new(rs_timestamp)
-        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
-    merlin.hint(&rs_timestamp_eval)?;
+    // let rs_timestamp_eval = EvaluationsList::new(rs_timestamp)
+    //     .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    // merlin.hint(&rs_timestamp_eval)?;
 
-    let mut rowwise_statement = Statement::<FieldElement>::new(evaluation_randomness.len());
+    // let mut rowwise_statement = Statement::<FieldElement>::new(evaluation_randomness.len());
 
-    let claimed_rowwise_eval =
-        rs_address_eval +
-        rs_value_eval * rowwise_witness.batching_randomness +
-        rs_timestamp_eval * rowwise_witness.batching_randomness * rowwise_witness.batching_randomness;
+    // let claimed_rowwise_eval =
+    //     rs_address_eval +
+    //     rs_value_eval * rowwise_witness.batching_randomness +
+    //     rs_timestamp_eval * rowwise_witness.batching_randomness * rowwise_witness.batching_randomness;
 
-    assert!(claimed_rowwise_eval == rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec())));
+    // assert!(claimed_rowwise_eval == rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec())));
 
-    rowwise_statement.add_constraint(
-        Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval);
+    // rowwise_statement.add_constraint(
+    //     Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval);
 
-    let sumcheck_prover = Prover::new(batched_config.clone());
-    sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?;
-
-    // Colwise
-
-    // Colwise Init Final GPA
-
-    let mut tau_and_gamma = [FieldElement::from(0); 2];
-    merlin.fill_challenge_scalars(&mut tau_and_gamma)?;
-    let tau = tau_and_gamma[0];
-    let gamma = tau_and_gamma[1];
-
-    let init_address: Vec<FieldElement> = (0..memory.eq_ry.len() as u64)
-        .map(FieldElement::from)
-        .collect();
-    let init_value = memory.eq_ry.clone();
-    let init_timestamp = vec![FieldElement::from(0); memory.eq_ry.len()];
-
-    let init_vec: Vec<FieldElement> = izip!(init_address, init_value, init_timestamp)
-        .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
-        .collect();
-
-    let final_address: Vec<FieldElement> = (0..memory.eq_ry.len() as u64)
-        .map(FieldElement::from)
-        .collect();
-    let final_value = memory.eq_ry.clone();
-    let final_timestamp = matrix.timestamps.final_col.clone();
-
-    let final_vec: Vec<FieldElement> = izip!(final_address, final_value, final_timestamp)
-        .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
-        .collect();
-
-    let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
-
-    let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
-
-    let final_col_eval = EvaluationsList::new(matrix.timestamps.final_col.clone())
-        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
-    merlin.hint(&final_col_eval)?;
-
-    produce_whir_proof(
-        merlin,
-        MultilinearPoint(evaluation_randomness.to_vec()),
-        final_col_eval,
-        whir_configs.col.clone(),
-        final_col_ts_witness,
-    )?;
-
-    // Colwise RS WS GPA
-
-    let rs_address = matrix.coo.col.clone();
-    let rs_value = e_values.e_ry.clone();
-    let rs_timestamp = matrix.timestamps.read_col.clone();
-
-    let rs_vec: Vec<FieldElement> =
-        izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone())
-            .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
-            .collect();
-
-    let ws_address = matrix.coo.col.clone();
-    let ws_value = e_values.e_ry.clone();
-    let ws_timestamp: Vec<FieldElement> = matrix
-        .timestamps
-        .read_col
-        .into_iter()
-        .map(|a| a + FieldElement::from(1))
-        .collect();
-
-    let ws_vec: Vec<FieldElement> =
-        izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone())
-            .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
-            .collect();
-
-    let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec);
-
-    let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
-
-    let rs_address_eval = EvaluationsList::new(rs_address)
-        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
-    merlin.hint(&rs_address_eval)?;
+    // let sumcheck_prover = Prover::new(batched_config.clone());
+    // sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?;
+
+    // // Colwise
+
+    // // Colwise Init Final GPA
+
+    // let mut tau_and_gamma = [FieldElement::from(0); 2];
+    // merlin.fill_challenge_scalars(&mut tau_and_gamma)?;
+    // let tau = tau_and_gamma[0];
+    // let gamma = tau_and_gamma[1];
+
+    // let init_address: Vec<FieldElement> = (0..memory.eq_ry.len() as u64)
+    //     .map(FieldElement::from)
+    //     .collect();
+    // let init_value = memory.eq_ry.clone();
+    // let init_timestamp = vec![FieldElement::from(0); memory.eq_ry.len()];
+
+    // let init_vec: Vec<FieldElement> = izip!(init_address, init_value, init_timestamp)
+    //     .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+    //     .collect();
+
+    // let final_address: Vec<FieldElement> = (0..memory.eq_ry.len() as u64)
+    //     .map(FieldElement::from)
+    //     .collect();
+    // let final_value = memory.eq_ry.clone();
+    // let final_timestamp = matrix.timestamps.final_col.clone();
+
+    // let final_vec: Vec<FieldElement> = izip!(final_address, final_value, final_timestamp)
+    //     .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+    //     .collect();
+
+    // let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
+
+    // let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
+
+    // let final_col_eval = EvaluationsList::new(matrix.timestamps.final_col.clone())
+    //     .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    // merlin.hint(&final_col_eval)?;
+
+    // produce_whir_proof(
+    //     merlin,
+    //     MultilinearPoint(evaluation_randomness.to_vec()),
+    //     final_col_eval,
+    //     whir_configs.col.clone(),
+    //     final_col_ts_witness,
+    // )?;
+
+    // // Colwise RS WS GPA
+
+    // let rs_address = matrix.coo.col.clone();
+    // let rs_value = e_values.e_ry.clone();
+    // let rs_timestamp = matrix.timestamps.read_col.clone();
+
+    // let rs_vec: Vec<FieldElement> =
+    //     izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone())
+    //         .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+    //         .collect();
+
+    // let ws_address = matrix.coo.col.clone();
+    // let ws_value = e_values.e_ry.clone();
+    // let ws_timestamp: Vec<FieldElement> = matrix
+    //     .timestamps
+    //     .read_col
+    //     .into_iter()
+    //     .map(|a| a + FieldElement::from(1))
+    //     .collect();
+
+    // let ws_vec: Vec<FieldElement> =
+    //     izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone())
+    //         .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+    //         .collect();
+
+    // let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec);
+
+    // let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
+
+    // let rs_address_eval = EvaluationsList::new(rs_address)
+    //     .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    // merlin.hint(&rs_address_eval)?;
 
-    let rs_value_eval = EvaluationsList::new(rs_value)
-        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
-    merlin.hint(&rs_value_eval)?;
+    // let rs_value_eval = EvaluationsList::new(rs_value)
+    //     .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    // merlin.hint(&rs_value_eval)?;
 
-    let rs_timestamp_eval = EvaluationsList::new(rs_timestamp)
-        .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
-    merlin.hint(&rs_timestamp_eval)?;
+    // let rs_timestamp_eval = EvaluationsList::new(rs_timestamp)
+    //     .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone()));
+    // merlin.hint(&rs_timestamp_eval)?;
 
-    let mut colwise_statement = Statement::<FieldElement>::new(evaluation_randomness.len());
+    // let mut colwise_statement = Statement::<FieldElement>::new(evaluation_randomness.len());
 
-    let claimed_colwise_eval =
-        rs_address_eval +
-        rs_value_eval * colwise_witness.batching_randomness +
-        rs_timestamp_eval * colwise_witness.batching_randomness * colwise_witness.batching_randomness;
+    // let claimed_colwise_eval =
+    //     rs_address_eval +
+    //     rs_value_eval * colwise_witness.batching_randomness +
+    //     rs_timestamp_eval * colwise_witness.batching_randomness * colwise_witness.batching_randomness;
 
-    assert!(claimed_colwise_eval == colwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec())));
+    // assert!(claimed_colwise_eval == colwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec())));
 
-    colwise_statement.add_constraint(
-        Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval);
+    // colwise_statement.add_constraint(
+    //     Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval);
- let sumcheck_prover = Prover::new(batched_config.clone()); - sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; + // let sumcheck_prover = Prover::new(batched_config.clone()); + // sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; Ok(()) } diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs index 78480f42..15816650 100644 --- a/spark-prover/src/utilities/iopattern/mod.rs +++ b/spark-prover/src/utilities/iopattern/mod.rs @@ -45,170 +45,170 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { // Rowwise - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.a.num_rows) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("Row final counter claimed evaluation") - .add_whir_proof(&configs.row); - - for i in 0..=next_power_of_two(r1cs.a.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("RS address claimed evaluation") - .hint("RS value claimed evaluation") - .hint("RS timestamp claimed evaluation") - .add_whir_proof(&configs.a_3batched); - - // Colwise - - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.a.num_cols) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("Col final counter claimed evaluation") - .add_whir_proof(&configs.col); - - for i in 0..=next_power_of_two(r1cs.a.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("RS address claimed evaluation") - .hint("RS value claimed evaluation") - .hint("RS timestamp claimed evaluation") - .add_whir_proof(&configs.a_3batched); - - // Matrix B - - io = io - .commit_statement(&configs.b_3batched) - .commit_statement(&configs.b_3batched) - .commit_statement(&configs.b_3batched) - .commit_statement(&configs.row) - .commit_statement(&configs.col) - .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) - .hint("sumcheck_last_folds") - .add_whir_proof(&configs.b_3batched); + // io = io.add_tau_and_gamma(); + + // for i in 0..=next_power_of_two(r1cs.a.num_rows) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } + + // io = io + // .hint("Row final counter claimed evaluation") + // .add_whir_proof(&configs.row); + + // for i in 0..=next_power_of_two(r1cs.a.num_entries()) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } + + // io = io + // .hint("RS address claimed evaluation") + // .hint("RS value claimed evaluation") + // .hint("RS timestamp claimed evaluation") + // .add_whir_proof(&configs.a_3batched); + + // // Colwise + + // io = io.add_tau_and_gamma(); + + // for i in 0..=next_power_of_two(r1cs.a.num_cols) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } + + // io = io + // .hint("Col final counter claimed evaluation") + // .add_whir_proof(&configs.col); + + // for i in 0..=next_power_of_two(r1cs.a.num_entries()) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } + + // io = io + // .hint("RS address claimed evaluation") + // .hint("RS value claimed evaluation") + // .hint("RS timestamp claimed evaluation") + // .add_whir_proof(&configs.a_3batched); + + // // Matrix B + + // io = io + // .commit_statement(&configs.b_3batched) + // .commit_statement(&configs.b_3batched) + // .commit_statement(&configs.b_3batched) + // .commit_statement(&configs.row) + // .commit_statement(&configs.col) + // 
.add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) + // .hint("sumcheck_last_folds") + // .add_whir_proof(&configs.b_3batched); - // Rowwise - - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.b.num_rows) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("Row final counter claimed evaluation") - .add_whir_proof(&configs.row); - - for i in 0..=next_power_of_two(r1cs.b.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("RS address claimed evaluation") - .hint("RS value claimed evaluation") - .hint("RS timestamp claimed evaluation") - .add_whir_proof(&configs.b_3batched); - - // Colwise - - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.b.num_cols) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("Col final counter claimed evaluation") - .add_whir_proof(&configs.col); - - for i in 0..=next_power_of_two(r1cs.b.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("RS address claimed evaluation") - .hint("RS value claimed evaluation") - .hint("RS timestamp claimed evaluation") - .add_whir_proof(&configs.b_3batched); - - // Matrix C - - io = io - .commit_statement(&configs.c_3batched) - .commit_statement(&configs.c_3batched) - .commit_statement(&configs.c_3batched) - .commit_statement(&configs.row) - .commit_statement(&configs.col) - .add_sumcheck_polynomials(next_power_of_two(r1cs.c.num_entries())) - .hint("sumcheck_last_folds") - .add_whir_proof(&configs.c_3batched); + // // Rowwise + + // io = io.add_tau_and_gamma(); + + // for i in 0..=next_power_of_two(r1cs.b.num_rows) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } + + // io = io + // .hint("Row final counter claimed evaluation") + // .add_whir_proof(&configs.row); + + // for i in 0..=next_power_of_two(r1cs.b.num_entries()) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } + + // io = io + // .hint("RS address claimed evaluation") + // .hint("RS value claimed evaluation") + // .hint("RS timestamp claimed evaluation") + // .add_whir_proof(&configs.b_3batched); + + // // Colwise + + // io = io.add_tau_and_gamma(); + + // for i in 0..=next_power_of_two(r1cs.b.num_cols) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } + + // io = io + // .hint("Col final counter claimed evaluation") + // .add_whir_proof(&configs.col); + + // for i in 0..=next_power_of_two(r1cs.b.num_entries()) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } + + // io = io + // .hint("RS address claimed evaluation") + // .hint("RS value claimed evaluation") + // .hint("RS timestamp claimed evaluation") + // .add_whir_proof(&configs.b_3batched); + + // // Matrix C + + // io = io + // .commit_statement(&configs.c_3batched) + // .commit_statement(&configs.c_3batched) + // .commit_statement(&configs.c_3batched) + // .commit_statement(&configs.row) + // .commit_statement(&configs.col) + // .add_sumcheck_polynomials(next_power_of_two(r1cs.c.num_entries())) + // .hint("sumcheck_last_folds") + // .add_whir_proof(&configs.c_3batched); - // Rowwise + // // Rowwise - io = io.add_tau_and_gamma(); + // io = io.add_tau_and_gamma(); - for i in 0..=next_power_of_two(r1cs.c.num_rows) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } + // for i in 0..=next_power_of_two(r1cs.c.num_rows) { + // io = io.add_sumcheck_polynomials(i); + // io = 
io.add_line(); + // } - io = io - .hint("Row final counter claimed evaluation") - .add_whir_proof(&configs.row); + // io = io + // .hint("Row final counter claimed evaluation") + // .add_whir_proof(&configs.row); - for i in 0..=next_power_of_two(r1cs.c.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } + // for i in 0..=next_power_of_two(r1cs.c.num_entries()) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } - io = io - .hint("RS address claimed evaluation") - .hint("RS value claimed evaluation") - .hint("RS timestamp claimed evaluation") - .add_whir_proof(&configs.c_3batched); + // io = io + // .hint("RS address claimed evaluation") + // .hint("RS value claimed evaluation") + // .hint("RS timestamp claimed evaluation") + // .add_whir_proof(&configs.c_3batched); - // Colwise + // // Colwise - io = io.add_tau_and_gamma(); + // io = io.add_tau_and_gamma(); - for i in 0..=next_power_of_two(r1cs.c.num_cols) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } + // for i in 0..=next_power_of_two(r1cs.c.num_cols) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } - io = io - .hint("Col final counter claimed evaluation") - .add_whir_proof(&configs.col); + // io = io + // .hint("Col final counter claimed evaluation") + // .add_whir_proof(&configs.col); - for i in 0..=next_power_of_two(r1cs.c.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } + // for i in 0..=next_power_of_two(r1cs.c.num_entries()) { + // io = io.add_sumcheck_polynomials(i); + // io = io.add_line(); + // } - io = io - .hint("RS address claimed evaluation") - .hint("RS value claimed evaluation") - .hint("RS timestamp claimed evaluation") - .add_whir_proof(&configs.c_3batched); + // io = io + // .hint("RS address claimed evaluation") + // .hint("RS value claimed evaluation") + // .hint("RS timestamp claimed evaluation") + // .add_whir_proof(&configs.c_3batched); io } From cdd2c10ec2ec8d78d64083adf3b489bdbb05714c Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Fri, 26 Sep 2025 12:43:16 +0800 Subject: [PATCH 16/34] Buggy: current --- recursive-verifier/app/circuit/circuit.go | 95 +++++++++++++++++++-- spark-prover/src/bin/spark-verifier.rs | 27 +++--- spark-prover/src/spark.rs | 55 ++++++------ spark-prover/src/utilities/iopattern/mod.rs | 14 +-- 4 files changed, 135 insertions(+), 56 deletions(-) diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go index 8c28f9ba..64c9034a 100644 --- a/recursive-verifier/app/circuit/circuit.go +++ b/recursive-verifier/app/circuit/circuit.go @@ -145,13 +145,13 @@ func (circuit *Circuit) Define(api frontend.API) error { _ = sparkSumcheckFoldingRandomness _ = sparkSumcheckLastEval - whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, circuit.WHIRA3, [][]frontend.Variable{{}, {}, {}}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash, - [][]frontend.Variable{{circuit.SparkSumcheckLast[0]}, {circuit.SparkSumcheckLast[1]}, {circuit.SparkSumcheckLast[2]}}, - [][]frontend.Variable{sparkSumcheckFoldingRandomness}, - ) - if err != nil { - return err - } + // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, circuit.WHIRA3, [][]frontend.Variable{{}, {}, {}}, []frontend.Variable{}, sumcheckBatchingRandomness, 
sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash, + // [][]frontend.Variable{{circuit.SparkSumcheckLast[0]}, {circuit.SparkSumcheckLast[1]}, {circuit.SparkSumcheckLast[2]}}, + // [][]frontend.Variable{sparkSumcheckFoldingRandomness}, + // ) + // if err != nil { + // return err + // } tauGammaTemp := make([]frontend.Variable, 2) if err := arthur.FillChallengeScalars(tauGammaTemp); err != nil { @@ -160,10 +160,17 @@ func (circuit *Circuit) Define(api frontend.API) error { tau := tauGammaTemp[0] gamma := tauGammaTemp[1] + gpaResult, err := gpaSumcheckVerifier(api, arthur, 8+2) + if err != nil { + return err + } + + _ = gpaResult + _ = tau _ = gamma - _ = whirFoldingRandomness + // _ = whirFoldingRandomness _ = sumcheckRootHash _ = sumcheckBatchingRandomness @@ -407,3 +414,75 @@ func parseClaimedEvaluations(claimedEvaluations ClaimedEvaluations, isContainer return fSums, gSums } + +func gpaSumcheckVerifier( + api frontend.API, + arthur gnarkNimue.Arthur, + layerCount int, +) (GPASumcheckResult, error) { + l := make([]frontend.Variable, 2) + r := make([]frontend.Variable, 1) + + // gpaClaimedProduct := make([]frontend.Variable, 1) + // err := arthur.FillNextScalars(gpaClaimedProduct) + // if err != nil { + // return GPASumcheckResult{}, err + // } + // lastEval := gpaClaimedProduct[0] + gpaClaimedValues := make([]frontend.Variable, 2) + err := arthur.FillNextScalars(gpaClaimedValues) + if err != nil { + return GPASumcheckResult{}, err + } + err = arthur.FillChallengeScalars(r) + if err != nil { + return GPASumcheckResult{}, err + } + lastEval := utilities.UnivarPoly(api, gpaClaimedValues, r)[0] + prevRand := r + var rand []frontend.Variable + + for i := 1; i < (layerCount - 1); i++ { + rand, lastEval, err = runSumcheck( + api, + arthur, + lastEval, + i, + 4, + ) + api.Println(lastEval) + if err != nil { + return GPASumcheckResult{}, err + } + + err = arthur.FillNextScalars(l) + if err != nil { + return GPASumcheckResult{}, err + } + err = arthur.FillChallengeScalars(r) + if err != nil { + return GPASumcheckResult{}, err + } + claimedLastSch := api.Mul( + calculateEQ(api, prevRand, rand), + utilities.UnivarPoly(api, l, []frontend.Variable{0})[0], + utilities.UnivarPoly(api, l, []frontend.Variable{1})[0], + ) + api.Println(claimedLastSch) + api.AssertIsEqual(claimedLastSch, lastEval) + prevRand = append(rand, r[0]) + lastEval = utilities.UnivarPoly(api, l, []frontend.Variable{r[0]})[0] + } + + return GPASumcheckResult{ + claimedProducts: gpaClaimedValues, + lastSumcheckValue: lastEval, + randomness: prevRand, + }, nil +} + +type GPASumcheckResult struct { + claimedProducts []frontend.Variable + lastSumcheckValue frontend.Variable + randomness []frontend.Variable +} diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index 2e50d054..fe497b8e 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -119,15 +119,15 @@ pub fn verify_spark_single_matrix( num_nonzero_terms, )); - a_spark_sumcheck_statement_verifier.add_constraint( - Weights::evaluation(MultilinearPoint(randomness.clone())), - final_folds[0] + - final_folds[1] * a_sumcheck_commitment.batching_randomness + - final_folds[2] * a_sumcheck_commitment.batching_randomness * a_sumcheck_commitment.batching_randomness, - ); + // a_spark_sumcheck_statement_verifier.add_constraint( + // Weights::evaluation(MultilinearPoint(randomness.clone())), + // final_folds[0] + + // final_folds[1] * a_sumcheck_commitment.batching_randomness + + // 
final_folds[2] * a_sumcheck_commitment.batching_randomness * a_sumcheck_commitment.batching_randomness,
+    // );
 
-    let a_spark_sumcheck_verifier = Verifier::new(num_nonzero_term_batched3_config);
-    a_spark_sumcheck_verifier.verify(arthur, &a_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?;
+    let a_spark_sumcheck_verifier = Verifier::new(num_nonzero_term_batched3_config);
+    // a_spark_sumcheck_verifier.verify(arthur, &a_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?;
 
     // Matrix A - Rowwise
@@ -136,10 +136,10 @@ pub fn verify_spark_single_matrix(
     let tau = tau_and_gamma[0];
     let gamma = tau_and_gamma[1];
 
-    // let gpa_result = gpa_sumcheck_verifier(
-    //     arthur,
-    //     next_power_of_two(num_rows) + 2,
-    // )?;
+    let gpa_result = gpa_sumcheck_verifier(
+        arthur,
+        next_power_of_two(num_rows) + 2,
+    )?;
 
     // let claimed_init = gpa_result.claimed_values[0];
     // let claimed_final = gpa_result.claimed_values[1];
@@ -359,7 +359,6 @@ pub fn gpa_sumcheck_verifier(
         .fill_challenge_scalars(&mut r)
         .expect("Failed to fill next scalars");
     let mut a_last_sumcheck_value = eval_linear_poly(&claimed_values, &r[0]);
-    rand.push(r[0]);
     prev_rand = rand;
     rand = Vec::<FieldElement>::new();
@@ -380,6 +379,7 @@
         rand.push(alpha[0]);
         a_last_sumcheck_value = eval_cubic_poly(&h, &alpha[0]);
     }
+    println!("{:?}", a_last_sumcheck_value);
     arthur
         .fill_next_scalars(&mut l)
         .expect("Failed to fill next scalars");
@@ -389,6 +389,7 @@
     let claimed_last_sch = calculate_eq(&prev_rand, &rand)
         * eval_linear_poly(&l, &FieldElement::from(0))
         * eval_linear_poly(&l, &FieldElement::from(1));
+    println!("{:?}", claimed_last_sch);
     assert_eq!(claimed_last_sch, a_last_sumcheck_value);
     rand.push(r[0]);
     prev_rand = rand;
diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs
index b51c545f..dd1429f4 100644
--- a/spark-prover/src/spark.rs
+++ b/spark-prover/src/spark.rs
@@ -77,43 +77,42 @@ pub fn prove_spark_for_single_matrix(
         sumcheck_final_folds[1] * sumcheck_witness.batching_randomness +
         sumcheck_final_folds[2] * sumcheck_witness.batching_randomness * sumcheck_witness.batching_randomness;
-    println!("{:?}", claimed_batched_value);
-    sumcheck_statement.add_constraint(
-        Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value);
+    // sumcheck_statement.add_constraint(
+    //     Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value);
 
-    let sumcheck_prover = Prover::new(batched_config.clone());
-    sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?;
+    // let sumcheck_prover = Prover::new(batched_config.clone());
+    // sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?;
 
-    // // Rowwise
+    // Rowwise
 
-    // // Rowwise Init Final GPA
+    // Rowwise Init Final GPA
 
-    // let mut tau_and_gamma = [FieldElement::from(0); 2];
-    // merlin.fill_challenge_scalars(&mut tau_and_gamma)?;
-    // let tau = tau_and_gamma[0];
-    // let gamma = tau_and_gamma[1];
+    let mut tau_and_gamma = [FieldElement::from(0); 2];
+    merlin.fill_challenge_scalars(&mut tau_and_gamma)?;
+    let tau = tau_and_gamma[0];
+    let gamma = tau_and_gamma[1];
 
-    // let init_address: Vec<FieldElement> = (0..memory.eq_rx.len() as u64)
-    //     .map(FieldElement::from)
-    //     .collect();
-    // let init_value = memory.eq_rx.clone();
-    // let init_timestamp = vec![FieldElement::from(0); memory.eq_rx.len()];
+    let init_address: Vec<FieldElement> = (0..memory.eq_rx.len() as u64)
+        .map(FieldElement::from)
+        .collect();
+    let init_value = memory.eq_rx.clone();
+    let init_timestamp = vec![FieldElement::from(0); memory.eq_rx.len()];
 
-    // let init_vec: Vec<FieldElement> = izip!(init_address, init_value, init_timestamp)
-    //     .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
-    //     .collect();
+    let init_vec: Vec<FieldElement> = izip!(init_address, init_value, init_timestamp)
+        .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+        .collect();
 
-    // let final_address: Vec<FieldElement> = (0..memory.eq_rx.len() as u64)
-    //     .map(FieldElement::from)
-    //     .collect();
-    // let final_value = memory.eq_rx.clone();
-    // let final_timestamp = matrix.timestamps.final_row.clone();
+    let final_address: Vec<FieldElement> = (0..memory.eq_rx.len() as u64)
+        .map(FieldElement::from)
+        .collect();
+    let final_value = memory.eq_rx.clone();
+    let final_timestamp = matrix.timestamps.final_row.clone();
 
-    // let final_vec: Vec<FieldElement> = izip!(final_address, final_value, final_timestamp)
-    //     .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
-    //     .collect();
+    let final_vec: Vec<FieldElement> = izip!(final_address, final_value, final_timestamp)
+        .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau)
+        .collect();
 
-    // let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
+    let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec);
 
     // let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1);
 
diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs
index 15816650..7dc79b96 100644
--- a/spark-prover/src/utilities/iopattern/mod.rs
+++ b/spark-prover/src/utilities/iopattern/mod.rs
@@ -40,17 +40,17 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern {
         .commit_statement(&configs.row)
         .commit_statement(&configs.col)
         .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries()))
-        .hint("sumcheck_last_folds")
-        .add_whir_proof(&configs.a_3batched);
+        .hint("sumcheck_last_folds");
+    // .add_whir_proof(&configs.a_3batched);
 
     // Rowwise
 
-    // io = io.add_tau_and_gamma();
+    io = io.add_tau_and_gamma();
 
-    // for i in 0..=next_power_of_two(r1cs.a.num_rows) {
-    //     io = io.add_sumcheck_polynomials(i);
-    //     io = io.add_line();
-    // }
+    for i in 0..=next_power_of_two(r1cs.a.num_rows) {
+        io = io.add_sumcheck_polynomials(i);
+        io = io.add_line();
+    }
 
     // io = io
     //     .hint("Row final counter claimed evaluation")
From cf2ccb158801309e69a2c715b84c2488b93b4c59 Mon Sep 17 00:00:00 2001
From: Batmend Batsaikhan
Date: Fri, 26 Sep 2025 17:59:25 +0800
Subject: [PATCH 17/34] Adds partial RS WS

---
 recursive-verifier/app/circuit/circuit.go     | 196 +++++++++++++-----
 recursive-verifier/app/circuit/common.go      |  71 ++++++-
 recursive-verifier/app/circuit/mt.go          |   6 +
 recursive-verifier/app/circuit/mtUtilities.go |  27 ++-
 recursive-verifier/app/circuit/types.go       |  44 +++-
 recursive-verifier/app/circuit/utilities.go   |   4 +-
 recursive-verifier/app/circuit/whir.go        |  19 +-
 spark-prover/src/bin/spark-verifier.rs        | 109 +++++-----
 spark-prover/src/main.rs                      |   2 +
 spark-prover/src/spark.rs                     |  90 ++++----
 spark-prover/src/utilities/iopattern/mod.rs   |  24 ++-
 11 files changed, 393 insertions(+), 199 deletions(-)

diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go
index 64c9034a..2aea4787 100644
--- a/recursive-verifier/app/circuit/circuit.go
+++ b/recursive-verifier/app/circuit/circuit.go
@@ -44,15 +44,22 @@ type Circuit struct {
 	UseSpark        bool
 	SPARKTranscript []uints.U8 `gnark:",public"`
-	SPARKIO         []byte
-	Transcript      []uints.U8 `gnark:",public"`
-	WHIRA3          WHIRParams
-	WHIRRow         WHIRParams
-	WHIRCol         WHIRParams
-	SparkSumcheckFirstRound
Merkle - SparkSumcheckMerkle Merkle - AClaimed frontend.Variable - SparkSumcheckLast []frontend.Variable + SPARKIO []byte + Transcript []uints.U8 `gnark:",public"` + WHIRA3 WHIRParams + WHIRRow WHIRParams + WHIRCol WHIRParams + + SparkSumcheckLast []frontend.Variable + + SparkA SPARKMatrixData + + SparkSumcheckFirstRound Merkle + SparkSumcheckMerkle Merkle + RowFinalMerkleFirstRound Merkle + RowFinalMerkle Merkle + RowwiseMerkleFirstRound Merkle + RowwiseMerkle Merkle } func (circuit *Circuit) Define(api frontend.API) error { @@ -113,25 +120,24 @@ func (circuit *Circuit) Define(api frontend.API) error { return err } - // TODO: create a commitment struct - sumcheckRootHash, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRA3) + sumcheckCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRA3) if err != nil { return err } - rowwiseRootHash, rowwiseBatchingRandomness, rowwiseInitialOODQueries, rowwiseInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRA3) + rowwiseCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRA3) if err != nil { return err } - colwiseRootHash, colwiseBatchingRandomness, colwiseInitialOODQueries, colwiseInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRA3) + colwiseCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRA3) if err != nil { return err } - rowFinaltsRootHash, rowFinaltsBatchingRandomness, rowFinaltsInitialOODQueries, rowFinaltsInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRRow) + rowFinalCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRRow) if err != nil { return err } - colFinaltsRootHash, colFinaltsBatchingRandomness, colFinaltsInitialOODQueries, colFinaltsInitialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRCol) + colFinalCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRCol) if err != nil { return err } @@ -153,6 +159,8 @@ func (circuit *Circuit) Define(api frontend.API) error { // return err // } + // Rowwise + tauGammaTemp := make([]frontend.Variable, 2) if err := arthur.FillChallengeScalars(tauGammaTemp); err != nil { return err @@ -160,42 +168,98 @@ func (circuit *Circuit) Define(api frontend.API) error { tau := tauGammaTemp[0] gamma := tauGammaTemp[1] + // Change this debug statement gpaResult, err := gpaSumcheckVerifier(api, arthur, 8+2) if err != nil { return err } - _ = gpaResult + claimedInit := gpaResult.claimedProducts[0] + claimedFinal := gpaResult.claimedProducts[1] + + last_randomness := gpaResult.randomness[0] + evaluation_randomness := gpaResult.randomness[1:] + + addr := CalculateAdr(api, evaluation_randomness) + mem := calculateEQ(api, []frontend.Variable{0, 0, 0, 0, 0, 0, 0, 0}, evaluation_randomness) + init_cntr := 0 + + init_opening := api.Sub(api.Add(api.Mul(addr, gamma, gamma), api.Mul(mem, gamma), init_cntr), tau) + + // _, err = RunZKWhir(api, arthur, uapi, sc, circuit.RowFinalMerkle, circuit.RowFinalMerkleFirstRound, circuit.WHIRRow, [][]frontend.Variable{{}}, []frontend.Variable{}, rowFinaltsBatchingRandomness, rowFinaltsInitialOODQueries, rowFinaltsInitialOODAnswers, rowFinaltsRootHash, + // [][]frontend.Variable{{circuit.SparkA.RowFinalCounter}}, + // [][]frontend.Variable{evaluation_randomness}, + // ) + + final_opening := api.Sub(api.Add(api.Mul(addr, gamma, gamma), api.Mul(mem, gamma), circuit.SparkA.RowFinalCounter), tau) + + evaluated_value := api.Add(api.Mul(init_opening, api.Sub(1, last_randomness)), 
api.Mul(final_opening, last_randomness)) + + api.AssertIsEqual(gpaResult.lastSumcheckValue, evaluated_value) + + // Change this after debug + gpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, 8+2) + if err != nil { + return err + } + + claimedRS := gpaResultRSWS.claimedProducts[0] + claimedWS := gpaResultRSWS.claimedProducts[1] + + rsws_last_randomness := gpaResultRSWS.randomness[0] + rsws_evaluation_randomness := gpaResultRSWS.randomness[1:] + + rs_opening := api.Sub(api.Add(api.Mul(circuit.SparkA.RowRSAddressEvaluation, gamma, gamma), api.Mul(circuit.SparkA.RowRSValueEvaluation, gamma), circuit.SparkA.RowRSTimestampEvaluation), tau) + ws_opening := api.Sub(api.Add(api.Mul(circuit.SparkA.RowRSAddressEvaluation, gamma, gamma), api.Mul(circuit.SparkA.RowRSValueEvaluation, gamma), circuit.SparkA.RowRSTimestampEvaluation, 1), tau) + + rsws_evaluated_value := api.Add(api.Mul(rs_opening, api.Sub(1, rsws_last_randomness)), api.Mul(ws_opening, rsws_last_randomness)) + + api.AssertIsEqual(gpaResultRSWS.lastSumcheckValue, rsws_evaluated_value) + + _, err = RunZKWhir(api, arthur, uapi, sc, circuit.RowwiseMerkle, circuit.RowwiseMerkleFirstRound, circuit.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, rowwiseCommitment, + [][]frontend.Variable{{circuit.SparkA.RowRSAddressEvaluation}, {circuit.SparkA.RowRSValueEvaluation}, {circuit.SparkA.RowRSTimestampEvaluation}}, + [][]frontend.Variable{rsws_evaluation_randomness}, + ) + + api.AssertIsEqual(api.Mul(claimedInit, claimedWS), api.Mul(claimedRS, claimedFinal)) + + _ = rsws_evaluation_randomness - _ = tau - _ = gamma + _ = gpaResultRSWS + _ = claimedInit + _ = claimedFinal + _ = sumcheckCommitment + _ = colwiseCommitment + _ = rowwiseCommitment + _ = rowFinalCommitment + _ = colFinalCommitment // _ = whirFoldingRandomness - _ = sumcheckRootHash - _ = sumcheckBatchingRandomness - _ = sumcheckInitialOODAnswers - _ = sumcheckInitialOODQueries + // _ = sumcheckRootHash + // _ = sumcheckBatchingRandomness + // _ = sumcheckInitialOODAnswers + // _ = sumcheckInitialOODQueries - _ = rowwiseRootHash - _ = rowwiseBatchingRandomness - _ = rowwiseInitialOODAnswers - _ = rowwiseInitialOODQueries + // _ = rowwiseRootHash + // _ = rowwiseBatchingRandomness + // _ = rowwiseInitialOODAnswers + // _ = rowwiseInitialOODQueries - _ = colwiseRootHash - _ = colwiseBatchingRandomness - _ = colwiseInitialOODAnswers - _ = colwiseInitialOODQueries + // _ = colwiseRootHash + // _ = colwiseBatchingRandomness + // _ = colwiseInitialOODAnswers + // _ = colwiseInitialOODQueries - _ = rowFinaltsRootHash - _ = rowFinaltsBatchingRandomness - _ = rowFinaltsInitialOODAnswers - _ = rowFinaltsInitialOODQueries + // _ = rowFinaltsRootHash + // _ = rowFinaltsBatchingRandomness + // _ = rowFinaltsInitialOODAnswers + // _ = rowFinaltsInitialOODQueries - _ = colFinaltsRootHash - _ = colFinaltsBatchingRandomness - _ = colFinaltsInitialOODAnswers - _ = colFinaltsInitialOODQueries + // _ = colFinaltsRootHash + // _ = colFinaltsBatchingRandomness + // _ = colFinaltsInitialOODAnswers + // _ = colFinaltsInitialOODQueries _ = uapi } else { @@ -242,6 +306,11 @@ func verifyCircuit( sparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(sparkSumcheck[1].Limbs) sparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(sparkSumcheck[2].Limbs) + rowFinalCounter := typeConverters.LimbsToBigIntMod(hints.rowFinalCounter[0].Limbs) + rowRSAddressEvaluation := typeConverters.LimbsToBigIntMod(hints.rowRSAddressEvaluation[0].Limbs) + rowRSValueEvaluation := 
typeConverters.LimbsToBigIntMod(hints.rowRSValueEvaluation[0].Limbs) + rowRSTimestampEvaluation := typeConverters.LimbsToBigIntMod(hints.rowRSTimestampEvaluation[0].Limbs) + fSums, gSums := parseClaimedEvaluations(claimedEvaluations, true) matrixA := make([]MatrixCell, len(internedR1CS.A.Values)) @@ -319,9 +388,21 @@ func verifyCircuit( SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, true), SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, true), LogANumTerms: sparkConfig.LogANumTerms, - AClaimed: sparkConfig.AClaimed, SparkSumcheckLast: contSparkSumcheckLast, + RowFinalMerkleFirstRound: newMerkle(hints.rowFinalMerkle.firstRoundMerklePaths.path, true), + RowFinalMerkle: newMerkle(hints.rowFinalMerkle.roundHints, true), + + RowwiseMerkleFirstRound: newMerkle(hints.rowwiseSparkMerkle.firstRoundMerklePaths.path, true), + RowwiseMerkle: newMerkle(hints.rowwiseSparkMerkle.roundHints, true), + + SparkA: SPARKMatrixData{ + RowFinalCounter: rowFinalCounter, + RowRSAddressEvaluation: rowRSAddressEvaluation, + RowRSValueEvaluation: rowRSValueEvaluation, + RowRSTimestampEvaluation: rowRSTimestampEvaluation, + }, + UseSpark: useSpark, } @@ -384,9 +465,21 @@ func verifyCircuit( SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, false), SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, false), LogANumTerms: sparkConfig.LogANumTerms, - AClaimed: sparkConfig.AClaimed, SparkSumcheckLast: sparkSumcheckLast, + RowFinalMerkleFirstRound: newMerkle(hints.rowFinalMerkle.firstRoundMerklePaths.path, false), + RowFinalMerkle: newMerkle(hints.rowFinalMerkle.roundHints, false), + + RowwiseMerkleFirstRound: newMerkle(hints.rowwiseSparkMerkle.firstRoundMerklePaths.path, false), + RowwiseMerkle: newMerkle(hints.rowwiseSparkMerkle.roundHints, false), + + SparkA: SPARKMatrixData{ + RowFinalCounter: rowFinalCounter, + RowRSAddressEvaluation: rowRSAddressEvaluation, + RowRSValueEvaluation: rowRSValueEvaluation, + RowRSTimestampEvaluation: rowRSTimestampEvaluation, + }, + UseSpark: useSpark, } @@ -423,12 +516,6 @@ func gpaSumcheckVerifier( l := make([]frontend.Variable, 2) r := make([]frontend.Variable, 1) - // gpaClaimedProduct := make([]frontend.Variable, 1) - // err := arthur.FillNextScalars(gpaClaimedProduct) - // if err != nil { - // return GPASumcheckResult{}, err - // } - // lastEval := gpaClaimedProduct[0] gpaClaimedValues := make([]frontend.Variable, 2) err := arthur.FillNextScalars(gpaClaimedValues) if err != nil { @@ -439,7 +526,7 @@ func gpaSumcheckVerifier( return GPASumcheckResult{}, err } lastEval := utilities.UnivarPoly(api, gpaClaimedValues, r)[0] - prevRand := r + prevRand := []frontend.Variable{r[0]} var rand []frontend.Variable for i := 1; i < (layerCount - 1); i++ { @@ -450,7 +537,6 @@ func gpaSumcheckVerifier( i, 4, ) - api.Println(lastEval) if err != nil { return GPASumcheckResult{}, err } @@ -468,7 +554,6 @@ func gpaSumcheckVerifier( utilities.UnivarPoly(api, l, []frontend.Variable{0})[0], utilities.UnivarPoly(api, l, []frontend.Variable{1})[0], ) - api.Println(claimedLastSch) api.AssertIsEqual(claimedLastSch, lastEval) prevRand = append(rand, r[0]) lastEval = utilities.UnivarPoly(api, l, []frontend.Variable{r[0]})[0] @@ -486,3 +571,16 @@ type GPASumcheckResult struct { lastSumcheckValue frontend.Variable randomness []frontend.Variable } + +func CalculateAdr(api frontend.API, coefficients []frontend.Variable) frontend.Variable { + ans := frontend.Variable(0) + for _, coefficient := range 
coefficients { + ans = api.Add(api.Mul(ans, 2), coefficient) + } + + return ans +} + +func sparkSingleMatrix() { + +} diff --git a/recursive-verifier/app/circuit/common.go b/recursive-verifier/app/circuit/common.go index 9501ba6c..f4100392 100644 --- a/recursive-verifier/app/circuit/common.go +++ b/recursive-verifier/app/circuit/common.go @@ -119,6 +119,10 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, var sparkMerklePaths []FullMultiPath[KeccakDigest] var sparkStirAnswers [][][]Fp256 var sparkClaimedEvaluations []Fp256 + var rowFinalCounter []Fp256 + var rowRSAddressEvaluation []Fp256 + var rowRSValueEvaluation []Fp256 + var rowRSTimestampEvaluation []Fp256 for _, op := range spark_io.Ops { switch op.Kind { @@ -161,6 +165,50 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, if err != nil { return fmt.Errorf("failed to deserialize spark_last_folds: %w", err) } + case "row_final_counter_claimed_evaluation": + var temp Fp256 + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &temp, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize row_final_counter_claimed_evaluation : %w", err) + } + rowFinalCounter = append(rowFinalCounter, temp) + case "row_rs_address_claimed_evaluation": + var temp Fp256 + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &temp, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize row_rs_address_claimed_evaluation : %w", err) + } + rowRSAddressEvaluation = append(rowRSAddressEvaluation, temp) + case "row_rs_value_claimed_evaluation": + var temp Fp256 + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &temp, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize row_rs_value_claimed_evaluation : %w", err) + } + rowRSValueEvaluation = append(rowRSValueEvaluation, temp) + case "row_rs_timestamp_claimed_evaluation": + var temp Fp256 + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &temp, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize row_rs_timestamp_claimed_evaluation : %w", err) + } + rowRSTimestampEvaluation = append(rowRSTimestampEvaluation, temp) } if err != nil { @@ -206,12 +254,27 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, var witnessData = consumeWhirData(config.WHIRConfigWitness, &merklePaths, &stirAnswers) // Read from spark - var sparkSumcheckData = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) + + sparkSumcheckData := ZKHint{} + rowFinal := ZKHint{} + // var sparkSumcheckData = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) + // fmt.Println("Aa", len(sparkMerklePaths)) + // var rowFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers) + // fmt.Println("Aa1", len(sparkMerklePaths)) + + // fmt.Print("Len", len(rowFinal.firstRoundMerklePaths.path.merklePaths)) + var rowwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) hints := Hints{ - witnessHints: witnessData, - spartanHidingHint: hidingSpartanData, - sparkSumcheckData: sparkSumcheckData, + witnessHints: witnessData, + spartanHidingHint: hidingSpartanData, + sparkSumcheckData: sparkSumcheckData, + rowFinalMerkle: rowFinal, + 
rowFinalCounter: rowFinalCounter, + rowRSAddressEvaluation: rowRSAddressEvaluation, + rowRSValueEvaluation: rowRSValueEvaluation, + rowRSTimestampEvaluation: rowRSTimestampEvaluation, + rowwiseSparkMerkle: rowwiseSparkMerkle, } err = verifyCircuit(deferred, config, sparkConfig, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner, evaluation, sparkClaimedEvaluations) diff --git a/recursive-verifier/app/circuit/mt.go b/recursive-verifier/app/circuit/mt.go index 4930c399..ce5b41d0 100644 --- a/recursive-verifier/app/circuit/mt.go +++ b/recursive-verifier/app/circuit/mt.go @@ -7,6 +7,12 @@ import ( "github.com/consensys/gnark/std/math/uints" ) +func newCombinedMerkle(hint ZKHint, isContainer bool) CombinedMerkle { + return CombinedMerkle{ + firstRound: newMerkle(hint.firstRoundMerklePaths.path, isContainer), + mainRounds: newMerkle(hint.roundHints, isContainer), + } +} func newMerkle( hint Hint, isContainer bool, diff --git a/recursive-verifier/app/circuit/mtUtilities.go b/recursive-verifier/app/circuit/mtUtilities.go index 5f8a3f38..1e73f0ba 100644 --- a/recursive-verifier/app/circuit/mtUtilities.go +++ b/recursive-verifier/app/circuit/mtUtilities.go @@ -19,14 +19,11 @@ func initialSumcheck( linearStatementEvaluations [][]frontend.Variable, evaluationStatementClaimedValues [][]frontend.Variable, ) (InitialSumcheckData, frontend.Variable, []frontend.Variable, error) { - // ) error { lengthOfLinearStatementEvaluations := len(linearStatementEvaluations[0]) lengthOfEvaluationStatement := len(evaluationStatementClaimedValues[0]) initialCombinationRandomness, err := GenerateCombinationRandomness(api, arthur, len(initialOODAnswers)+lengthOfLinearStatementEvaluations+lengthOfEvaluationStatement) - // initialCombinationRandomness, err := GenerateCombinationRandomness(api, arthur, len(initialOODAnswers)+lengthOfLinearStatementEvaluations) if err != nil { - // return nil return InitialSumcheckData{}, nil, nil, err } @@ -55,18 +52,12 @@ func initialSumcheck( combinedEvaluationStatementEvaluations[evaluationIndex] = sum } - api.Println(combinedEvaluationStatementEvaluations) - OODAnswersAndStatmentEvaluations := append(append(initialOODAnswers, combinedLinearStatementEvaluations...), combinedEvaluationStatementEvaluations...) - // OODAnswersAndStatmentEvaluations := append(initialOODAnswers, combinedLinearStatementEvaluations...) 
lastEval := utilities.DotProduct(api, initialCombinationRandomness, OODAnswersAndStatmentEvaluations) - // _ = lastEval - initialSumcheckFoldingRandomness, lastEval, err := runWhirSumcheckRounds(api, lastEval, arthur, whirParams.FoldingFactorArray[0], 3) if err != nil { - // return nil return InitialSumcheckData{}, nil, nil, err } @@ -79,22 +70,22 @@ func initialSumcheck( } -func parseBatchedCommitment(arthur gnarkNimue.Arthur, whir_params WHIRParams) (frontend.Variable, frontend.Variable, []frontend.Variable, [][]frontend.Variable, error) { +func parseBatchedCommitment(arthur gnarkNimue.Arthur, whir_params WHIRParams) (Commitment, error) { rootHash := make([]frontend.Variable, 1) if err := arthur.FillNextScalars(rootHash); err != nil { - return nil, nil, nil, [][]frontend.Variable{}, err + return Commitment{}, err } oodPoints := make([]frontend.Variable, 1) oodAnswers := make([][]frontend.Variable, whir_params.BatchSize) if err := arthur.FillChallengeScalars(oodPoints); err != nil { - return nil, nil, nil, nil, err + return Commitment{}, err } for i := range whir_params.BatchSize { oodAnswer := make([]frontend.Variable, 1) if err := arthur.FillNextScalars(oodAnswer); err != nil { - return nil, nil, nil, nil, err + return Commitment{}, err } oodAnswers[i] = oodAnswer } @@ -102,10 +93,16 @@ func parseBatchedCommitment(arthur gnarkNimue.Arthur, whir_params WHIRParams) (f batchingRandomness := []frontend.Variable{0} if whir_params.BatchSize > 1 { if err := arthur.FillChallengeScalars(batchingRandomness); err != nil { - return nil, 0, nil, nil, err + return Commitment{}, err } } - return rootHash[0], batchingRandomness[0], oodPoints, oodAnswers, nil + return Commitment{ + rootHash: rootHash[0], + batchingRandomness: batchingRandomness[0], + initialOODQueries: oodPoints, + initialOODAnswers: oodAnswers, + }, nil + } func generateFinalCoefficientsAndRandomnessPoints(api frontend.API, arthur gnarkNimue.Arthur, whir_params WHIRParams, circuit Merkle, uapi *uints.BinaryField[uints.U64], sc *skyscraper.Skyscraper, domainSize int, expDomainGenerator frontend.Variable) ([]frontend.Variable, []frontend.Variable, error) { diff --git a/recursive-verifier/app/circuit/types.go b/recursive-verifier/app/circuit/types.go index fc195c7a..44eebf22 100644 --- a/recursive-verifier/app/circuit/types.go +++ b/recursive-verifier/app/circuit/types.go @@ -83,6 +83,11 @@ type Merkle struct { AuthPaths [][][]frontend.Variable } +type CombinedMerkle struct { + firstRound Merkle + mainRounds Merkle +} + // Other types type ProofObject struct { StatementValuesAtRandomPoint []Fp256 `json:"statement_values_at_random_point"` @@ -102,9 +107,15 @@ type Config struct { } type Hints struct { - witnessHints ZKHint - spartanHidingHint ZKHint - sparkSumcheckData ZKHint + witnessHints ZKHint + spartanHidingHint ZKHint + sparkSumcheckData ZKHint + rowFinalMerkle ZKHint + rowFinalCounter []Fp256 + rowRSAddressEvaluation []Fp256 + rowRSValueEvaluation []Fp256 + rowRSTimestampEvaluation []Fp256 + rowwiseSparkMerkle ZKHint } type Hint struct { @@ -139,11 +150,24 @@ type ClaimedEvaluations struct { // } type SparkConfig struct { - IOPattern string `json:"io_pattern"` - Transcript []byte `json:"transcript"` - WHIRA3 WHIRConfig `json:"whir_a3"` - WHIRRow WHIRConfig `json:"whir_row"` - WHIRCol WHIRConfig `json:"whir_col"` - LogANumTerms int `json:"log_a_num_terms"` - AClaimed frontend.Variable `json:"claimed_value_for_a"` + IOPattern string `json:"io_pattern"` + Transcript []byte `json:"transcript"` + WHIRA3 WHIRConfig `json:"whir_a3"` + 
WHIRRow WHIRConfig `json:"whir_row"` + WHIRCol WHIRConfig `json:"whir_col"` + LogANumTerms int `json:"log_a_num_terms"` +} + +type Commitment struct { + rootHash frontend.Variable + batchingRandomness frontend.Variable + initialOODQueries []frontend.Variable + initialOODAnswers [][]frontend.Variable +} + +type SPARKMatrixData struct { + RowFinalCounter frontend.Variable + RowRSAddressEvaluation frontend.Variable + RowRSValueEvaluation frontend.Variable + RowRSTimestampEvaluation frontend.Variable } diff --git a/recursive-verifier/app/circuit/utilities.go b/recursive-verifier/app/circuit/utilities.go index 16ab8f28..205acf40 100644 --- a/recursive-verifier/app/circuit/utilities.go +++ b/recursive-verifier/app/circuit/utilities.go @@ -176,7 +176,7 @@ func runZKSumcheck( whirParams WHIRParams, ) ([]frontend.Variable, frontend.Variable, error) { - rootHash, batchingRandomness, initialOODQueries, initialOODAnswers, err := parseBatchedCommitment(arthur, whirParams) + commitment, err := parseBatchedCommitment(arthur, whirParams) if err != nil { return nil, nil, err } @@ -195,7 +195,7 @@ func runZKSumcheck( lastEval, polynomialSums := unblindLastEval(api, arthur, lastEval, rhoRandomness) - _, err = RunZKWhir(api, arthur, uapi, sc, circuit.HidingSpartanMerkle, circuit.HidingSpartanFirstRound, whirParams, [][]frontend.Variable{{polynomialSums[0]}, {polynomialSums[1]}}, circuit.HidingSpartanLinearStatementEvaluations, batchingRandomness, initialOODQueries, initialOODAnswers, rootHash, + _, err = RunZKWhir(api, arthur, uapi, sc, circuit.HidingSpartanMerkle, circuit.HidingSpartanFirstRound, whirParams, [][]frontend.Variable{{polynomialSums[0]}, {polynomialSums[1]}}, circuit.HidingSpartanLinearStatementEvaluations, commitment, [][]frontend.Variable{{}, {}}, [][]frontend.Variable{}, ) diff --git a/recursive-verifier/app/circuit/whir.go b/recursive-verifier/app/circuit/whir.go index f5bbde77..b9c3dc47 100644 --- a/recursive-verifier/app/circuit/whir.go +++ b/recursive-verifier/app/circuit/whir.go @@ -58,19 +58,22 @@ func RunZKWhir( whirParams WHIRParams, linearStatementEvaluations [][]frontend.Variable, linearStatementValuesAtPoints []frontend.Variable, - batchingRandomness frontend.Variable, - initialOODQueries []frontend.Variable, - initialOODAnswers [][]frontend.Variable, - rootHashes frontend.Variable, + + commitment Commitment, + + // batchingRandomness frontend.Variable, + // initialOODQueries []frontend.Variable, + // initialOODAnswers [][]frontend.Variable, + // rootHashes frontend.Variable, evaluationStatementClaimedValues [][]frontend.Variable, evaluationPoints [][]frontend.Variable, ) (totalFoldingRandomness []frontend.Variable, err error) { - initialOODs := oodAnswers(api, initialOODAnswers, batchingRandomness) + initialOODs := oodAnswers(api, commitment.initialOODAnswers, commitment.batchingRandomness) - initialSumcheckData, lastEval, initialSumcheckFoldingRandomness, err := initialSumcheck(api, arthur, batchingRandomness, initialOODQueries, initialOODs, whirParams, linearStatementEvaluations, evaluationStatementClaimedValues) + initialSumcheckData, lastEval, initialSumcheckFoldingRandomness, err := initialSumcheck(api, arthur, commitment.batchingRandomness, commitment.initialOODQueries, initialOODs, whirParams, linearStatementEvaluations, evaluationStatementClaimedValues) if err != nil { return } @@ -78,7 +81,7 @@ func RunZKWhir( roundAnswers := make([][][]frontend.Variable, len(circuit.Leaves)+1) foldSize := 1 << whirParams.FoldingFactorArray[0] - collapsed := rlcBatchedLeaves(api, 
firstRound.Leaves[0], foldSize, whirParams.BatchSize, batchingRandomness) + collapsed := rlcBatchedLeaves(api, firstRound.Leaves[0], foldSize, whirParams.BatchSize, commitment.batchingRandomness) roundAnswers[0] = collapsed for i := range len(circuit.Leaves) { @@ -122,7 +125,7 @@ func RunZKWhir( if err != nil { return } - err = verifyMerkleTreeProofs(api, uapi, sc, firstRound.LeafIndexes[0], firstRound.Leaves[0], firstRound.LeafSiblingHashes[0], firstRound.AuthPaths[0], rootHashes) + err = verifyMerkleTreeProofs(api, uapi, sc, firstRound.LeafIndexes[0], firstRound.Leaves[0], firstRound.LeafSiblingHashes[0], firstRound.AuthPaths[0], commitment.rootHash) if err != nil { return } diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index fe497b8e..fea48c4c 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -126,7 +126,7 @@ pub fn verify_spark_single_matrix( // final_folds[2] * a_sumcheck_commitment.batching_randomness * a_sumcheck_commitment.batching_randomness, // ); - // let a_spark_sumcheck_verifier = Verifier::new(num_nonzero_term_batched3_config); + let a_spark_sumcheck_verifier = Verifier::new(num_nonzero_term_batched3_config); // a_spark_sumcheck_verifier.verify(arthur, &a_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?; // Matrix A - Rowwise @@ -141,83 +141,84 @@ pub fn verify_spark_single_matrix( next_power_of_two(num_rows) + 2, )?; - // let claimed_init = gpa_result.claimed_values[0]; - // let claimed_final = gpa_result.claimed_values[1]; + let claimed_init = gpa_result.claimed_values[0]; + let claimed_final = gpa_result.claimed_values[1]; - // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - // let init_adr = calculate_adr(&evaluation_randomness.to_vec()); - // let init_mem = calculate_eq( - // &request.point_to_evaluate.row, - // &evaluation_randomness.to_vec(), - // ); - // let init_cntr = FieldElement::from(0); + let init_adr = calculate_adr(&evaluation_randomness.to_vec()); + let init_mem = calculate_eq( + &request.point_to_evaluate.row, + &evaluation_randomness.to_vec(), + ); + let init_cntr = FieldElement::from(0); - // let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; + let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; + let final_cntr: FieldElement = arthur.hint()?; - // let final_cntr: FieldElement = arthur.hint()?; + println!("Final cntr {:?}", final_cntr); //Reilabs Debug: - // let mut final_cntr_statement = - // Statement::::new(next_power_of_two(num_rows)); - // final_cntr_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - // final_cntr, - // ); + let mut final_cntr_statement = + Statement::::new(next_power_of_two(num_rows)); + final_cntr_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + final_cntr, + ); // let final_cntr_verifier = Verifier::new(row_config); // final_cntr_verifier // .verify(arthur, &a_row_finalts_commitment, &final_cntr_statement) // .context("while verifying WHIR")?; - // let final_adr = calculate_adr(&evaluation_randomness.to_vec()); - // let final_mem = calculate_eq( - // &request.point_to_evaluate.row, - // &evaluation_randomness.to_vec(), - // ); + let final_adr = calculate_adr(&evaluation_randomness.to_vec()); + let final_mem = calculate_eq( + 
&request.point_to_evaluate.row, + &evaluation_randomness.to_vec(), + ); - // let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; + let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; - // let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) - // + final_opening * last_randomness[0]; + let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + + final_opening * last_randomness[0]; - // ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - // let gpa_result = gpa_sumcheck_verifier( - // arthur, - // next_power_of_two(num_nonzero_terms) + 2, - // )?; + let gpa_result = gpa_sumcheck_verifier( + arthur, + next_power_of_two(num_nonzero_terms) + 2, + )?; - // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - // let claimed_rs = gpa_result.claimed_values[0]; - // let claimed_ws = gpa_result.claimed_values[1]; + let claimed_rs = gpa_result.claimed_values[0]; + let claimed_ws = gpa_result.claimed_values[1]; - // let rs_adr: FieldElement = arthur.hint()?; - // let rs_mem: FieldElement = arthur.hint()?; - // let rs_timestamp: FieldElement = arthur.hint()?; + let rs_adr: FieldElement = arthur.hint()?; + let rs_mem: FieldElement = arthur.hint()?; + let rs_timestamp: FieldElement = arthur.hint()?; - // let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; - // let ws_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; + let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; + let ws_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; - // let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) - // + ws_opening * last_randomness[0]; + let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) + + ws_opening * last_randomness[0]; - // ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - // let mut a_spark_rowwise_statement_verifier = Statement::::new(next_power_of_two( - // num_nonzero_terms, - // )); + let mut a_spark_rowwise_statement_verifier = Statement::::new(next_power_of_two( + num_nonzero_terms, + )); - // a_spark_rowwise_statement_verifier.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - // rs_adr + - // rs_mem * a_rowwise_commitment.batching_randomness + - // rs_timestamp * a_rowwise_commitment.batching_randomness * a_rowwise_commitment.batching_randomness, - // ); + a_spark_rowwise_statement_verifier.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + rs_adr + + rs_mem * a_rowwise_commitment.batching_randomness + + rs_timestamp * a_rowwise_commitment.batching_randomness * a_rowwise_commitment.batching_randomness, + ); - // a_spark_sumcheck_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; + a_spark_sumcheck_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; - // ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); + ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); // // Matrix A - Colwise @@ -379,7 +380,6 @@ pub fn gpa_sumcheck_verifier( 
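// Editor's note (hedged sketch of the hunk below, using only names already in
// this file): each round of gpa_sumcheck_verifier reads the cubic round
// polynomial h, draws a challenge alpha, and folds the running claim via
//     a_last_sumcheck_value = eval_cubic_poly(&h, &alpha[0]);
// at a layer boundary it reads the linear leaf polynomial l and checks
//     calculate_eq(&prev_rand, &rand)
//         * eval_linear_poly(&l, &FieldElement::from(0))
//         * eval_linear_poly(&l, &FieldElement::from(1))
//     == a_last_sumcheck_value
// before pushing the layer challenge r[0] and descending to the next layer.
// The two println! lines removed in this hunk were debug output only; the
// checks themselves are unchanged.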
rand.push(alpha[0]); a_last_sumcheck_value = eval_cubic_poly(&h, &alpha[0]); } - println!("{:?}", a_last_sumcheck_value); arthur .fill_next_scalars(&mut l) .expect("Failed to fill next scalars"); @@ -389,7 +389,6 @@ pub fn gpa_sumcheck_verifier( let claimed_last_sch = calculate_eq(&prev_rand, &rand) * eval_linear_poly(&l, &FieldElement::from(0)) * eval_linear_poly(&l, &FieldElement::from(1)); - println!("{:?}", claimed_last_sch); assert_eq!(claimed_last_sch, a_last_sumcheck_value); rand.push(r[0]); prev_rand = rand; diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index 8c83623c..2db13a3f 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -86,6 +86,8 @@ fn main() -> Result<()> { let mut gnark_spark_proof_file = File::create("spark-prover/gnark_spark_proof.json") .context("Error: Failed to create the spark proof file")?; + println!("IO{:?}", spark_proof_gnark.io_pattern); //Reilabs Debug: + gnark_spark_proof_file .write_all(serde_json::to_string(&spark_proof_gnark).unwrap().as_bytes()) .expect("Writing spark gnark parameters to a file failed"); diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index dd1429f4..980f8ab0 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -114,11 +114,11 @@ pub fn prove_spark_for_single_matrix( let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); - // let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - // let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone()) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&final_row_eval)?; + let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone()) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&final_row_eval)?; // produce_whir_proof( // merlin, @@ -130,59 +130,59 @@ pub fn prove_spark_for_single_matrix( // // Rowwise RS WS GPA - // let rs_address = matrix.coo.row.clone(); - // let rs_value = e_values.e_rx.clone(); - // let rs_timestamp = matrix.timestamps.read_row.clone(); + let rs_address = matrix.coo.row.clone(); + let rs_value = e_values.e_rx.clone(); + let rs_timestamp = matrix.timestamps.read_row.clone(); + + let rs_vec: Vec = + izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); + + let ws_address = matrix.coo.row.clone(); + let ws_value = e_values.e_rx.clone(); + let ws_timestamp: Vec = matrix + .timestamps + .read_row + .into_iter() + .map(|a| a + FieldElement::from(1)) + .collect(); - // let rs_vec: Vec = - // izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) - // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - // .collect(); + let ws_vec: Vec = + izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); - // let ws_address = matrix.coo.row.clone(); - // let ws_value = e_values.e_rx.clone(); - // let ws_timestamp: Vec = matrix - // .timestamps - // .read_row - // .into_iter() - // .map(|a| a + FieldElement::from(1)) - // .collect(); + let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); - // let ws_vec: Vec = - // izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) - // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - // .collect(); + let (_combination_randomness, 
evaluation_randomness) = gpa_randomness.split_at(1); - // let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); - - // let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - - // let rs_address_eval = EvaluationsList::new(rs_address) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&rs_address_eval)?; + let rs_address_eval = EvaluationsList::new(rs_address) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_address_eval)?; - // let rs_value_eval = EvaluationsList::new(rs_value) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&rs_value_eval)?; + let rs_value_eval = EvaluationsList::new(rs_value) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_value_eval)?; - // let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&rs_timestamp_eval)?; + let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_timestamp_eval)?; - // let mut rowwise_statement = Statement::::new(evaluation_randomness.len()); + let mut rowwise_statement = Statement::::new(evaluation_randomness.len()); - // let claimed_rowwise_eval = - // rs_address_eval + - // rs_value_eval * rowwise_witness.batching_randomness + - // rs_timestamp_eval * rowwise_witness.batching_randomness * rowwise_witness.batching_randomness; + let claimed_rowwise_eval = + rs_address_eval + + rs_value_eval * rowwise_witness.batching_randomness + + rs_timestamp_eval * rowwise_witness.batching_randomness * rowwise_witness.batching_randomness; // assert!(claimed_rowwise_eval == rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); - // rowwise_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); + rowwise_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); - // let sumcheck_prover = Prover::new(batched_config.clone()); - // sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?; + let sumcheck_prover = Prover::new(batched_config.clone()); + sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?; // // Colwise diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs index 7dc79b96..ec67abb7 100644 --- a/spark-prover/src/utilities/iopattern/mod.rs +++ b/spark-prover/src/utilities/iopattern/mod.rs @@ -52,20 +52,22 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { io = io.add_line(); } - // io = io - // .hint("Row final counter claimed evaluation") + io = io + .hint("row_final_counter_claimed_evaluation"); // .add_whir_proof(&configs.row); - // for i in 0..=next_power_of_two(r1cs.a.num_entries()) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + // Can I send all hints once in struct? 
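// Editor's note on the relabelled hints above: the strings are load-bearing.
// The reference verifier (spark-verifier.rs) reads them back in the same
// order with typed hints, e.g.
//     let rs_adr: FieldElement = arthur.hint()?;
//     let rs_mem: FieldElement = arthur.hint()?;
//     let rs_timestamp: FieldElement = arthur.hint()?;
// and the gnark side (common.go, later in this series) dispatches on the
// exact label text when slicing evaluations out of the transcript. So, on
// the question in the comment above: sending all hints in one struct-valued
// hint looks feasible in principle, but it would also require reworking both
// consumers in step; the per-value labels are what currently keeps the Rust
// prover, the Rust verifier, and the Go circuit aligned.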
- // io = io - // .hint("RS address claimed evaluation") - // .hint("RS value claimed evaluation") - // .hint("RS timestamp claimed evaluation") - // .add_whir_proof(&configs.a_3batched); + for i in 0..=next_power_of_two(r1cs.a.num_entries()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } + + io = io + .hint("row_rs_address_claimed_evaluation") + .hint("row_rs_value_claimed_evaluation") + .hint("row_rs_timestamp_claimed_evaluation") + .add_whir_proof(&configs.a_3batched); // // Colwise From 9be039842a9e781ab9ee135a7eaefb64f333b78b Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Mon, 29 Sep 2025 17:55:17 +0800 Subject: [PATCH 18/34] Single matrix --- recursive-verifier/app/circuit/circuit.go | 421 ++++++++++++-------- recursive-verifier/app/circuit/common.go | 76 +++- recursive-verifier/app/circuit/types.go | 37 +- spark-prover/src/bin/spark-verifier.rs | 138 ++++--- spark-prover/src/spark.rs | 150 +++---- spark-prover/src/utilities/iopattern/mod.rs | 34 +- 6 files changed, 528 insertions(+), 328 deletions(-) diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go index 2aea4787..d882eb49 100644 --- a/recursive-verifier/app/circuit/circuit.go +++ b/recursive-verifier/app/circuit/circuit.go @@ -58,8 +58,6 @@ type Circuit struct { SparkSumcheckMerkle Merkle RowFinalMerkleFirstRound Merkle RowFinalMerkle Merkle - RowwiseMerkleFirstRound Merkle - RowwiseMerkle Merkle } func (circuit *Circuit) Define(api frontend.API) error { @@ -120,148 +118,17 @@ func (circuit *Circuit) Define(api frontend.API) error { return err } - sumcheckCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRA3) - if err != nil { - return err - } - rowwiseCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRA3) - if err != nil { - return err - } - colwiseCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRA3) - if err != nil { - return err - } - - rowFinalCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRRow) - if err != nil { - return err - } - colFinalCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRCol) - if err != nil { - return err - } - - // After debug: Change 1 to actual claimed value - sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, 1, circuit.LogANumTerms, 4) - if err != nil { - return err - } - - _ = sparkSumcheckFoldingRandomness - _ = sparkSumcheckLastEval - - // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, circuit.WHIRA3, [][]frontend.Variable{{}, {}, {}}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash, - // [][]frontend.Variable{{circuit.SparkSumcheckLast[0]}, {circuit.SparkSumcheckLast[1]}, {circuit.SparkSumcheckLast[2]}}, - // [][]frontend.Variable{sparkSumcheckFoldingRandomness}, - // ) - // if err != nil { - // return err - // } - - // Rowwise - - tauGammaTemp := make([]frontend.Variable, 2) - if err := arthur.FillChallengeScalars(tauGammaTemp); err != nil { - return err - } - tau := tauGammaTemp[0] - gamma := tauGammaTemp[1] - - // Change this debug statement - gpaResult, err := gpaSumcheckVerifier(api, arthur, 8+2) - if err != nil { - return err - } - - claimedInit := gpaResult.claimedProducts[0] - claimedFinal := gpaResult.claimedProducts[1] - - last_randomness := gpaResult.randomness[0] - evaluation_randomness := gpaResult.randomness[1:] - - addr := CalculateAdr(api, evaluation_randomness) 
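// Editor's note: this removed block reappears, essentially verbatim, inside
// sparkSingleMatrix further down. The quantity being built is the row
// memory-check opening
//     opening = addr*gamma^2 + mem*gamma + counter - tau
// evaluated at the GPA's evaluation randomness; the circuit then checks
//     init_opening*(1 - last_randomness) + final_opening*last_randomness
// against the grand product argument's last sumcheck value.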
- mem := calculateEQ(api, []frontend.Variable{0, 0, 0, 0, 0, 0, 0, 0}, evaluation_randomness) - init_cntr := 0 - - init_opening := api.Sub(api.Add(api.Mul(addr, gamma, gamma), api.Mul(mem, gamma), init_cntr), tau) - - // _, err = RunZKWhir(api, arthur, uapi, sc, circuit.RowFinalMerkle, circuit.RowFinalMerkleFirstRound, circuit.WHIRRow, [][]frontend.Variable{{}}, []frontend.Variable{}, rowFinaltsBatchingRandomness, rowFinaltsInitialOODQueries, rowFinaltsInitialOODAnswers, rowFinaltsRootHash, - // [][]frontend.Variable{{circuit.SparkA.RowFinalCounter}}, - // [][]frontend.Variable{evaluation_randomness}, - // ) - - final_opening := api.Sub(api.Add(api.Mul(addr, gamma, gamma), api.Mul(mem, gamma), circuit.SparkA.RowFinalCounter), tau) - - evaluated_value := api.Add(api.Mul(init_opening, api.Sub(1, last_randomness)), api.Mul(final_opening, last_randomness)) - - api.AssertIsEqual(gpaResult.lastSumcheckValue, evaluated_value) - - // Change this after debug - gpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, 8+2) + err = sparkSingleMatrix( + api, + arthur, + uapi, + sc, + circuit.SparkA, + circuit, + ) if err != nil { return err } - - claimedRS := gpaResultRSWS.claimedProducts[0] - claimedWS := gpaResultRSWS.claimedProducts[1] - - rsws_last_randomness := gpaResultRSWS.randomness[0] - rsws_evaluation_randomness := gpaResultRSWS.randomness[1:] - - rs_opening := api.Sub(api.Add(api.Mul(circuit.SparkA.RowRSAddressEvaluation, gamma, gamma), api.Mul(circuit.SparkA.RowRSValueEvaluation, gamma), circuit.SparkA.RowRSTimestampEvaluation), tau) - ws_opening := api.Sub(api.Add(api.Mul(circuit.SparkA.RowRSAddressEvaluation, gamma, gamma), api.Mul(circuit.SparkA.RowRSValueEvaluation, gamma), circuit.SparkA.RowRSTimestampEvaluation, 1), tau) - - rsws_evaluated_value := api.Add(api.Mul(rs_opening, api.Sub(1, rsws_last_randomness)), api.Mul(ws_opening, rsws_last_randomness)) - - api.AssertIsEqual(gpaResultRSWS.lastSumcheckValue, rsws_evaluated_value) - - _, err = RunZKWhir(api, arthur, uapi, sc, circuit.RowwiseMerkle, circuit.RowwiseMerkleFirstRound, circuit.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, rowwiseCommitment, - [][]frontend.Variable{{circuit.SparkA.RowRSAddressEvaluation}, {circuit.SparkA.RowRSValueEvaluation}, {circuit.SparkA.RowRSTimestampEvaluation}}, - [][]frontend.Variable{rsws_evaluation_randomness}, - ) - - api.AssertIsEqual(api.Mul(claimedInit, claimedWS), api.Mul(claimedRS, claimedFinal)) - - _ = rsws_evaluation_randomness - - _ = gpaResultRSWS - _ = claimedInit - _ = claimedFinal - - _ = sumcheckCommitment - _ = colwiseCommitment - _ = rowwiseCommitment - _ = rowFinalCommitment - _ = colFinalCommitment - // _ = whirFoldingRandomness - - // _ = sumcheckRootHash - // _ = sumcheckBatchingRandomness - // _ = sumcheckInitialOODAnswers - // _ = sumcheckInitialOODQueries - - // _ = rowwiseRootHash - // _ = rowwiseBatchingRandomness - // _ = rowwiseInitialOODAnswers - // _ = rowwiseInitialOODQueries - - // _ = colwiseRootHash - // _ = colwiseBatchingRandomness - // _ = colwiseInitialOODAnswers - // _ = colwiseInitialOODQueries - - // _ = rowFinaltsRootHash - // _ = rowFinaltsBatchingRandomness - // _ = rowFinaltsInitialOODAnswers - // _ = rowFinaltsInitialOODQueries - - // _ = colFinaltsRootHash - // _ = colFinaltsBatchingRandomness - // _ = colFinaltsInitialOODAnswers - // _ = colFinaltsInitialOODQueries - - _ = uapi } else { // matrixExtensionEvals := evaluateR1CSMatrixExtension(api, circuit, spartanSumcheckRand, whirFoldingRandomness) @@ -311,6 +178,11 @@ func verifyCircuit( 
rowRSValueEvaluation := typeConverters.LimbsToBigIntMod(hints.rowRSValueEvaluation[0].Limbs) rowRSTimestampEvaluation := typeConverters.LimbsToBigIntMod(hints.rowRSTimestampEvaluation[0].Limbs) + colFinalCounter := typeConverters.LimbsToBigIntMod(hints.colFinalCounter[0].Limbs) + colRSAddressEvaluation := typeConverters.LimbsToBigIntMod(hints.colRSAddressEvaluation[0].Limbs) + colRSValueEvaluation := typeConverters.LimbsToBigIntMod(hints.colRSValueEvaluation[0].Limbs) + colRSTimestampEvaluation := typeConverters.LimbsToBigIntMod(hints.colRSTimestampEvaluation[0].Limbs) + fSums, gSums := parseClaimedEvaluations(claimedEvaluations, true) matrixA := make([]MatrixCell, len(internedR1CS.A.Values)) @@ -380,27 +252,49 @@ func verifyCircuit( MatrixB: matrixB, MatrixC: matrixC, - SPARKIO: []byte(sparkConfig.IOPattern), - SPARKTranscript: sparkContTranscript, - WHIRA3: NewWhirParams(sparkConfig.WHIRA3), - WHIRRow: NewWhirParams(sparkConfig.WHIRRow), - WHIRCol: NewWhirParams(sparkConfig.WHIRCol), + SPARKIO: []byte(sparkConfig.IOPattern), + SPARKTranscript: sparkContTranscript, + WHIRA3: NewWhirParams(sparkConfig.WHIRA3), + WHIRRow: NewWhirParams(sparkConfig.WHIRRow), + WHIRCol: NewWhirParams(sparkConfig.WHIRCol), + + LogANumTerms: sparkConfig.LogANumTerms, + SparkSumcheckLast: contSparkSumcheckLast, + SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, true), SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, true), - LogANumTerms: sparkConfig.LogANumTerms, - SparkSumcheckLast: contSparkSumcheckLast, RowFinalMerkleFirstRound: newMerkle(hints.rowFinalMerkle.firstRoundMerklePaths.path, true), RowFinalMerkle: newMerkle(hints.rowFinalMerkle.roundHints, true), - RowwiseMerkleFirstRound: newMerkle(hints.rowwiseSparkMerkle.firstRoundMerklePaths.path, true), - RowwiseMerkle: newMerkle(hints.rowwiseSparkMerkle.roundHints, true), - SparkA: SPARKMatrixData{ RowFinalCounter: rowFinalCounter, RowRSAddressEvaluation: rowRSAddressEvaluation, RowRSValueEvaluation: rowRSValueEvaluation, RowRSTimestampEvaluation: rowRSTimestampEvaluation, + + ColFinalCounter: colFinalCounter, + ColRSAddressEvaluation: colRSAddressEvaluation, + ColRSValueEvaluation: colRSValueEvaluation, + ColRSTimestampEvaluation: colRSTimestampEvaluation, + + SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, true), + SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, true), + + RowFinalMerkleFirstRound: newMerkle(hints.rowFinalMerkle.firstRoundMerklePaths.path, true), + RowFinalMerkle: newMerkle(hints.rowFinalMerkle.roundHints, true), + + RowwiseMerkleFirstRound: newMerkle(hints.rowwiseSparkMerkle.firstRoundMerklePaths.path, true), + RowwiseMerkle: newMerkle(hints.rowwiseSparkMerkle.roundHints, true), + + ColFinalMerkleFirstRound: newMerkle(hints.colFinalMerkle.firstRoundMerklePaths.path, true), + ColFinalMerkle: newMerkle(hints.colFinalMerkle.roundHints, true), + + ColwiseMerkleFirstRound: newMerkle(hints.colwiseSparkMerkle.firstRoundMerklePaths.path, true), + ColwiseMerkle: newMerkle(hints.colwiseSparkMerkle.roundHints, true), + + WHIRA3: NewWhirParams(sparkConfig.WHIRA3), + LogANumTerms: sparkConfig.LogANumTerms, }, UseSpark: useSpark, @@ -457,27 +351,48 @@ func verifyCircuit( MatrixB: matrixB, MatrixC: matrixC, - SPARKIO: []byte(sparkConfig.IOPattern), - SPARKTranscript: sparkTranscriptT, - WHIRA3: NewWhirParams(sparkConfig.WHIRA3), - WHIRRow: NewWhirParams(sparkConfig.WHIRRow), - WHIRCol: NewWhirParams(sparkConfig.WHIRCol), + SPARKIO: 
[]byte(sparkConfig.IOPattern), + SPARKTranscript: sparkTranscriptT, + WHIRA3: NewWhirParams(sparkConfig.WHIRA3), + WHIRRow: NewWhirParams(sparkConfig.WHIRRow), + WHIRCol: NewWhirParams(sparkConfig.WHIRCol), + LogANumTerms: sparkConfig.LogANumTerms, + SparkSumcheckLast: sparkSumcheckLast, + SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, false), SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, false), - LogANumTerms: sparkConfig.LogANumTerms, - SparkSumcheckLast: sparkSumcheckLast, RowFinalMerkleFirstRound: newMerkle(hints.rowFinalMerkle.firstRoundMerklePaths.path, false), RowFinalMerkle: newMerkle(hints.rowFinalMerkle.roundHints, false), - RowwiseMerkleFirstRound: newMerkle(hints.rowwiseSparkMerkle.firstRoundMerklePaths.path, false), - RowwiseMerkle: newMerkle(hints.rowwiseSparkMerkle.roundHints, false), - SparkA: SPARKMatrixData{ RowFinalCounter: rowFinalCounter, RowRSAddressEvaluation: rowRSAddressEvaluation, RowRSValueEvaluation: rowRSValueEvaluation, RowRSTimestampEvaluation: rowRSTimestampEvaluation, + + ColFinalCounter: colFinalCounter, + ColRSAddressEvaluation: colRSAddressEvaluation, + ColRSValueEvaluation: colRSValueEvaluation, + ColRSTimestampEvaluation: colRSTimestampEvaluation, + + SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, false), + SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, false), + + RowFinalMerkleFirstRound: newMerkle(hints.rowFinalMerkle.firstRoundMerklePaths.path, false), + RowFinalMerkle: newMerkle(hints.rowFinalMerkle.roundHints, false), + + RowwiseMerkleFirstRound: newMerkle(hints.rowwiseSparkMerkle.firstRoundMerklePaths.path, false), + RowwiseMerkle: newMerkle(hints.rowwiseSparkMerkle.roundHints, false), + + ColFinalMerkleFirstRound: newMerkle(hints.colFinalMerkle.firstRoundMerklePaths.path, false), + ColFinalMerkle: newMerkle(hints.colFinalMerkle.roundHints, false), + + ColwiseMerkleFirstRound: newMerkle(hints.colwiseSparkMerkle.firstRoundMerklePaths.path, false), + ColwiseMerkle: newMerkle(hints.colwiseSparkMerkle.roundHints, false), + + WHIRA3: NewWhirParams(sparkConfig.WHIRA3), + LogANumTerms: sparkConfig.LogANumTerms, }, UseSpark: useSpark, @@ -581,6 +496,188 @@ func CalculateAdr(api frontend.API, coefficients []frontend.Variable) frontend.V return ans } -func sparkSingleMatrix() { +func sparkSingleMatrix( + api frontend.API, + arthur gnarkNimue.Arthur, + uapi *uints.BinaryField[uints.U64], + sc *skyscraper.Skyscraper, + matrix SPARKMatrixData, + circuit *Circuit, +) error { + sumcheckCommitment, err := parseBatchedCommitment(arthur, matrix.WHIRA3) + if err != nil { + return err + } + rowwiseCommitment, err := parseBatchedCommitment(arthur, matrix.WHIRA3) + if err != nil { + return err + } + colwiseCommitment, err := parseBatchedCommitment(arthur, matrix.WHIRA3) + if err != nil { + return err + } + + rowFinalCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRRow) + if err != nil { + return err + } + colFinalCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRCol) + if err != nil { + return err + } + + // After debug: Change 1 to actual claimed value + sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, 1, matrix.LogANumTerms, 4) + if err != nil { + return err + } + + _ = sparkSumcheckFoldingRandomness + _ = sparkSumcheckLastEval + + // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, matrix.WHIRA3, 
[][]frontend.Variable{{}, {}, {}}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash, + // [][]frontend.Variable{{circuit.SparkSumcheckLast[0]}, {circuit.SparkSumcheckLast[1]}, {circuit.SparkSumcheckLast[2]}}, + // [][]frontend.Variable{sparkSumcheckFoldingRandomness}, + // ) + // if err != nil { + // return err + // } + + // Rowwise + + tauGammaTemp := make([]frontend.Variable, 2) + if err := arthur.FillChallengeScalars(tauGammaTemp); err != nil { + return err + } + tau := tauGammaTemp[0] + gamma := tauGammaTemp[1] + + // Change this debug statement + gpaResult, err := gpaSumcheckVerifier(api, arthur, 8+2) + if err != nil { + return err + } + + claimedInit := gpaResult.claimedProducts[0] + claimedFinal := gpaResult.claimedProducts[1] + + last_randomness := gpaResult.randomness[0] + evaluation_randomness := gpaResult.randomness[1:] + + addr := CalculateAdr(api, evaluation_randomness) + mem := calculateEQ(api, []frontend.Variable{0, 0, 0, 0, 0, 0, 0, 0}, evaluation_randomness) + init_cntr := 0 + + init_opening := api.Sub(api.Add(api.Mul(addr, gamma, gamma), api.Mul(mem, gamma), init_cntr), tau) + + // _, err = RunZKWhir(api, arthur, uapi, sc, circuit.RowFinalMerkle, circuit.RowFinalMerkleFirstRound, circuit.WHIRRow, [][]frontend.Variable{{}}, []frontend.Variable{}, rowFinaltsBatchingRandomness, rowFinaltsInitialOODQueries, rowFinaltsInitialOODAnswers, rowFinaltsRootHash, + // [][]frontend.Variable{{matrix.RowFinalCounter}}, + // [][]frontend.Variable{evaluation_randomness}, + // ) + + final_opening := api.Sub(api.Add(api.Mul(addr, gamma, gamma), api.Mul(mem, gamma), matrix.RowFinalCounter), tau) + evaluated_value := api.Add(api.Mul(init_opening, api.Sub(1, last_randomness)), api.Mul(final_opening, last_randomness)) + + api.AssertIsEqual(gpaResult.lastSumcheckValue, evaluated_value) + + // Change this after debug + gpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, 8+2) + if err != nil { + return err + } + + claimedRS := gpaResultRSWS.claimedProducts[0] + claimedWS := gpaResultRSWS.claimedProducts[1] + + rsws_last_randomness := gpaResultRSWS.randomness[0] + rsws_evaluation_randomness := gpaResultRSWS.randomness[1:] + + rs_opening := api.Sub(api.Add(api.Mul(matrix.RowRSAddressEvaluation, gamma, gamma), api.Mul(matrix.RowRSValueEvaluation, gamma), matrix.RowRSTimestampEvaluation), tau) + ws_opening := api.Sub(api.Add(api.Mul(matrix.RowRSAddressEvaluation, gamma, gamma), api.Mul(matrix.RowRSValueEvaluation, gamma), matrix.RowRSTimestampEvaluation, 1), tau) + + rsws_evaluated_value := api.Add(api.Mul(rs_opening, api.Sub(1, rsws_last_randomness)), api.Mul(ws_opening, rsws_last_randomness)) + + api.AssertIsEqual(gpaResultRSWS.lastSumcheckValue, rsws_evaluated_value) + + _, err = RunZKWhir(api, arthur, uapi, sc, matrix.RowwiseMerkle, matrix.RowwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, rowwiseCommitment, + [][]frontend.Variable{{matrix.RowRSAddressEvaluation}, {matrix.RowRSValueEvaluation}, {matrix.RowRSTimestampEvaluation}}, + [][]frontend.Variable{rsws_evaluation_randomness}, + ) + _ = rsws_evaluation_randomness + + api.AssertIsEqual(api.Mul(claimedInit, claimedWS), api.Mul(claimedRS, claimedFinal)) + + // Colwise + + _ = sumcheckCommitment + _ = colwiseCommitment + _ = rowwiseCommitment + _ = rowFinalCommitment + _ = colFinalCommitment + + // Colwise + + colwiseTauGammaTemp := make([]frontend.Variable, 2) + if err := arthur.FillChallengeScalars(colwiseTauGammaTemp); err != nil 
{ + return err + } + colwiseTau := colwiseTauGammaTemp[0] + colwiseGamma := colwiseTauGammaTemp[1] + + // Change this debug statement + colwiseInitFinalGpaResult, err := gpaSumcheckVerifier(api, arthur, 8+2) + if err != nil { + return err + } + + colwiseClaimedInit := colwiseInitFinalGpaResult.claimedProducts[0] + colwiseClaimedFinal := colwiseInitFinalGpaResult.claimedProducts[1] + + colwiseLast_randomness := colwiseInitFinalGpaResult.randomness[0] + colwiseEvaluation_randomness := colwiseInitFinalGpaResult.randomness[1:] + + colwiseaddr := CalculateAdr(api, colwiseEvaluation_randomness) + // Add necessary multiplier here + colwisemem := calculateEQ(api, []frontend.Variable{0, 0, 0, 0, 0, 0, 0, 0}, colwiseEvaluation_randomness) + colwiseinit_cntr := 0 + + colwiseinit_opening := api.Sub(api.Add(api.Mul(colwiseaddr, colwiseGamma, colwiseGamma), api.Mul(colwisemem, colwiseGamma), colwiseinit_cntr), colwiseTau) + + _, err = RunZKWhir(api, arthur, uapi, sc, circuit.SparkA.ColFinalMerkle, circuit.SparkA.ColFinalMerkleFirstRound, circuit.WHIRCol, [][]frontend.Variable{{}}, []frontend.Variable{}, colFinalCommitment, + [][]frontend.Variable{{matrix.ColFinalCounter}}, + [][]frontend.Variable{colwiseEvaluation_randomness}, + ) + + colwisefinal_opening := api.Sub(api.Add(api.Mul(colwiseaddr, colwiseGamma, colwiseGamma), api.Mul(colwisemem, colwiseGamma), matrix.ColFinalCounter), colwiseTau) + colwiseevaluated_value := api.Add(api.Mul(colwiseinit_opening, api.Sub(1, colwiseLast_randomness)), api.Mul(colwisefinal_opening, colwiseLast_randomness)) + api.AssertIsEqual(colwiseInitFinalGpaResult.lastSumcheckValue, colwiseevaluated_value) + + // Colwise RS WS + colwisegpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, 8+2) + if err != nil { + return err + } + + colwiseClaimedRS := colwisegpaResultRSWS.claimedProducts[0] + colwiseClaimedWS := colwisegpaResultRSWS.claimedProducts[1] + + colwisersws_last_randomness := colwisegpaResultRSWS.randomness[0] + colwisersws_evaluation_randomness := colwisegpaResultRSWS.randomness[1:] + + colwisers_opening := api.Sub(api.Add(api.Mul(matrix.ColRSAddressEvaluation, colwiseGamma, colwiseGamma), api.Mul(matrix.ColRSValueEvaluation, colwiseGamma), matrix.ColRSTimestampEvaluation), colwiseTau) + colwisews_opening := api.Sub(api.Add(api.Mul(matrix.ColRSAddressEvaluation, colwiseGamma, colwiseGamma), api.Mul(matrix.ColRSValueEvaluation, colwiseGamma), matrix.ColRSTimestampEvaluation, 1), colwiseTau) + + colwisersws_evaluated_value := api.Add(api.Mul(colwisers_opening, api.Sub(1, colwisersws_last_randomness)), api.Mul(colwisews_opening, colwisersws_last_randomness)) + + api.AssertIsEqual(colwisegpaResultRSWS.lastSumcheckValue, colwisersws_evaluated_value) + + _, err = RunZKWhir(api, arthur, uapi, sc, matrix.ColwiseMerkle, matrix.ColwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, colwiseCommitment, + [][]frontend.Variable{{matrix.ColRSAddressEvaluation}, {matrix.ColRSValueEvaluation}, {matrix.ColRSTimestampEvaluation}}, + [][]frontend.Variable{colwisersws_evaluation_randomness}, + ) + + api.AssertIsEqual(api.Mul(colwiseClaimedInit, colwiseClaimedWS), api.Mul(colwiseClaimedRS, colwiseClaimedFinal)) + + return nil } diff --git a/recursive-verifier/app/circuit/common.go b/recursive-verifier/app/circuit/common.go index f4100392..1123ad76 100644 --- a/recursive-verifier/app/circuit/common.go +++ b/recursive-verifier/app/circuit/common.go @@ -119,11 +119,17 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, var 
sparkMerklePaths []FullMultiPath[KeccakDigest] var sparkStirAnswers [][][]Fp256 var sparkClaimedEvaluations []Fp256 + var rowFinalCounter []Fp256 var rowRSAddressEvaluation []Fp256 var rowRSValueEvaluation []Fp256 var rowRSTimestampEvaluation []Fp256 + var colFinalCounter []Fp256 + var colRSAddressEvaluation []Fp256 + var colRSValueEvaluation []Fp256 + var colRSTimestampEvaluation []Fp256 + for _, op := range spark_io.Ops { switch op.Kind { case gnarkNimue.Hint: @@ -209,6 +215,50 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, return fmt.Errorf("failed to deserialize row_rs_timestamp_claimed_evaluation : %w", err) } rowRSTimestampEvaluation = append(rowRSTimestampEvaluation, temp) + case "col_final_counter_claimed_evaluation": + var temp Fp256 + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &temp, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize col_final_counter_claimed_evaluation : %w", err) + } + colFinalCounter = append(colFinalCounter, temp) + case "col_rs_address_claimed_evaluation": + var temp Fp256 + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &temp, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize col_rs_address_claimed_evaluation : %w", err) + } + colRSAddressEvaluation = append(colRSAddressEvaluation, temp) + case "col_rs_value_claimed_evaluation": + var temp Fp256 + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &temp, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize col_rs_value_claimed_evaluation : %w", err) + } + colRSValueEvaluation = append(colRSValueEvaluation, temp) + case "col_rs_timestamp_claimed_evaluation": + var temp Fp256 + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &temp, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize col_rs_timestamp_claimed_evaluation : %w", err) + } + colRSTimestampEvaluation = append(colRSTimestampEvaluation, temp) } if err != nil { @@ -257,24 +307,38 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, sparkSumcheckData := ZKHint{} rowFinal := ZKHint{} + // colwiseSparkMerkle := ZKHint{} + // colFinal := ZKHint{} + // var sparkSumcheckData = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) // fmt.Println("Aa", len(sparkMerklePaths)) // var rowFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers) // fmt.Println("Aa1", len(sparkMerklePaths)) - // fmt.Print("Len", len(rowFinal.firstRoundMerklePaths.path.merklePaths)) var rowwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) + var colFinal = consumeWhirData(sparkConfig.WHIRCol, &sparkMerklePaths, &sparkStirAnswers) + var colwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) hints := Hints{ - witnessHints: witnessData, - spartanHidingHint: hidingSpartanData, - sparkSumcheckData: sparkSumcheckData, - rowFinalMerkle: rowFinal, - rowFinalCounter: rowFinalCounter, + witnessHints: witnessData, + spartanHidingHint: hidingSpartanData, + sparkSumcheckData: sparkSumcheckData, + + rowFinalCounter: rowFinalCounter, + rowFinalMerkle: rowFinal, + rowRSAddressEvaluation: rowRSAddressEvaluation, rowRSValueEvaluation: rowRSValueEvaluation, 
rowRSTimestampEvaluation: rowRSTimestampEvaluation, rowwiseSparkMerkle: rowwiseSparkMerkle, + + colFinalCounter: colFinalCounter, + colFinalMerkle: colFinal, + + colRSAddressEvaluation: colRSAddressEvaluation, + colRSValueEvaluation: colRSValueEvaluation, + colRSTimestampEvaluation: colRSTimestampEvaluation, + colwiseSparkMerkle: colwiseSparkMerkle, } err = verifyCircuit(deferred, config, sparkConfig, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner, evaluation, sparkClaimedEvaluations) diff --git a/recursive-verifier/app/circuit/types.go b/recursive-verifier/app/circuit/types.go index 44eebf22..a8438187 100644 --- a/recursive-verifier/app/circuit/types.go +++ b/recursive-verifier/app/circuit/types.go @@ -107,15 +107,23 @@ type Config struct { } type Hints struct { - witnessHints ZKHint - spartanHidingHint ZKHint - sparkSumcheckData ZKHint + witnessHints ZKHint + spartanHidingHint ZKHint + sparkSumcheckData ZKHint + rowFinalMerkle ZKHint rowFinalCounter []Fp256 rowRSAddressEvaluation []Fp256 rowRSValueEvaluation []Fp256 rowRSTimestampEvaluation []Fp256 rowwiseSparkMerkle ZKHint + + colFinalCounter []Fp256 + colFinalMerkle ZKHint + colRSAddressEvaluation []Fp256 + colRSValueEvaluation []Fp256 + colRSTimestampEvaluation []Fp256 + colwiseSparkMerkle ZKHint } type Hint struct { @@ -166,8 +174,31 @@ type Commitment struct { } type SPARKMatrixData struct { + WHIRA3 WHIRParams + LogANumTerms int + RowFinalCounter frontend.Variable RowRSAddressEvaluation frontend.Variable RowRSValueEvaluation frontend.Variable RowRSTimestampEvaluation frontend.Variable + + ColFinalCounter frontend.Variable + ColRSAddressEvaluation frontend.Variable + ColRSValueEvaluation frontend.Variable + ColRSTimestampEvaluation frontend.Variable + + SparkSumcheckFirstRound Merkle + SparkSumcheckMerkle Merkle + + RowFinalMerkleFirstRound Merkle + RowFinalMerkle Merkle + + RowwiseMerkleFirstRound Merkle + RowwiseMerkle Merkle + + ColFinalMerkleFirstRound Merkle + ColFinalMerkle Merkle + + ColwiseMerkleFirstRound Merkle + ColwiseMerkle Merkle } diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index fea48c4c..85be2572 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -220,95 +220,103 @@ pub fn verify_spark_single_matrix( ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); - // // Matrix A - Colwise + // Matrix A - Colwise - // let mut tau_and_gamma = [FieldElement::from(0); 2]; - // arthur.fill_challenge_scalars(&mut tau_and_gamma)?; - // let tau = tau_and_gamma[0]; - // let gamma = tau_and_gamma[1]; + let mut tau_and_gamma = [FieldElement::from(0); 2]; + arthur.fill_challenge_scalars(&mut tau_and_gamma)?; + let tau = tau_and_gamma[0]; + let gamma = tau_and_gamma[1]; - // let gpa_result = gpa_sumcheck_verifier( - // arthur, - // next_power_of_two(num_cols) + 2, - // )?; + // Colwise Init Final GPA - // let claimed_init = gpa_result.claimed_values[0]; - // let claimed_final = gpa_result.claimed_values[1]; + let gpa_result = gpa_sumcheck_verifier( + arthur, + next_power_of_two(num_cols) + 2, + )?; - // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + let claimed_init = gpa_result.claimed_values[0]; + let claimed_final = gpa_result.claimed_values[1]; - // let init_adr = calculate_adr(&evaluation_randomness.to_vec()); - // let init_mem = calculate_eq( - // &request.point_to_evaluate.col[1..], - // &evaluation_randomness.to_vec(), - // ) * (FieldElement::from(1) - 
request.point_to_evaluate.col[0]); - // let init_cntr = FieldElement::from(0); + let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - // let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; + let init_adr = calculate_adr(&evaluation_randomness.to_vec()); + let init_mem = calculate_eq( + &request.point_to_evaluate.col[1..], + &evaluation_randomness.to_vec(), + ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]); + let init_cntr = FieldElement::from(0); - // let final_cntr: FieldElement = arthur.hint()?; + let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; - // let mut final_cntr_statement = - // Statement::::new(next_power_of_two(num_cols)); - // final_cntr_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - // final_cntr, - // ); + let final_cntr: FieldElement = arthur.hint()?; - // let final_cntr_verifier = Verifier::new(col_config); - // final_cntr_verifier - // .verify(arthur, &a_col_finalts_commitment, &final_cntr_statement) - // .context("while verifying WHIR")?; + let mut final_cntr_statement = + Statement::::new(next_power_of_two(num_cols)); + final_cntr_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + final_cntr, + ); - // let final_adr = calculate_adr(&evaluation_randomness.to_vec()); - // let final_mem = calculate_eq( - // &request.point_to_evaluate.col[1..], - // &evaluation_randomness.to_vec(), - // ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]); + let final_cntr_verifier = Verifier::new(col_config); + final_cntr_verifier + .verify(arthur, &a_col_finalts_commitment, &final_cntr_statement) + .context("while verifying WHIR")?; - // let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; + let final_adr = calculate_adr(&evaluation_randomness.to_vec()); + let final_mem = calculate_eq( + &request.point_to_evaluate.col[1..], + &evaluation_randomness.to_vec(), + ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]); - // let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) - // + final_opening * last_randomness[0]; + let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; - // ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + + final_opening * last_randomness[0]; - // let gpa_result = gpa_sumcheck_verifier( - // arthur, - // next_power_of_two(num_nonzero_terms) + 2, - // )?; + ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + + // Colwise RS WS GPA + + let gpa_result = gpa_sumcheck_verifier( + arthur, + next_power_of_two(num_nonzero_terms) + 2, + )?; - // let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - // let claimed_rs = gpa_result.claimed_values[0]; - // let claimed_ws = gpa_result.claimed_values[1]; + let claimed_rs = gpa_result.claimed_values[0]; + let claimed_ws = gpa_result.claimed_values[1]; - // let rs_adr: FieldElement = arthur.hint()?; - // let rs_mem: FieldElement = arthur.hint()?; - // let rs_timestamp: FieldElement = arthur.hint()?; + let rs_adr: FieldElement = arthur.hint()?; + let rs_mem: FieldElement = arthur.hint()?; + let rs_timestamp: FieldElement = arthur.hint()?; - // let rs_opening = rs_adr * gamma * gamma + 
rs_mem * gamma + rs_timestamp - tau; - // let ws_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; + let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; + let ws_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; - // let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) - // + ws_opening * last_randomness[0]; + let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) + + ws_opening * last_randomness[0]; - // ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + println!("{:?}", rs_opening); //Reilabs Debug: + println!("{:?}", rs_adr); //Reilabs Debug: + println!("{:?}", evaluated_value); //Reilabs Debug: + println!("{:?}", gpa_result.a_last_sumcheck_value); //Reilabs Debug: + ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - // let mut a_spark_colwise_statement_verifier = Statement::::new(next_power_of_two( - // num_nonzero_terms, - // )); + let mut a_spark_colwise_statement_verifier = Statement::::new(next_power_of_two( + num_nonzero_terms, + )); - // a_spark_colwise_statement_verifier.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - // rs_adr + - // rs_mem * a_colwise_commitment.batching_randomness + - // rs_timestamp * a_colwise_commitment.batching_randomness * a_colwise_commitment.batching_randomness, - // ); + a_spark_colwise_statement_verifier.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + rs_adr + + rs_mem * a_colwise_commitment.batching_randomness + + rs_timestamp * a_colwise_commitment.batching_randomness * a_colwise_commitment.batching_randomness, + ); - // a_spark_sumcheck_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?; + a_spark_sumcheck_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?; - // ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); + ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); Ok(()) } diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index 980f8ab0..20cafb08 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -176,7 +176,7 @@ pub fn prove_spark_for_single_matrix( rs_value_eval * rowwise_witness.batching_randomness + rs_timestamp_eval * rowwise_witness.batching_randomness * rowwise_witness.batching_randomness; - // assert!(claimed_rowwise_eval == rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); + assert!(claimed_rowwise_eval == rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); rowwise_statement.add_constraint( Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); @@ -184,106 +184,106 @@ pub fn prove_spark_for_single_matrix( let sumcheck_prover = Prover::new(batched_config.clone()); sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?; - // // Colwise + // Colwise - // // Colwise Init Final GPA + // Colwise Init Final GPA - // let mut tau_and_gamma = [FieldElement::from(0); 2]; - // merlin.fill_challenge_scalars(&mut tau_and_gamma)?; - // let tau = tau_and_gamma[0]; - // let gamma = tau_and_gamma[1]; + let mut tau_and_gamma = [FieldElement::from(0); 2]; + merlin.fill_challenge_scalars(&mut tau_and_gamma)?; + let tau = tau_and_gamma[0]; + let gamma = tau_and_gamma[1]; - // let 
init_address: Vec = (0..memory.eq_ry.len() as u64) - // .map(FieldElement::from) - // .collect(); - // let init_value = memory.eq_ry.clone(); - // let init_timestamp = vec![FieldElement::from(0); memory.eq_ry.len()]; + let init_address: Vec = (0..memory.eq_ry.len() as u64) + .map(FieldElement::from) + .collect(); + let init_value = memory.eq_ry.clone(); + let init_timestamp = vec![FieldElement::from(0); memory.eq_ry.len()]; - // let init_vec: Vec = izip!(init_address, init_value, init_timestamp) - // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - // .collect(); + let init_vec: Vec = izip!(init_address, init_value, init_timestamp) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); - // let final_address: Vec = (0..memory.eq_ry.len() as u64) - // .map(FieldElement::from) - // .collect(); - // let final_value = memory.eq_ry.clone(); - // let final_timestamp = matrix.timestamps.final_col.clone(); + let final_address: Vec = (0..memory.eq_ry.len() as u64) + .map(FieldElement::from) + .collect(); + let final_value = memory.eq_ry.clone(); + let final_timestamp = matrix.timestamps.final_col.clone(); - // let final_vec: Vec = izip!(final_address, final_value, final_timestamp) - // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - // .collect(); + let final_vec: Vec = izip!(final_address, final_value, final_timestamp) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); - // let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); + let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); - // let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - // let final_col_eval = EvaluationsList::new(matrix.timestamps.final_col.clone()) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&final_col_eval)?; + let final_col_eval = EvaluationsList::new(matrix.timestamps.final_col.clone()) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&final_col_eval)?; - // produce_whir_proof( - // merlin, - // MultilinearPoint(evaluation_randomness.to_vec()), - // final_col_eval, - // whir_configs.col.clone(), - // final_col_ts_witness, - // )?; + produce_whir_proof( + merlin, + MultilinearPoint(evaluation_randomness.to_vec()), + final_col_eval, + whir_configs.col.clone(), + final_col_ts_witness, + )?; // // Colwise RS WS GPA - // let rs_address = matrix.coo.col.clone(); - // let rs_value = e_values.e_ry.clone(); - // let rs_timestamp = matrix.timestamps.read_col.clone(); + let rs_address = matrix.coo.col.clone(); + let rs_value = e_values.e_ry.clone(); + let rs_timestamp = matrix.timestamps.read_col.clone(); - // let rs_vec: Vec = - // izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) - // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - // .collect(); + let rs_vec: Vec = + izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); - // let ws_address = matrix.coo.col.clone(); - // let ws_value = e_values.e_ry.clone(); - // let ws_timestamp: Vec = matrix - // .timestamps - // .read_col - // .into_iter() - // .map(|a| a + FieldElement::from(1)) - // .collect(); + let ws_address = matrix.coo.col.clone(); + let ws_value = e_values.e_ry.clone(); + let ws_timestamp: Vec = matrix + .timestamps + .read_col + .into_iter() + .map(|a| a + FieldElement::from(1)) + 
.collect(); - // let ws_vec: Vec = - // izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) - // .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - // .collect(); + let ws_vec: Vec = + izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) + .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); - // let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); + let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); - // let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - // let rs_address_eval = EvaluationsList::new(rs_address) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&rs_address_eval)?; + let rs_address_eval = EvaluationsList::new(rs_address) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_address_eval)?; - // let rs_value_eval = EvaluationsList::new(rs_value) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&rs_value_eval)?; + let rs_value_eval = EvaluationsList::new(rs_value) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_value_eval)?; - // let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) - // .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - // merlin.hint(&rs_timestamp_eval)?; + let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); + merlin.hint(&rs_timestamp_eval)?; - // let mut colwise_statement = Statement::::new(evaluation_randomness.len()); + let mut colwise_statement = Statement::::new(evaluation_randomness.len()); - // let claimed_colwise_eval = - // rs_address_eval + - // rs_value_eval * colwise_witness.batching_randomness + - // rs_timestamp_eval * colwise_witness.batching_randomness * colwise_witness.batching_randomness; + let claimed_colwise_eval = + rs_address_eval + + rs_value_eval * colwise_witness.batching_randomness + + rs_timestamp_eval * colwise_witness.batching_randomness * colwise_witness.batching_randomness; - // assert!(claimed_colwise_eval == colwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); + assert!(claimed_colwise_eval == colwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); - // colwise_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval); + colwise_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval); - // let sumcheck_prover = Prover::new(batched_config.clone()); - // sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; + let sumcheck_prover = Prover::new(batched_config.clone()); + sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; Ok(()) } diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs index ec67abb7..4b920589 100644 --- a/spark-prover/src/utilities/iopattern/mod.rs +++ b/spark-prover/src/utilities/iopattern/mod.rs @@ -71,27 +71,27 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { // // Colwise - // io = io.add_tau_and_gamma(); + io = io.add_tau_and_gamma(); - // for i in 0..=next_power_of_two(r1cs.a.num_cols) { - // io = io.add_sumcheck_polynomials(i); - // 
io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.a.num_cols) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("Col final counter claimed evaluation") - // .add_whir_proof(&configs.col); + io = io + .hint("col_final_counter_claimed_evaluation") + .add_whir_proof(&configs.col); - // for i in 0..=next_power_of_two(r1cs.a.num_entries()) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.a.num_entries()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("RS address claimed evaluation") - // .hint("RS value claimed evaluation") - // .hint("RS timestamp claimed evaluation") - // .add_whir_proof(&configs.a_3batched); + io = io + .hint("col_rs_address_claimed_evaluation") + .hint("col_rs_value_claimed_evaluation") + .hint("col_rs_timestamp_claimed_evaluation") + .add_whir_proof(&configs.a_3batched); // // Matrix B From 10cf5ae25a476d4ddee604438a9f19c4ad22c991 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Tue, 30 Sep 2025 14:23:21 +0800 Subject: [PATCH 19/34] Cleanup --- recursive-verifier/app/circuit/circuit.go | 20 -------------------- recursive-verifier/app/circuit/common.go | 1 + 2 files changed, 1 insertion(+), 20 deletions(-) diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go index d882eb49..8c84b1e9 100644 --- a/recursive-verifier/app/circuit/circuit.go +++ b/recursive-verifier/app/circuit/circuit.go @@ -46,18 +46,12 @@ type Circuit struct { SPARKIO []byte Transcript []uints.U8 `gnark:",public"` - WHIRA3 WHIRParams WHIRRow WHIRParams WHIRCol WHIRParams SparkSumcheckLast []frontend.Variable SparkA SPARKMatrixData - - SparkSumcheckFirstRound Merkle - SparkSumcheckMerkle Merkle - RowFinalMerkleFirstRound Merkle - RowFinalMerkle Merkle } func (circuit *Circuit) Define(api frontend.API) error { @@ -254,19 +248,12 @@ func verifyCircuit( SPARKIO: []byte(sparkConfig.IOPattern), SPARKTranscript: sparkContTranscript, - WHIRA3: NewWhirParams(sparkConfig.WHIRA3), WHIRRow: NewWhirParams(sparkConfig.WHIRRow), WHIRCol: NewWhirParams(sparkConfig.WHIRCol), LogANumTerms: sparkConfig.LogANumTerms, SparkSumcheckLast: contSparkSumcheckLast, - SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, true), - SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, true), - - RowFinalMerkleFirstRound: newMerkle(hints.rowFinalMerkle.firstRoundMerklePaths.path, true), - RowFinalMerkle: newMerkle(hints.rowFinalMerkle.roundHints, true), - SparkA: SPARKMatrixData{ RowFinalCounter: rowFinalCounter, RowRSAddressEvaluation: rowRSAddressEvaluation, @@ -353,18 +340,11 @@ func verifyCircuit( SPARKIO: []byte(sparkConfig.IOPattern), SPARKTranscript: sparkTranscriptT, - WHIRA3: NewWhirParams(sparkConfig.WHIRA3), WHIRRow: NewWhirParams(sparkConfig.WHIRRow), WHIRCol: NewWhirParams(sparkConfig.WHIRCol), LogANumTerms: sparkConfig.LogANumTerms, SparkSumcheckLast: sparkSumcheckLast, - SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, false), - SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, false), - - RowFinalMerkleFirstRound: newMerkle(hints.rowFinalMerkle.firstRoundMerklePaths.path, false), - RowFinalMerkle: newMerkle(hints.rowFinalMerkle.roundHints, false), - SparkA: SPARKMatrixData{ RowFinalCounter: rowFinalCounter, RowRSAddressEvaluation: rowRSAddressEvaluation, diff --git a/recursive-verifier/app/circuit/common.go 
b/recursive-verifier/app/circuit/common.go index 1123ad76..351964d1 100644 --- a/recursive-verifier/app/circuit/common.go +++ b/recursive-verifier/app/circuit/common.go @@ -307,6 +307,7 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, sparkSumcheckData := ZKHint{} rowFinal := ZKHint{} + // colwiseSparkMerkle := ZKHint{} // colFinal := ZKHint{} From a189da6d638e2b193d37689f26753cf2ce33901a Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Tue, 30 Sep 2025 17:14:30 +0800 Subject: [PATCH 20/34] Adds B and C matrices --- recursive-verifier/app/circuit/circuit.go | 320 ++++++++++++++------ recursive-verifier/app/circuit/common.go | 114 ++++--- recursive-verifier/app/circuit/types.go | 43 ++- spark-prover/src/bin/spark-verifier.rs | 60 ++-- spark-prover/src/main.rs | 40 +-- spark-prover/src/spark.rs | 32 +- spark-prover/src/utilities/iopattern/mod.rs | 192 ++++++------ spark-prover/src/utilities/mod.rs | 4 + 8 files changed, 511 insertions(+), 294 deletions(-) diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go index 8c84b1e9..132ce25f 100644 --- a/recursive-verifier/app/circuit/circuit.go +++ b/recursive-verifier/app/circuit/circuit.go @@ -1,6 +1,7 @@ package circuit import ( + "fmt" "log" "os" @@ -49,9 +50,9 @@ type Circuit struct { WHIRRow WHIRParams WHIRCol WHIRParams - SparkSumcheckLast []frontend.Variable - SparkA SPARKMatrixData + SparkB SPARKMatrixData + SparkC SPARKMatrixData } func (circuit *Circuit) Define(api frontend.API) error { @@ -60,7 +61,7 @@ func (circuit *Circuit) Define(api frontend.API) error { // return err // } - // rootHash, batchingRandomness, initialOODQueries, initialOODAnswers, err := parseBatchedCommitment(arthur, circuit.WHIRParamsWitness) + // spartanCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRParamsWitness) // if err != nil { // return err @@ -77,10 +78,7 @@ func (circuit *Circuit) Define(api frontend.API) error { // return err // } - // _ = spartanSumcheckRand - // _ = spartanSumcheckLastValue - - // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.WitnessMerkle, circuit.WitnessFirstRound, circuit.WHIRParamsWitness, [][]frontend.Variable{circuit.WitnessClaimedEvaluations, circuit.WitnessBlindingEvaluations}, circuit.WitnessLinearStatementEvaluations, batchingRandomness, initialOODQueries, initialOODAnswers, rootHash, + // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.WitnessMerkle, circuit.WitnessFirstRound, circuit.WHIRParamsWitness, [][]frontend.Variable{circuit.WitnessClaimedEvaluations, circuit.WitnessBlindingEvaluations}, circuit.WitnessLinearStatementEvaluations, spartanCommitment, // [][]frontend.Variable{{}, {}}, // [][]frontend.Variable{}, // ) @@ -89,15 +87,6 @@ func (circuit *Circuit) Define(api frontend.API) error { // return err // } - // _ = whirFoldingRandomness - - // _ = rootHash - // _ = batchingRandomness - // _ = initialOODQueries - // _ = initialOODAnswers - // _ = sc - // _ = uapi - // x := api.Mul(api.Sub(api.Mul(circuit.WitnessClaimedEvaluations[0], circuit.WitnessClaimedEvaluations[1]), circuit.WitnessClaimedEvaluations[2]), calculateEQ(api, spartanSumcheckRand, tRand)) // api.AssertIsEqual(spartanSumcheckLastValue, x) @@ -123,6 +112,30 @@ func (circuit *Circuit) Define(api frontend.API) error { if err != nil { return err } + + err = sparkSingleMatrix( + api, + arthur, + uapi, + sc, + circuit.SparkB, + circuit, + ) + if err != nil { + return err + } + + err = sparkSingleMatrix( + api, + 
arthur, + uapi, + sc, + circuit.SparkC, + circuit, + ) + if err != nil { + return err + } } else { // matrixExtensionEvals := evaluateR1CSMatrixExtension(api, circuit, spartanSumcheckRand, whirFoldingRandomness) @@ -135,7 +148,7 @@ func (circuit *Circuit) Define(api frontend.API) error { } func verifyCircuit( - deferred []Fp256, cfg Config, sparkConfig SparkConfig, hints Hints, pk *groth16.ProvingKey, vk *groth16.VerifyingKey, outputCcsPath string, claimedEvaluations ClaimedEvaluations, internedR1CS R1CS, interner Interner, evaluation string, sparkSumcheck []Fp256, + deferred []Fp256, cfg Config, sparkConfig SparkConfig, hints Hints, pk *groth16.ProvingKey, vk *groth16.VerifyingKey, outputCcsPath string, claimedEvaluations ClaimedEvaluations, internedR1CS R1CS, interner Interner, evaluation string, ) error { transcriptT := make([]uints.U8, cfg.TranscriptLen) contTranscript := make([]uints.U8, cfg.TranscriptLen) @@ -161,21 +174,25 @@ func verifyCircuit( witnessLinearStatementEvaluations[1] = typeConverters.LimbsToBigIntMod(deferred[2].Limbs) witnessLinearStatementEvaluations[2] = typeConverters.LimbsToBigIntMod(deferred[3].Limbs) - contSparkSumcheckLast := make([]frontend.Variable, 3) - sparkSumcheckLast := make([]frontend.Variable, 3) - sparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(sparkSumcheck[0].Limbs) - sparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(sparkSumcheck[1].Limbs) - sparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(sparkSumcheck[2].Limbs) + acontSparkSumcheckLast := make([]frontend.Variable, 3) + asparkSumcheckLast := make([]frontend.Variable, 3) + asparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[0].Limbs) + asparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[1].Limbs) + asparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[2].Limbs) + + bcontSparkSumcheckLast := make([]frontend.Variable, 3) + bsparkSumcheckLast := make([]frontend.Variable, 3) + bsparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(hints.BHints.sparkClaimedEvaluations[0].Limbs) + bsparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(hints.BHints.sparkClaimedEvaluations[1].Limbs) + bsparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(hints.BHints.sparkClaimedEvaluations[2].Limbs) - rowFinalCounter := typeConverters.LimbsToBigIntMod(hints.rowFinalCounter[0].Limbs) - rowRSAddressEvaluation := typeConverters.LimbsToBigIntMod(hints.rowRSAddressEvaluation[0].Limbs) - rowRSValueEvaluation := typeConverters.LimbsToBigIntMod(hints.rowRSValueEvaluation[0].Limbs) - rowRSTimestampEvaluation := typeConverters.LimbsToBigIntMod(hints.rowRSTimestampEvaluation[0].Limbs) + ccontSparkSumcheckLast := make([]frontend.Variable, 3) + csparkSumcheckLast := make([]frontend.Variable, 3) + csparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(hints.CHints.sparkClaimedEvaluations[0].Limbs) + csparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(hints.CHints.sparkClaimedEvaluations[1].Limbs) + csparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(hints.CHints.sparkClaimedEvaluations[2].Limbs) - colFinalCounter := typeConverters.LimbsToBigIntMod(hints.colFinalCounter[0].Limbs) - colRSAddressEvaluation := typeConverters.LimbsToBigIntMod(hints.colRSAddressEvaluation[0].Limbs) - colRSValueEvaluation := typeConverters.LimbsToBigIntMod(hints.colRSValueEvaluation[0].Limbs) - colRSTimestampEvaluation := typeConverters.LimbsToBigIntMod(hints.colRSTimestampEvaluation[0].Limbs) + 
fmt.Print(bsparkSumcheckLast) fSums, gSums := parseClaimedEvaluations(claimedEvaluations, true) @@ -251,39 +268,104 @@ func verifyCircuit( WHIRRow: NewWhirParams(sparkConfig.WHIRRow), WHIRCol: NewWhirParams(sparkConfig.WHIRCol), - LogANumTerms: sparkConfig.LogANumTerms, - SparkSumcheckLast: contSparkSumcheckLast, + LogANumTerms: sparkConfig.LogANumTerms, SparkA: SPARKMatrixData{ - RowFinalCounter: rowFinalCounter, - RowRSAddressEvaluation: rowRSAddressEvaluation, - RowRSValueEvaluation: rowRSValueEvaluation, - RowRSTimestampEvaluation: rowRSTimestampEvaluation, + SparkSumcheckLast: acontSparkSumcheckLast, - ColFinalCounter: colFinalCounter, - ColRSAddressEvaluation: colRSAddressEvaluation, - ColRSValueEvaluation: colRSValueEvaluation, - ColRSTimestampEvaluation: colRSTimestampEvaluation, + RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.AHints.rowFinalCounter.Limbs), + RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.rowRSAddressEvaluation.Limbs), + RowRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.rowRSValueEvaluation.Limbs), + RowRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.rowRSTimestampEvaluation.Limbs), - SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, true), - SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, true), + ColFinalCounter: typeConverters.LimbsToBigIntMod(hints.AHints.colFinalCounter.Limbs), + ColRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.colRSAddressEvaluation.Limbs), + ColRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.colRSValueEvaluation.Limbs), + ColRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.colRSTimestampEvaluation.Limbs), - RowFinalMerkleFirstRound: newMerkle(hints.rowFinalMerkle.firstRoundMerklePaths.path, true), - RowFinalMerkle: newMerkle(hints.rowFinalMerkle.roundHints, true), + SparkSumcheckFirstRound: newMerkle(hints.AHints.sparkSumcheckData.firstRoundMerklePaths.path, true), + SparkSumcheckMerkle: newMerkle(hints.AHints.sparkSumcheckData.roundHints, true), - RowwiseMerkleFirstRound: newMerkle(hints.rowwiseSparkMerkle.firstRoundMerklePaths.path, true), - RowwiseMerkle: newMerkle(hints.rowwiseSparkMerkle.roundHints, true), + RowFinalMerkleFirstRound: newMerkle(hints.AHints.rowFinalMerkle.firstRoundMerklePaths.path, true), + RowFinalMerkle: newMerkle(hints.AHints.rowFinalMerkle.roundHints, true), - ColFinalMerkleFirstRound: newMerkle(hints.colFinalMerkle.firstRoundMerklePaths.path, true), - ColFinalMerkle: newMerkle(hints.colFinalMerkle.roundHints, true), + RowwiseMerkleFirstRound: newMerkle(hints.AHints.rowwiseSparkMerkle.firstRoundMerklePaths.path, true), + RowwiseMerkle: newMerkle(hints.AHints.rowwiseSparkMerkle.roundHints, true), - ColwiseMerkleFirstRound: newMerkle(hints.colwiseSparkMerkle.firstRoundMerklePaths.path, true), - ColwiseMerkle: newMerkle(hints.colwiseSparkMerkle.roundHints, true), + ColFinalMerkleFirstRound: newMerkle(hints.AHints.colFinalMerkle.firstRoundMerklePaths.path, true), + ColFinalMerkle: newMerkle(hints.AHints.colFinalMerkle.roundHints, true), + + ColwiseMerkleFirstRound: newMerkle(hints.AHints.colwiseSparkMerkle.firstRoundMerklePaths.path, true), + ColwiseMerkle: newMerkle(hints.AHints.colwiseSparkMerkle.roundHints, true), WHIRA3: NewWhirParams(sparkConfig.WHIRA3), LogANumTerms: sparkConfig.LogANumTerms, }, + SparkB: SPARKMatrixData{ + SparkSumcheckLast: bcontSparkSumcheckLast, + + RowFinalCounter: 
typeConverters.LimbsToBigIntMod(hints.BHints.rowFinalCounter.Limbs), + RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSAddressEvaluation.Limbs), + RowRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSValueEvaluation.Limbs), + RowRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSTimestampEvaluation.Limbs), + + ColFinalCounter: typeConverters.LimbsToBigIntMod(hints.BHints.colFinalCounter.Limbs), + ColRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSAddressEvaluation.Limbs), + ColRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSValueEvaluation.Limbs), + ColRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSTimestampEvaluation.Limbs), + + SparkSumcheckFirstRound: newMerkle(hints.BHints.sparkSumcheckData.firstRoundMerklePaths.path, true), + SparkSumcheckMerkle: newMerkle(hints.BHints.sparkSumcheckData.roundHints, true), + + RowFinalMerkleFirstRound: newMerkle(hints.BHints.rowFinalMerkle.firstRoundMerklePaths.path, true), + RowFinalMerkle: newMerkle(hints.BHints.rowFinalMerkle.roundHints, true), + + RowwiseMerkleFirstRound: newMerkle(hints.BHints.rowwiseSparkMerkle.firstRoundMerklePaths.path, true), + RowwiseMerkle: newMerkle(hints.BHints.rowwiseSparkMerkle.roundHints, true), + + ColFinalMerkleFirstRound: newMerkle(hints.BHints.colFinalMerkle.firstRoundMerklePaths.path, true), + ColFinalMerkle: newMerkle(hints.BHints.colFinalMerkle.roundHints, true), + + ColwiseMerkleFirstRound: newMerkle(hints.BHints.colwiseSparkMerkle.firstRoundMerklePaths.path, true), + ColwiseMerkle: newMerkle(hints.BHints.colwiseSparkMerkle.roundHints, true), + + WHIRA3: NewWhirParams(sparkConfig.WHIRB3), + LogANumTerms: sparkConfig.LogBNumTerms, + }, + + SparkC: SPARKMatrixData{ + SparkSumcheckLast: ccontSparkSumcheckLast, + + RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.CHints.rowFinalCounter.Limbs), + RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSAddressEvaluation.Limbs), + RowRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSValueEvaluation.Limbs), + RowRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSTimestampEvaluation.Limbs), + + ColFinalCounter: typeConverters.LimbsToBigIntMod(hints.CHints.colFinalCounter.Limbs), + ColRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSAddressEvaluation.Limbs), + ColRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSValueEvaluation.Limbs), + ColRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSTimestampEvaluation.Limbs), + + SparkSumcheckFirstRound: newMerkle(hints.CHints.sparkSumcheckData.firstRoundMerklePaths.path, true), + SparkSumcheckMerkle: newMerkle(hints.CHints.sparkSumcheckData.roundHints, true), + + RowFinalMerkleFirstRound: newMerkle(hints.CHints.rowFinalMerkle.firstRoundMerklePaths.path, true), + RowFinalMerkle: newMerkle(hints.CHints.rowFinalMerkle.roundHints, true), + + RowwiseMerkleFirstRound: newMerkle(hints.CHints.rowwiseSparkMerkle.firstRoundMerklePaths.path, true), + RowwiseMerkle: newMerkle(hints.CHints.rowwiseSparkMerkle.roundHints, true), + + ColFinalMerkleFirstRound: newMerkle(hints.CHints.colFinalMerkle.firstRoundMerklePaths.path, true), + ColFinalMerkle: newMerkle(hints.CHints.colFinalMerkle.roundHints, true), + + ColwiseMerkleFirstRound: newMerkle(hints.CHints.colwiseSparkMerkle.firstRoundMerklePaths.path, true), + ColwiseMerkle: newMerkle(hints.CHints.colwiseSparkMerkle.roundHints, true), 
+ + WHIRA3: NewWhirParams(sparkConfig.WHIRC3), + LogANumTerms: sparkConfig.LogCNumTerms, + }, + UseSpark: useSpark, } @@ -338,43 +420,108 @@ func verifyCircuit( MatrixB: matrixB, MatrixC: matrixC, - SPARKIO: []byte(sparkConfig.IOPattern), - SPARKTranscript: sparkTranscriptT, - WHIRRow: NewWhirParams(sparkConfig.WHIRRow), - WHIRCol: NewWhirParams(sparkConfig.WHIRCol), - LogANumTerms: sparkConfig.LogANumTerms, - SparkSumcheckLast: sparkSumcheckLast, + SPARKIO: []byte(sparkConfig.IOPattern), + SPARKTranscript: sparkTranscriptT, + WHIRRow: NewWhirParams(sparkConfig.WHIRRow), + WHIRCol: NewWhirParams(sparkConfig.WHIRCol), + LogANumTerms: sparkConfig.LogANumTerms, SparkA: SPARKMatrixData{ - RowFinalCounter: rowFinalCounter, - RowRSAddressEvaluation: rowRSAddressEvaluation, - RowRSValueEvaluation: rowRSValueEvaluation, - RowRSTimestampEvaluation: rowRSTimestampEvaluation, + SparkSumcheckLast: asparkSumcheckLast, - ColFinalCounter: colFinalCounter, - ColRSAddressEvaluation: colRSAddressEvaluation, - ColRSValueEvaluation: colRSValueEvaluation, - ColRSTimestampEvaluation: colRSTimestampEvaluation, + RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.AHints.rowFinalCounter.Limbs), + RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.rowRSAddressEvaluation.Limbs), + RowRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.rowRSValueEvaluation.Limbs), + RowRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.rowRSTimestampEvaluation.Limbs), - SparkSumcheckFirstRound: newMerkle(hints.sparkSumcheckData.firstRoundMerklePaths.path, false), - SparkSumcheckMerkle: newMerkle(hints.sparkSumcheckData.roundHints, false), + ColFinalCounter: typeConverters.LimbsToBigIntMod(hints.AHints.colFinalCounter.Limbs), + ColRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.colRSAddressEvaluation.Limbs), + ColRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.colRSValueEvaluation.Limbs), + ColRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.colRSTimestampEvaluation.Limbs), - RowFinalMerkleFirstRound: newMerkle(hints.rowFinalMerkle.firstRoundMerklePaths.path, false), - RowFinalMerkle: newMerkle(hints.rowFinalMerkle.roundHints, false), + SparkSumcheckFirstRound: newMerkle(hints.AHints.sparkSumcheckData.firstRoundMerklePaths.path, false), + SparkSumcheckMerkle: newMerkle(hints.AHints.sparkSumcheckData.roundHints, false), - RowwiseMerkleFirstRound: newMerkle(hints.rowwiseSparkMerkle.firstRoundMerklePaths.path, false), - RowwiseMerkle: newMerkle(hints.rowwiseSparkMerkle.roundHints, false), + RowFinalMerkleFirstRound: newMerkle(hints.AHints.rowFinalMerkle.firstRoundMerklePaths.path, false), + RowFinalMerkle: newMerkle(hints.AHints.rowFinalMerkle.roundHints, false), - ColFinalMerkleFirstRound: newMerkle(hints.colFinalMerkle.firstRoundMerklePaths.path, false), - ColFinalMerkle: newMerkle(hints.colFinalMerkle.roundHints, false), + RowwiseMerkleFirstRound: newMerkle(hints.AHints.rowwiseSparkMerkle.firstRoundMerklePaths.path, false), + RowwiseMerkle: newMerkle(hints.AHints.rowwiseSparkMerkle.roundHints, false), - ColwiseMerkleFirstRound: newMerkle(hints.colwiseSparkMerkle.firstRoundMerklePaths.path, false), - ColwiseMerkle: newMerkle(hints.colwiseSparkMerkle.roundHints, false), + ColFinalMerkleFirstRound: newMerkle(hints.AHints.colFinalMerkle.firstRoundMerklePaths.path, false), + ColFinalMerkle: newMerkle(hints.AHints.colFinalMerkle.roundHints, false), + + ColwiseMerkleFirstRound: 
newMerkle(hints.AHints.colwiseSparkMerkle.firstRoundMerklePaths.path, false), + ColwiseMerkle: newMerkle(hints.AHints.colwiseSparkMerkle.roundHints, false), WHIRA3: NewWhirParams(sparkConfig.WHIRA3), LogANumTerms: sparkConfig.LogANumTerms, }, + SparkB: SPARKMatrixData{ + SparkSumcheckLast: bsparkSumcheckLast, + + RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.BHints.rowFinalCounter.Limbs), + RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSAddressEvaluation.Limbs), + RowRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSValueEvaluation.Limbs), + RowRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSTimestampEvaluation.Limbs), + + ColFinalCounter: typeConverters.LimbsToBigIntMod(hints.BHints.colFinalCounter.Limbs), + ColRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSAddressEvaluation.Limbs), + ColRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSValueEvaluation.Limbs), + ColRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSTimestampEvaluation.Limbs), + + SparkSumcheckFirstRound: newMerkle(hints.BHints.sparkSumcheckData.firstRoundMerklePaths.path, false), + SparkSumcheckMerkle: newMerkle(hints.BHints.sparkSumcheckData.roundHints, false), + + RowFinalMerkleFirstRound: newMerkle(hints.BHints.rowFinalMerkle.firstRoundMerklePaths.path, false), + RowFinalMerkle: newMerkle(hints.BHints.rowFinalMerkle.roundHints, false), + + RowwiseMerkleFirstRound: newMerkle(hints.BHints.rowwiseSparkMerkle.firstRoundMerklePaths.path, false), + RowwiseMerkle: newMerkle(hints.BHints.rowwiseSparkMerkle.roundHints, false), + + ColFinalMerkleFirstRound: newMerkle(hints.BHints.colFinalMerkle.firstRoundMerklePaths.path, false), + ColFinalMerkle: newMerkle(hints.BHints.colFinalMerkle.roundHints, false), + + ColwiseMerkleFirstRound: newMerkle(hints.BHints.colwiseSparkMerkle.firstRoundMerklePaths.path, false), + ColwiseMerkle: newMerkle(hints.BHints.colwiseSparkMerkle.roundHints, false), + + WHIRA3: NewWhirParams(sparkConfig.WHIRB3), + LogANumTerms: sparkConfig.LogBNumTerms, + }, + + SparkC: SPARKMatrixData{ + SparkSumcheckLast: csparkSumcheckLast, + + RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.CHints.rowFinalCounter.Limbs), + RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSAddressEvaluation.Limbs), + RowRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSValueEvaluation.Limbs), + RowRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSTimestampEvaluation.Limbs), + + ColFinalCounter: typeConverters.LimbsToBigIntMod(hints.CHints.colFinalCounter.Limbs), + ColRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSAddressEvaluation.Limbs), + ColRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSValueEvaluation.Limbs), + ColRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSTimestampEvaluation.Limbs), + + SparkSumcheckFirstRound: newMerkle(hints.CHints.sparkSumcheckData.firstRoundMerklePaths.path, false), + SparkSumcheckMerkle: newMerkle(hints.CHints.sparkSumcheckData.roundHints, false), + + RowFinalMerkleFirstRound: newMerkle(hints.CHints.rowFinalMerkle.firstRoundMerklePaths.path, false), + RowFinalMerkle: newMerkle(hints.CHints.rowFinalMerkle.roundHints, false), + + RowwiseMerkleFirstRound: newMerkle(hints.CHints.rowwiseSparkMerkle.firstRoundMerklePaths.path, false), + RowwiseMerkle: newMerkle(hints.CHints.rowwiseSparkMerkle.roundHints, false), + + 
ColFinalMerkleFirstRound: newMerkle(hints.CHints.colFinalMerkle.firstRoundMerklePaths.path, false), + ColFinalMerkle: newMerkle(hints.CHints.colFinalMerkle.roundHints, false), + + ColwiseMerkleFirstRound: newMerkle(hints.CHints.colwiseSparkMerkle.firstRoundMerklePaths.path, false), + ColwiseMerkle: newMerkle(hints.CHints.colwiseSparkMerkle.roundHints, false), + + WHIRA3: NewWhirParams(sparkConfig.WHIRC3), + LogANumTerms: sparkConfig.LogCNumTerms, + }, + UseSpark: useSpark, } @@ -580,10 +727,10 @@ func sparkSingleMatrix( api.AssertIsEqual(gpaResultRSWS.lastSumcheckValue, rsws_evaluated_value) - _, err = RunZKWhir(api, arthur, uapi, sc, matrix.RowwiseMerkle, matrix.RowwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, rowwiseCommitment, - [][]frontend.Variable{{matrix.RowRSAddressEvaluation}, {matrix.RowRSValueEvaluation}, {matrix.RowRSTimestampEvaluation}}, - [][]frontend.Variable{rsws_evaluation_randomness}, - ) + // _, err = RunZKWhir(api, arthur, uapi, sc, matrix.RowwiseMerkle, matrix.RowwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, rowwiseCommitment, + // [][]frontend.Variable{{matrix.RowRSAddressEvaluation}, {matrix.RowRSValueEvaluation}, {matrix.RowRSTimestampEvaluation}}, + // [][]frontend.Variable{rsws_evaluation_randomness}, + // ) _ = rsws_evaluation_randomness api.AssertIsEqual(api.Mul(claimedInit, claimedWS), api.Mul(claimedRS, claimedFinal)) @@ -624,10 +771,10 @@ func sparkSingleMatrix( colwiseinit_opening := api.Sub(api.Add(api.Mul(colwiseaddr, colwiseGamma, colwiseGamma), api.Mul(colwisemem, colwiseGamma), colwiseinit_cntr), colwiseTau) - _, err = RunZKWhir(api, arthur, uapi, sc, circuit.SparkA.ColFinalMerkle, circuit.SparkA.ColFinalMerkleFirstRound, circuit.WHIRCol, [][]frontend.Variable{{}}, []frontend.Variable{}, colFinalCommitment, - [][]frontend.Variable{{matrix.ColFinalCounter}}, - [][]frontend.Variable{colwiseEvaluation_randomness}, - ) + // _, err = RunZKWhir(api, arthur, uapi, sc, circuit.SparkA.ColFinalMerkle, circuit.SparkA.ColFinalMerkleFirstRound, circuit.WHIRCol, [][]frontend.Variable{{}}, []frontend.Variable{}, colFinalCommitment, + // [][]frontend.Variable{{matrix.ColFinalCounter}}, + // [][]frontend.Variable{colwiseEvaluation_randomness}, + // ) colwisefinal_opening := api.Sub(api.Add(api.Mul(colwiseaddr, colwiseGamma, colwiseGamma), api.Mul(colwisemem, colwiseGamma), matrix.ColFinalCounter), colwiseTau) colwiseevaluated_value := api.Add(api.Mul(colwiseinit_opening, api.Sub(1, colwiseLast_randomness)), api.Mul(colwisefinal_opening, colwiseLast_randomness)) @@ -652,10 +799,11 @@ func sparkSingleMatrix( api.AssertIsEqual(colwisegpaResultRSWS.lastSumcheckValue, colwisersws_evaluated_value) - _, err = RunZKWhir(api, arthur, uapi, sc, matrix.ColwiseMerkle, matrix.ColwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, colwiseCommitment, - [][]frontend.Variable{{matrix.ColRSAddressEvaluation}, {matrix.ColRSValueEvaluation}, {matrix.ColRSTimestampEvaluation}}, - [][]frontend.Variable{colwisersws_evaluation_randomness}, - ) + // _, err = RunZKWhir(api, arthur, uapi, sc, matrix.ColwiseMerkle, matrix.ColwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, colwiseCommitment, + // [][]frontend.Variable{{matrix.ColRSAddressEvaluation}, {matrix.ColRSValueEvaluation}, {matrix.ColRSTimestampEvaluation}}, + // [][]frontend.Variable{colwisersws_evaluation_randomness}, + // ) + _ = colwisersws_evaluation_randomness 
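The init/final grand product verified in this function opens to one field element at the GPA's evaluation point: the counter term is 0 on the init side and the hinted final counter on the final side, and the first GPA challenge folds the two halves together. A prover-side sketch of the value the init_opening/final_opening assertions above recompute, under the same FieldElement assumption as the earlier sketch:

```
// Mirrors the Go checks above: both openings share addr*gamma^2 + mem*gamma - tau
// and differ only in the counter; r_last selects between the init and final leaves.
fn init_final_opening(
    adr: FieldElement,
    mem: FieldElement,
    final_cntr: FieldElement,
    gamma: FieldElement,
    tau: FieldElement,
    r_last: FieldElement,
) -> FieldElement {
    let one = FieldElement::from(1);
    let init_opening = adr * gamma * gamma + mem * gamma - tau;
    let final_opening = adr * gamma * gamma + mem * gamma + final_cntr - tau;
    init_opening * (one - r_last) + final_opening * r_last
}
```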
api.AssertIsEqual(api.Mul(colwiseClaimedInit, colwiseClaimedWS), api.Mul(colwiseClaimedRS, colwiseClaimedFinal)) diff --git a/recursive-verifier/app/circuit/common.go b/recursive-verifier/app/circuit/common.go index 351964d1..015af622 100644 --- a/recursive-verifier/app/circuit/common.go +++ b/recursive-verifier/app/circuit/common.go @@ -118,7 +118,7 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, var sparkMerklePaths []FullMultiPath[KeccakDigest] var sparkStirAnswers [][][]Fp256 - var sparkClaimedEvaluations []Fp256 + var sparkClaimedEvaluations [][]Fp256 var rowFinalCounter []Fp256 var rowRSAddressEvaluation []Fp256 @@ -163,14 +163,16 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, ) sparkStirAnswers = append(sparkStirAnswers, stirAnswersTemporary) case "sumcheck_last_folds": + var temp []Fp256 _, err = arkSerialize.CanonicalDeserializeWithMode( bytes.NewReader(sparkConfig.Transcript[start:end]), - &sparkClaimedEvaluations, + &temp, false, false, ) if err != nil { return fmt.Errorf("failed to deserialize spark_last_folds: %w", err) } + sparkClaimedEvaluations = append(sparkClaimedEvaluations, temp) case "row_final_counter_claimed_evaluation": var temp Fp256 _, err = arkSerialize.CanonicalDeserializeWithMode( @@ -284,7 +286,6 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, } sparkConfig.Transcript = spark_truncated_transcript - // Spark end internerBytes, err := hex.DecodeString(r1cs.Interner.Values) if err != nil { @@ -300,49 +301,92 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, } var hidingSpartanData = consumeWhirData(config.WHIRConfigHidingSpartan, &merklePaths, &stirAnswers) - var witnessData = consumeWhirData(config.WHIRConfigWitness, &merklePaths, &stirAnswers) - // Read from spark - - sparkSumcheckData := ZKHint{} - rowFinal := ZKHint{} - - // colwiseSparkMerkle := ZKHint{} - // colFinal := ZKHint{} + var asparkSumcheckData = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) + var arowFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers) + var arowwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) + var acolFinal = consumeWhirData(sparkConfig.WHIRCol, &sparkMerklePaths, &sparkStirAnswers) + var acolwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) - // var sparkSumcheckData = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) - // fmt.Println("Aa", len(sparkMerklePaths)) - // var rowFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers) - // fmt.Println("Aa1", len(sparkMerklePaths)) + var bsparkSumcheckData = consumeWhirData(sparkConfig.WHIRB3, &sparkMerklePaths, &sparkStirAnswers) + var browFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers) + var browwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRB3, &sparkMerklePaths, &sparkStirAnswers) + var bcolFinal = consumeWhirData(sparkConfig.WHIRCol, &sparkMerklePaths, &sparkStirAnswers) + var bcolwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRB3, &sparkMerklePaths, &sparkStirAnswers) - var rowwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) - var colFinal = consumeWhirData(sparkConfig.WHIRCol, &sparkMerklePaths, &sparkStirAnswers) - var colwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) + var 
csparkSumcheckData = consumeWhirData(sparkConfig.WHIRC3, &sparkMerklePaths, &sparkStirAnswers)
+	var crowFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers)
+	var crowwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRC3, &sparkMerklePaths, &sparkStirAnswers)
+	var ccolFinal = consumeWhirData(sparkConfig.WHIRCol, &sparkMerklePaths, &sparkStirAnswers)
+	var ccolwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRC3, &sparkMerklePaths, &sparkStirAnswers)
 
 	hints := Hints{
 		witnessHints:      witnessData,
 		spartanHidingHint: hidingSpartanData,
 
-		sparkSumcheckData: sparkSumcheckData,
-
-		rowFinalCounter: rowFinalCounter,
-		rowFinalMerkle:  rowFinal,
-
-		rowRSAddressEvaluation:   rowRSAddressEvaluation,
-		rowRSValueEvaluation:     rowRSValueEvaluation,
-		rowRSTimestampEvaluation: rowRSTimestampEvaluation,
-		rowwiseSparkMerkle:       rowwiseSparkMerkle,
-
-		colFinalCounter:          colFinalCounter,
-		colFinalMerkle:           colFinal,
-		colRSAddressEvaluation:   colRSAddressEvaluation,
-		colRSValueEvaluation:     colRSValueEvaluation,
-		colRSTimestampEvaluation: colRSTimestampEvaluation,
-		colwiseSparkMerkle:       colwiseSparkMerkle,
+		AHints: SparkMatrixHints{
+			sparkSumcheckData:  asparkSumcheckData,
+			rowFinalMerkle:     arowFinal,
+			rowwiseSparkMerkle: arowwiseSparkMerkle,
+			colFinalMerkle:     acolFinal,
+			colwiseSparkMerkle: acolwiseSparkMerkle,
+
+			sparkClaimedEvaluations: sparkClaimedEvaluations[0],
+
+			rowFinalCounter:          rowFinalCounter[0],
+			rowRSAddressEvaluation:   rowRSAddressEvaluation[0],
+			rowRSValueEvaluation:     rowRSValueEvaluation[0],
+			rowRSTimestampEvaluation: rowRSTimestampEvaluation[0],
+
+			colFinalCounter:          colFinalCounter[0],
+			colRSAddressEvaluation:   colRSAddressEvaluation[0],
+			colRSValueEvaluation:     colRSValueEvaluation[0],
+			colRSTimestampEvaluation: colRSTimestampEvaluation[0],
+		},
+
+		BHints: SparkMatrixHints{
+			sparkSumcheckData:  bsparkSumcheckData,
+			rowFinalMerkle:     browFinal,
+			rowwiseSparkMerkle: browwiseSparkMerkle,
+			colFinalMerkle:     bcolFinal,
+			colwiseSparkMerkle: bcolwiseSparkMerkle,
+
+			sparkClaimedEvaluations: sparkClaimedEvaluations[1],
+
+			rowFinalCounter:          rowFinalCounter[1],
+			rowRSAddressEvaluation:   rowRSAddressEvaluation[1],
+			rowRSValueEvaluation:     rowRSValueEvaluation[1],
+			rowRSTimestampEvaluation: rowRSTimestampEvaluation[1],
+
+			colFinalCounter:          colFinalCounter[1],
+			colRSAddressEvaluation:   colRSAddressEvaluation[1],
+			colRSValueEvaluation:     colRSValueEvaluation[1],
+			colRSTimestampEvaluation: colRSTimestampEvaluation[1],
+		},
+
+		CHints: SparkMatrixHints{
+			sparkSumcheckData:  csparkSumcheckData,
+			rowFinalMerkle:     crowFinal,
+			rowwiseSparkMerkle: crowwiseSparkMerkle,
+			colFinalMerkle:     ccolFinal,
+			colwiseSparkMerkle: ccolwiseSparkMerkle,
+
+			sparkClaimedEvaluations: sparkClaimedEvaluations[2],
+
+			rowFinalCounter:          rowFinalCounter[2],
+			rowRSAddressEvaluation:   rowRSAddressEvaluation[2],
+			rowRSValueEvaluation:     rowRSValueEvaluation[2],
+			rowRSTimestampEvaluation: rowRSTimestampEvaluation[2],
+
+			colFinalCounter:          colFinalCounter[2],
+			colRSAddressEvaluation:   colRSAddressEvaluation[2],
+			colRSValueEvaluation:     colRSValueEvaluation[2],
+			colRSTimestampEvaluation: colRSTimestampEvaluation[2],
+		},
 	}
 
-	err = verifyCircuit(deferred, config, sparkConfig, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner, evaluation, sparkClaimedEvaluations)
+	err = verifyCircuit(deferred, config, sparkConfig, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner, evaluation)
 
 	if err != nil {
 		return fmt.Errorf("verification failed: %w", err)
diff --git
a/recursive-verifier/app/circuit/types.go b/recursive-verifier/app/circuit/types.go index a8438187..22b67cf5 100644 --- a/recursive-verifier/app/circuit/types.go +++ b/recursive-verifier/app/circuit/types.go @@ -109,21 +109,10 @@ type Config struct { type Hints struct { witnessHints ZKHint spartanHidingHint ZKHint - sparkSumcheckData ZKHint - rowFinalMerkle ZKHint - rowFinalCounter []Fp256 - rowRSAddressEvaluation []Fp256 - rowRSValueEvaluation []Fp256 - rowRSTimestampEvaluation []Fp256 - rowwiseSparkMerkle ZKHint - - colFinalCounter []Fp256 - colFinalMerkle ZKHint - colRSAddressEvaluation []Fp256 - colRSValueEvaluation []Fp256 - colRSTimestampEvaluation []Fp256 - colwiseSparkMerkle ZKHint + AHints SparkMatrixHints + BHints SparkMatrixHints + CHints SparkMatrixHints } type Hint struct { @@ -161,9 +150,13 @@ type SparkConfig struct { IOPattern string `json:"io_pattern"` Transcript []byte `json:"transcript"` WHIRA3 WHIRConfig `json:"whir_a3"` + WHIRB3 WHIRConfig `json:"whir_b3"` + WHIRC3 WHIRConfig `json:"whir_c3"` WHIRRow WHIRConfig `json:"whir_row"` WHIRCol WHIRConfig `json:"whir_col"` LogANumTerms int `json:"log_a_num_terms"` + LogBNumTerms int `json:"log_b_num_terms"` + LogCNumTerms int `json:"log_c_num_terms"` } type Commitment struct { @@ -177,6 +170,8 @@ type SPARKMatrixData struct { WHIRA3 WHIRParams LogANumTerms int + SparkSumcheckLast []frontend.Variable + RowFinalCounter frontend.Variable RowRSAddressEvaluation frontend.Variable RowRSValueEvaluation frontend.Variable @@ -202,3 +197,23 @@ type SPARKMatrixData struct { ColwiseMerkleFirstRound Merkle ColwiseMerkle Merkle } + +type SparkMatrixHints struct { + sparkSumcheckData ZKHint + rowFinalMerkle ZKHint + rowwiseSparkMerkle ZKHint + colFinalMerkle ZKHint + colwiseSparkMerkle ZKHint + + sparkClaimedEvaluations []Fp256 + + rowFinalCounter Fp256 + rowRSAddressEvaluation Fp256 + rowRSValueEvaluation Fp256 + rowRSTimestampEvaluation Fp256 + + colFinalCounter Fp256 + colRSAddressEvaluation Fp256 + colRSValueEvaluation Fp256 + colRSTimestampEvaluation Fp256 +} diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index 85be2572..63a37a6e 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -50,29 +50,29 @@ fn main() -> Result<()> { &request.claimed_values.a, )?; - // verify_spark_single_matrix( - // &spark_proof.whir_params.row, - // &spark_proof.whir_params.col, - // &spark_proof.whir_params.b_3batched, - // spark_proof.matrix_dimensions.num_rows, - // spark_proof.matrix_dimensions.num_cols, - // spark_proof.matrix_dimensions.b_nonzero_terms, - // &mut arthur, - // &request, - // &request.claimed_values.b, - // )?; - - // verify_spark_single_matrix( - // &spark_proof.whir_params.row, - // &spark_proof.whir_params.col, - // &spark_proof.whir_params.c_3batched, - // spark_proof.matrix_dimensions.num_rows, - // spark_proof.matrix_dimensions.num_cols, - // spark_proof.matrix_dimensions.c_nonzero_terms, - // &mut arthur, - // &request, - // &request.claimed_values.c, - // )?; + verify_spark_single_matrix( + &spark_proof.whir_params.row, + &spark_proof.whir_params.col, + &spark_proof.whir_params.b_3batched, + spark_proof.matrix_dimensions.num_rows, + spark_proof.matrix_dimensions.num_cols, + spark_proof.matrix_dimensions.b_nonzero_terms, + &mut arthur, + &request, + &request.claimed_values.b, + )?; + + verify_spark_single_matrix( + &spark_proof.whir_params.row, + &spark_proof.whir_params.col, + &spark_proof.whir_params.c_3batched, + 
spark_proof.matrix_dimensions.num_rows, + spark_proof.matrix_dimensions.num_cols, + spark_proof.matrix_dimensions.c_nonzero_terms, + &mut arthur, + &request, + &request.claimed_values.c, + )?; Ok(()) } @@ -126,7 +126,7 @@ pub fn verify_spark_single_matrix( // final_folds[2] * a_sumcheck_commitment.batching_randomness * a_sumcheck_commitment.batching_randomness, // ); - let a_spark_sumcheck_verifier = Verifier::new(num_nonzero_term_batched3_config); + // let a_spark_sumcheck_verifier = Verifier::new(num_nonzero_term_batched3_config); // a_spark_sumcheck_verifier.verify(arthur, &a_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?; // Matrix A - Rowwise @@ -216,7 +216,7 @@ pub fn verify_spark_single_matrix( rs_timestamp * a_rowwise_commitment.batching_randomness * a_rowwise_commitment.batching_randomness, ); - a_spark_sumcheck_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; + // a_spark_sumcheck_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); @@ -257,10 +257,10 @@ pub fn verify_spark_single_matrix( final_cntr, ); - let final_cntr_verifier = Verifier::new(col_config); - final_cntr_verifier - .verify(arthur, &a_col_finalts_commitment, &final_cntr_statement) - .context("while verifying WHIR")?; + // let final_cntr_verifier = Verifier::new(col_config); + // final_cntr_verifier + // .verify(arthur, &a_col_finalts_commitment, &final_cntr_statement) + // .context("while verifying WHIR")?; let final_adr = calculate_adr(&evaluation_randomness.to_vec()); let final_mem = calculate_eq( @@ -314,7 +314,7 @@ pub fn verify_spark_single_matrix( rs_timestamp * a_colwise_commitment.batching_randomness * a_colwise_commitment.batching_randomness, ); - a_spark_sumcheck_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?; + // a_spark_sumcheck_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?; ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index 2db13a3f..0b626e94 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -39,25 +39,25 @@ fn main() -> Result<()> { &spark_whir_configs.a_3batched, )?; - // prove_spark_for_single_matrix( - // &mut merlin, - // spark_r1cs.b, - // &memory, - // e_values.b, - // request.claimed_values.b, - // &spark_whir_configs, - // &spark_whir_configs.b_3batched, - // )?; + prove_spark_for_single_matrix( + &mut merlin, + spark_r1cs.b, + &memory, + e_values.b, + request.claimed_values.b, + &spark_whir_configs, + &spark_whir_configs.b_3batched, + )?; - // prove_spark_for_single_matrix( - // &mut merlin, - // spark_r1cs.c, - // &memory, - // e_values.c, - // request.claimed_values.c, - // &spark_whir_configs, - // &spark_whir_configs.c_3batched, - // )?; + prove_spark_for_single_matrix( + &mut merlin, + spark_r1cs.c, + &memory, + e_values.c, + request.claimed_values.c, + &spark_whir_configs, + &spark_whir_configs.c_3batched, + )?; let spark_proof = SPARKProof { transcript: merlin.narg_string().to_vec(), @@ -79,7 +79,11 @@ fn main() -> Result<()> { whir_row: WHIRConfigGnark::new(&spark_proof.whir_params.row), whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col), whir_a3: WHIRConfigGnark::new(&spark_proof.whir_params.a_3batched), + whir_b3: WHIRConfigGnark::new(&spark_proof.whir_params.b_3batched), + whir_c3: WHIRConfigGnark::new(&spark_proof.whir_params.c_3batched), 
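A naming caveat for the log_*_num_terms fields just below: next_power_of_two from provekit_common::utils is used as a round count here and in the IO pattern's sumcheck loops, so these fields are assumed to hold the exponent, i.e. ceil(log2(num_entries)), rather than the padded size itself. A sketch of that assumed behaviour:

```
// Assumed semantics as used in this crate: the bit length of the padded
// domain, so that 2^result >= n. For example, 6 entries -> 3 sumcheck rounds.
fn ceil_log2(n: usize) -> usize {
    assert!(n > 0);
    (usize::BITS - (n - 1).leading_zeros()) as usize
}
```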
log_a_num_terms: next_power_of_two(r1cs.a.num_entries()), + log_b_num_terms: next_power_of_two(r1cs.b.num_entries()), + log_c_num_terms: next_power_of_two(r1cs.c.num_entries()), claimed_value_for_a: request.claimed_values.a, }; diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index 20cafb08..2f9ee86f 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -71,6 +71,8 @@ pub fn prove_spark_for_single_matrix( run_spark_sumcheck(merlin, mles, claimed_value)?; let mut sumcheck_statement = Statement::::new(folding_randomness.len()); + + println!("Sch lasts {:?}", sumcheck_final_folds); let claimed_batched_value = sumcheck_final_folds[0] + @@ -178,11 +180,11 @@ pub fn prove_spark_for_single_matrix( assert!(claimed_rowwise_eval == rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); - rowwise_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); + // rowwise_statement.add_constraint( + // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); - let sumcheck_prover = Prover::new(batched_config.clone()); - sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?; + // let sumcheck_prover = Prover::new(batched_config.clone()); + // sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?; // Colwise @@ -221,13 +223,13 @@ pub fn prove_spark_for_single_matrix( .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); merlin.hint(&final_col_eval)?; - produce_whir_proof( - merlin, - MultilinearPoint(evaluation_randomness.to_vec()), - final_col_eval, - whir_configs.col.clone(), - final_col_ts_witness, - )?; + // produce_whir_proof( + // merlin, + // MultilinearPoint(evaluation_randomness.to_vec()), + // final_col_eval, + // whir_configs.col.clone(), + // final_col_ts_witness, + // )?; // // Colwise RS WS GPA @@ -279,11 +281,11 @@ pub fn prove_spark_for_single_matrix( assert!(claimed_colwise_eval == colwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); - colwise_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval); + // colwise_statement.add_constraint( + // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval); - let sumcheck_prover = Prover::new(batched_config.clone()); - sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; + // let sumcheck_prover = Prover::new(batched_config.clone()); + // sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; Ok(()) } diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs index 4b920589..e2a0aae0 100644 --- a/spark-prover/src/utilities/iopattern/mod.rs +++ b/spark-prover/src/utilities/iopattern/mod.rs @@ -66,8 +66,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { io = io .hint("row_rs_address_claimed_evaluation") .hint("row_rs_value_claimed_evaluation") - .hint("row_rs_timestamp_claimed_evaluation") - .add_whir_proof(&configs.a_3batched); + .hint("row_rs_timestamp_claimed_evaluation"); + // .add_whir_proof(&configs.a_3batched); // // Colwise @@ -79,8 +79,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { } io = io - .hint("col_final_counter_claimed_evaluation") - .add_whir_proof(&configs.col); + 
.hint("col_final_counter_claimed_evaluation"); + // .add_whir_proof(&configs.col); for i in 0..=next_power_of_two(r1cs.a.num_entries()) { io = io.add_sumcheck_polynomials(i); @@ -90,127 +90,127 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { io = io .hint("col_rs_address_claimed_evaluation") .hint("col_rs_value_claimed_evaluation") - .hint("col_rs_timestamp_claimed_evaluation") - .add_whir_proof(&configs.a_3batched); + .hint("col_rs_timestamp_claimed_evaluation"); + // .add_whir_proof(&configs.a_3batched); // // Matrix B - // io = io - // .commit_statement(&configs.b_3batched) - // .commit_statement(&configs.b_3batched) - // .commit_statement(&configs.b_3batched) - // .commit_statement(&configs.row) - // .commit_statement(&configs.col) - // .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) - // .hint("sumcheck_last_folds") - // .add_whir_proof(&configs.b_3batched); + io = io + .commit_statement(&configs.b_3batched) + .commit_statement(&configs.b_3batched) + .commit_statement(&configs.b_3batched) + .commit_statement(&configs.row) + .commit_statement(&configs.col) + .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) + .hint("sumcheck_last_folds"); + // .add_whir_proof(&configs.b_3batched); - // // Rowwise + // Rowwise - // io = io.add_tau_and_gamma(); + io = io.add_tau_and_gamma(); - // for i in 0..=next_power_of_two(r1cs.b.num_rows) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.b.num_rows) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("Row final counter claimed evaluation") - // .add_whir_proof(&configs.row); + io = io + .hint("row_final_counter_claimed_evaluation"); + // .add_whir_proof(&configs.row); - // for i in 0..=next_power_of_two(r1cs.b.num_entries()) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.b.num_entries()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("RS address claimed evaluation") - // .hint("RS value claimed evaluation") - // .hint("RS timestamp claimed evaluation") - // .add_whir_proof(&configs.b_3batched); + io = io + .hint("row_rs_address_claimed_evaluation") + .hint("row_rs_value_claimed_evaluation") + .hint("row_rs_timestamp_claimed_evaluation"); + // .add_whir_proof(&configs.b_3batched); - // // Colwise + // Colwise - // io = io.add_tau_and_gamma(); + io = io.add_tau_and_gamma(); - // for i in 0..=next_power_of_two(r1cs.b.num_cols) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.b.num_cols) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("Col final counter claimed evaluation") - // .add_whir_proof(&configs.col); + io = io + .hint("col_final_counter_claimed_evaluation"); + // .add_whir_proof(&configs.col); - // for i in 0..=next_power_of_two(r1cs.b.num_entries()) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.b.num_entries()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("RS address claimed evaluation") - // .hint("RS value claimed evaluation") - // .hint("RS timestamp claimed evaluation") - // .add_whir_proof(&configs.b_3batched); + io = io + .hint("col_rs_address_claimed_evaluation") + .hint("col_rs_value_claimed_evaluation") + 
.hint("col_rs_timestamp_claimed_evaluation"); + // .add_whir_proof(&configs.b_3batched); // // Matrix C - // io = io - // .commit_statement(&configs.c_3batched) - // .commit_statement(&configs.c_3batched) - // .commit_statement(&configs.c_3batched) - // .commit_statement(&configs.row) - // .commit_statement(&configs.col) - // .add_sumcheck_polynomials(next_power_of_two(r1cs.c.num_entries())) - // .hint("sumcheck_last_folds") - // .add_whir_proof(&configs.c_3batched); + io = io + .commit_statement(&configs.c_3batched) + .commit_statement(&configs.c_3batched) + .commit_statement(&configs.c_3batched) + .commit_statement(&configs.row) + .commit_statement(&configs.col) + .add_sumcheck_polynomials(next_power_of_two(r1cs.c.num_entries())) + .hint("sumcheck_last_folds"); + // .add_whir_proof(&configs.c_3batched); - // // Rowwise + // Rowwise - // io = io.add_tau_and_gamma(); + io = io.add_tau_and_gamma(); - // for i in 0..=next_power_of_two(r1cs.c.num_rows) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.c.num_rows) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("Row final counter claimed evaluation") - // .add_whir_proof(&configs.row); + io = io + .hint("row_final_counter_claimed_evaluation"); + // .add_whir_proof(&configs.row); - // for i in 0..=next_power_of_two(r1cs.c.num_entries()) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.c.num_entries()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("RS address claimed evaluation") - // .hint("RS value claimed evaluation") - // .hint("RS timestamp claimed evaluation") - // .add_whir_proof(&configs.c_3batched); + io = io + .hint("row_rs_address_claimed_evaluation") + .hint("row_rs_value_claimed_evaluation") + .hint("row_rs_timestamp_claimed_evaluation"); + // .add_whir_proof(&configs.c_3batched); - // // Colwise + // Colwise - // io = io.add_tau_and_gamma(); + io = io.add_tau_and_gamma(); - // for i in 0..=next_power_of_two(r1cs.c.num_cols) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.c.num_cols) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("Col final counter claimed evaluation") - // .add_whir_proof(&configs.col); + io = io + .hint("col_final_counter_claimed_evaluation"); + // .add_whir_proof(&configs.col); - // for i in 0..=next_power_of_two(r1cs.c.num_entries()) { - // io = io.add_sumcheck_polynomials(i); - // io = io.add_line(); - // } + for i in 0..=next_power_of_two(r1cs.c.num_entries()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } - // io = io - // .hint("RS address claimed evaluation") - // .hint("RS value claimed evaluation") - // .hint("RS timestamp claimed evaluation") - // .add_whir_proof(&configs.c_3batched); + io = io + .hint("col_rs_address_claimed_evaluation") + .hint("col_rs_value_claimed_evaluation") + .hint("col_rs_timestamp_claimed_evaluation"); + // .add_whir_proof(&configs.c_3batched); io } diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index 09e3e01b..97729e41 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -62,7 +62,11 @@ pub struct SPARKProofGnark { pub whir_row: WHIRConfigGnark, pub whir_col: WHIRConfigGnark, pub whir_a3: WHIRConfigGnark, + pub whir_b3: WHIRConfigGnark, + pub whir_c3: 
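The matrix A, B and C sections of create_io_pattern above are now identical up to the matrix dimensions and the per-matrix 3-batched config, so once the commented-out add_whir_proof calls are restored they could be collapsed into one helper. A possible shape, sketched with the builder methods already used in this file (the helper name and the MatrixCfg alias are illustrative, not part of the crate):

```
// Illustrative only: MatrixCfg stands for whatever config type
// SPARKWHIRConfigs holds; every builder call below appears above.
fn add_spark_matrix_pattern(
    mut io: IOPattern,
    num_rows: usize,
    num_cols: usize,
    num_entries: usize,
    cfg3: &MatrixCfg,
    row_cfg: &MatrixCfg,
    col_cfg: &MatrixCfg,
) -> IOPattern {
    io = io
        .commit_statement(cfg3)
        .commit_statement(cfg3)
        .commit_statement(cfg3)
        .commit_statement(row_cfg)
        .commit_statement(col_cfg)
        .add_sumcheck_polynomials(next_power_of_two(num_entries))
        .hint("sumcheck_last_folds");
    for (side, len) in [("row", num_rows), ("col", num_cols)] {
        io = io.add_tau_and_gamma();
        for i in 0..=next_power_of_two(len) {
            io = io.add_sumcheck_polynomials(i).add_line();
        }
        io = io.hint(&format!("{side}_final_counter_claimed_evaluation"));
        // re-enable here: io = io.add_whir_proof(row_cfg or col_cfg);
        for i in 0..=next_power_of_two(num_entries) {
            io = io.add_sumcheck_polynomials(i).add_line();
        }
        io = io
            .hint(&format!("{side}_rs_address_claimed_evaluation"))
            .hint(&format!("{side}_rs_value_claimed_evaluation"))
            .hint(&format!("{side}_rs_timestamp_claimed_evaluation"));
        // re-enable here: io = io.add_whir_proof(cfg3);
    }
    io
}
```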
WHIRConfigGnark, pub log_a_num_terms: usize, + pub log_b_num_terms: usize, + pub log_c_num_terms: usize, // Remove once spark testing is completed #[serde(with="serde_ark")] pub claimed_value_for_a: FieldElement, From 2f4717b90ed6d29e37d1f0612e9d098cc3d1c77e Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Tue, 30 Sep 2025 20:20:00 +0800 Subject: [PATCH 21/34] Complete --- recursive-verifier/app/circuit/circuit.go | 180 ++++++++++++-------- recursive-verifier/app/circuit/common.go | 57 +++++++ recursive-verifier/app/circuit/types.go | 7 + spark-prover/src/bin/spark-verifier.rs | 48 +++--- spark-prover/src/gpa.rs | 1 - spark-prover/src/main.rs | 17 +- spark-prover/src/spark.rs | 54 +++--- spark-prover/src/utilities/iopattern/mod.rs | 73 ++++---- spark-prover/src/utilities/mod.rs | 3 - 9 files changed, 272 insertions(+), 168 deletions(-) diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go index 132ce25f..28c764b4 100644 --- a/recursive-verifier/app/circuit/circuit.go +++ b/recursive-verifier/app/circuit/circuit.go @@ -50,45 +50,48 @@ type Circuit struct { WHIRRow WHIRParams WHIRCol WHIRParams + PointRow []frontend.Variable + PointCol []frontend.Variable + SparkA SPARKMatrixData SparkB SPARKMatrixData SparkC SPARKMatrixData } func (circuit *Circuit) Define(api frontend.API) error { - // sc, arthur, uapi, err := initializeComponents(api, circuit) - // if err != nil { - // return err - // } + sc, arthur, uapi, err := initializeComponents(api, circuit) + if err != nil { + return err + } - // spartanCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRParamsWitness) + spartanCommitment, err := parseBatchedCommitment(arthur, circuit.WHIRParamsWitness) - // if err != nil { - // return err - // } + if err != nil { + return err + } - // tRand := make([]frontend.Variable, circuit.LogNumConstraints) - // err = arthur.FillChallengeScalars(tRand) - // if err != nil { - // return err - // } + tRand := make([]frontend.Variable, circuit.LogNumConstraints) + err = arthur.FillChallengeScalars(tRand) + if err != nil { + return err + } - // spartanSumcheckRand, spartanSumcheckLastValue, err := runZKSumcheck(api, sc, uapi, circuit, arthur, frontend.Variable(0), circuit.LogNumConstraints, 4, circuit.WHIRParamsHidingSpartan) - // if err != nil { - // return err - // } + spartanSumcheckRand, spartanSumcheckLastValue, err := runZKSumcheck(api, sc, uapi, circuit, arthur, frontend.Variable(0), circuit.LogNumConstraints, 4, circuit.WHIRParamsHidingSpartan) + if err != nil { + return err + } - // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.WitnessMerkle, circuit.WitnessFirstRound, circuit.WHIRParamsWitness, [][]frontend.Variable{circuit.WitnessClaimedEvaluations, circuit.WitnessBlindingEvaluations}, circuit.WitnessLinearStatementEvaluations, spartanCommitment, - // [][]frontend.Variable{{}, {}}, - // [][]frontend.Variable{}, - // ) + whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.WitnessMerkle, circuit.WitnessFirstRound, circuit.WHIRParamsWitness, [][]frontend.Variable{circuit.WitnessClaimedEvaluations, circuit.WitnessBlindingEvaluations}, circuit.WitnessLinearStatementEvaluations, spartanCommitment, + [][]frontend.Variable{{}, {}}, + [][]frontend.Variable{}, + ) - // if err != nil { - // return err - // } + if err != nil { + return err + } - // x := api.Mul(api.Sub(api.Mul(circuit.WitnessClaimedEvaluations[0], circuit.WitnessClaimedEvaluations[1]), circuit.WitnessClaimedEvaluations[2]), calculateEQ(api, 
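The assertion completed just below is Spartan's outer sumcheck check: the final sumcheck value must equal (va*vb - vc) weighted by the equality polynomial tying the sumcheck randomness to the earlier challenge vector t. A reference-style sketch, assuming calculate_eq from provekit_common::utils::sumcheck takes two field-element slices and returns their eq evaluation:

```
// va, vb, vc are the claimed evaluations of Az, Bz, Cz at the sumcheck
// point r; t is the challenge vector squeezed before the sumcheck ran.
fn spartan_outer_check(
    va: FieldElement,
    vb: FieldElement,
    vc: FieldElement,
    last_sumcheck_value: FieldElement,
    r: &[FieldElement],
    t: &[FieldElement],
) -> bool {
    last_sumcheck_value == (va * vb - vc) * calculate_eq(r, t)
}
```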
spartanSumcheckRand, tRand)) - // api.AssertIsEqual(spartanSumcheckLastValue, x) + x := api.Mul(api.Sub(api.Mul(circuit.WitnessClaimedEvaluations[0], circuit.WitnessClaimedEvaluations[1]), circuit.WitnessClaimedEvaluations[2]), calculateEQ(api, spartanSumcheckRand, tRand)) + api.AssertIsEqual(spartanSumcheckLastValue, x) if circuit.UseSpark { sc := skyscraper.NewSkyscraper(api, 2) @@ -137,11 +140,11 @@ func (circuit *Circuit) Define(api frontend.API) error { return err } } else { - // matrixExtensionEvals := evaluateR1CSMatrixExtension(api, circuit, spartanSumcheckRand, whirFoldingRandomness) + matrixExtensionEvals := evaluateR1CSMatrixExtension(api, circuit, spartanSumcheckRand, whirFoldingRandomness) - // for i := range 3 { - // api.AssertIsEqual(matrixExtensionEvals[i], circuit.WitnessLinearStatementEvaluations[i]) - // } + for i := range 3 { + api.AssertIsEqual(matrixExtensionEvals[i], circuit.WitnessLinearStatementEvaluations[i]) + } } return nil @@ -192,6 +195,18 @@ func verifyCircuit( csparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(hints.CHints.sparkClaimedEvaluations[1].Limbs) csparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(hints.CHints.sparkClaimedEvaluations[2].Limbs) + contPointRow := make([]frontend.Variable, len(hints.pointRow)) + pointRow := make([]frontend.Variable, len(hints.pointRow)) + for i := range len(hints.pointRow) { + pointRow[i] = typeConverters.LimbsToBigIntMod(hints.pointRow[i].Limbs) + } + + contPointCol := make([]frontend.Variable, len(hints.pointCol)) + pointCol := make([]frontend.Variable, len(hints.pointCol)) + for i := range len(hints.pointCol) { + pointCol[i] = typeConverters.LimbsToBigIntMod(hints.pointCol[i].Limbs) + } + fmt.Print(bsparkSumcheckLast) fSums, gSums := parseClaimedEvaluations(claimedEvaluations, true) @@ -270,7 +285,12 @@ func verifyCircuit( LogANumTerms: sparkConfig.LogANumTerms, + PointRow: contPointRow, + PointCol: contPointCol, + SparkA: SPARKMatrixData{ + Claimed: typeConverters.LimbsToBigIntMod(hints.AHints.claimed.Limbs), + SparkSumcheckLast: acontSparkSumcheckLast, RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.AHints.rowFinalCounter.Limbs), @@ -303,6 +323,8 @@ func verifyCircuit( }, SparkB: SPARKMatrixData{ + Claimed: typeConverters.LimbsToBigIntMod(hints.BHints.claimed.Limbs), + SparkSumcheckLast: bcontSparkSumcheckLast, RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.BHints.rowFinalCounter.Limbs), @@ -335,6 +357,8 @@ func verifyCircuit( }, SparkC: SPARKMatrixData{ + Claimed: typeConverters.LimbsToBigIntMod(hints.CHints.claimed.Limbs), + SparkSumcheckLast: ccontSparkSumcheckLast, RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.CHints.rowFinalCounter.Limbs), @@ -426,7 +450,12 @@ func verifyCircuit( WHIRCol: NewWhirParams(sparkConfig.WHIRCol), LogANumTerms: sparkConfig.LogANumTerms, + PointRow: pointRow, + PointCol: pointCol, + SparkA: SPARKMatrixData{ + Claimed: typeConverters.LimbsToBigIntMod(hints.AHints.claimed.Limbs), + SparkSumcheckLast: asparkSumcheckLast, RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.AHints.rowFinalCounter.Limbs), @@ -459,6 +488,8 @@ func verifyCircuit( }, SparkB: SPARKMatrixData{ + Claimed: typeConverters.LimbsToBigIntMod(hints.BHints.claimed.Limbs), + SparkSumcheckLast: bsparkSumcheckLast, RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.BHints.rowFinalCounter.Limbs), @@ -491,6 +522,8 @@ func verifyCircuit( }, SparkC: SPARKMatrixData{ + Claimed: typeConverters.LimbsToBigIntMod(hints.CHints.claimed.Limbs), + SparkSumcheckLast: csparkSumcheckLast, 
RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.CHints.rowFinalCounter.Limbs), @@ -654,21 +687,20 @@ func sparkSingleMatrix( } // After debug: Change 1 to actual claimed value - sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, 1, matrix.LogANumTerms, 4) + sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, matrix.Claimed, matrix.LogANumTerms, 4) if err != nil { return err } - _ = sparkSumcheckFoldingRandomness - _ = sparkSumcheckLastEval + api.AssertIsEqual(sparkSumcheckLastEval, api.Mul(matrix.SparkSumcheckLast[0], matrix.SparkSumcheckLast[1], matrix.SparkSumcheckLast[2])) - // whirFoldingRandomness, err := RunZKWhir(api, arthur, uapi, sc, circuit.SparkSumcheckMerkle, circuit.SparkSumcheckFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}, {}, {}}, []frontend.Variable{}, sumcheckBatchingRandomness, sumcheckInitialOODQueries, sumcheckInitialOODAnswers, sumcheckRootHash, - // [][]frontend.Variable{{circuit.SparkSumcheckLast[0]}, {circuit.SparkSumcheckLast[1]}, {circuit.SparkSumcheckLast[2]}}, - // [][]frontend.Variable{sparkSumcheckFoldingRandomness}, - // ) - // if err != nil { - // return err - // } + _, err = RunZKWhir(api, arthur, uapi, sc, matrix.SparkSumcheckMerkle, matrix.SparkSumcheckFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}, {}, {}}, []frontend.Variable{}, sumcheckCommitment, + [][]frontend.Variable{{matrix.SparkSumcheckLast[0]}, {matrix.SparkSumcheckLast[1]}, {matrix.SparkSumcheckLast[2]}}, + [][]frontend.Variable{sparkSumcheckFoldingRandomness}, + ) + if err != nil { + return err + } // Rowwise @@ -680,7 +712,7 @@ func sparkSingleMatrix( gamma := tauGammaTemp[1] // Change this debug statement - gpaResult, err := gpaSumcheckVerifier(api, arthur, 8+2) + gpaResult, err := gpaSumcheckVerifier(api, arthur, len(circuit.PointRow)+2) if err != nil { return err } @@ -692,15 +724,18 @@ func sparkSingleMatrix( evaluation_randomness := gpaResult.randomness[1:] addr := CalculateAdr(api, evaluation_randomness) - mem := calculateEQ(api, []frontend.Variable{0, 0, 0, 0, 0, 0, 0, 0}, evaluation_randomness) + mem := calculateEQ(api, circuit.PointRow, evaluation_randomness) init_cntr := 0 init_opening := api.Sub(api.Add(api.Mul(addr, gamma, gamma), api.Mul(mem, gamma), init_cntr), tau) - // _, err = RunZKWhir(api, arthur, uapi, sc, circuit.RowFinalMerkle, circuit.RowFinalMerkleFirstRound, circuit.WHIRRow, [][]frontend.Variable{{}}, []frontend.Variable{}, rowFinaltsBatchingRandomness, rowFinaltsInitialOODQueries, rowFinaltsInitialOODAnswers, rowFinaltsRootHash, - // [][]frontend.Variable{{matrix.RowFinalCounter}}, - // [][]frontend.Variable{evaluation_randomness}, - // ) + _, err = RunZKWhir(api, arthur, uapi, sc, matrix.RowFinalMerkle, matrix.RowFinalMerkleFirstRound, circuit.WHIRRow, [][]frontend.Variable{{}}, []frontend.Variable{}, rowFinalCommitment, + [][]frontend.Variable{{matrix.RowFinalCounter}}, + [][]frontend.Variable{evaluation_randomness}, + ) + if err != nil { + return err + } final_opening := api.Sub(api.Add(api.Mul(addr, gamma, gamma), api.Mul(mem, gamma), matrix.RowFinalCounter), tau) @@ -709,7 +744,7 @@ func sparkSingleMatrix( api.AssertIsEqual(gpaResult.lastSumcheckValue, evaluated_value) // Change this after debug - gpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, 8+2) + gpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, matrix.LogANumTerms+2) if err != nil { return err } @@ -727,24 +762,18 @@ func sparkSingleMatrix( api.AssertIsEqual(gpaResultRSWS.lastSumcheckValue, 
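The mem value above is where the outer Spartan claim enters the memory argument: the row-side memory is the table of eq(point_row, j) weights over row indices j, so its multilinear extension evaluated at the GPA randomness is exactly calculateEQ(PointRow, evaluation_randomness). For reference, a sketch of the standard equality polynomial this relies on (provekit's calculate_eq is assumed to compute the same product, and FieldElement is assumed Copy, as its use elsewhere in this patch suggests):

```
// eq(x, y) = prod_i (x_i*y_i + (1 - x_i)*(1 - y_i)); equals 1 exactly when
// two boolean points coincide and 0 elsewhere on the hypercube.
fn eq_eval(x: &[FieldElement], y: &[FieldElement]) -> FieldElement {
    assert_eq!(x.len(), y.len());
    let mut acc = FieldElement::from(1);
    for (xi, yi) in x.iter().zip(y.iter()) {
        let (xi, yi) = (*xi, *yi);
        acc = acc * (xi * yi + (FieldElement::from(1) - xi) * (FieldElement::from(1) - yi));
    }
    acc
}
```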
rsws_evaluated_value) - // _, err = RunZKWhir(api, arthur, uapi, sc, matrix.RowwiseMerkle, matrix.RowwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, rowwiseCommitment, - // [][]frontend.Variable{{matrix.RowRSAddressEvaluation}, {matrix.RowRSValueEvaluation}, {matrix.RowRSTimestampEvaluation}}, - // [][]frontend.Variable{rsws_evaluation_randomness}, - // ) - _ = rsws_evaluation_randomness + _, err = RunZKWhir(api, arthur, uapi, sc, matrix.RowwiseMerkle, matrix.RowwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, rowwiseCommitment, + [][]frontend.Variable{{matrix.RowRSAddressEvaluation}, {matrix.RowRSValueEvaluation}, {matrix.RowRSTimestampEvaluation}}, + [][]frontend.Variable{rsws_evaluation_randomness}, + ) + if err != nil { + return err + } api.AssertIsEqual(api.Mul(claimedInit, claimedWS), api.Mul(claimedRS, claimedFinal)) // Colwise - _ = sumcheckCommitment - _ = colwiseCommitment - _ = rowwiseCommitment - _ = rowFinalCommitment - _ = colFinalCommitment - - // Colwise - colwiseTauGammaTemp := make([]frontend.Variable, 2) if err := arthur.FillChallengeScalars(colwiseTauGammaTemp); err != nil { return err @@ -753,7 +782,7 @@ func sparkSingleMatrix( colwiseGamma := colwiseTauGammaTemp[1] // Change this debug statement - colwiseInitFinalGpaResult, err := gpaSumcheckVerifier(api, arthur, 8+2) + colwiseInitFinalGpaResult, err := gpaSumcheckVerifier(api, arthur, len(circuit.PointCol)-1+2) if err != nil { return err } @@ -765,23 +794,26 @@ func sparkSingleMatrix( colwiseEvaluation_randomness := colwiseInitFinalGpaResult.randomness[1:] colwiseaddr := CalculateAdr(api, colwiseEvaluation_randomness) - // Add necessary multiplier here - colwisemem := calculateEQ(api, []frontend.Variable{0, 0, 0, 0, 0, 0, 0, 0}, colwiseEvaluation_randomness) + + colwisemem := api.Mul(calculateEQ(api, circuit.PointCol[1:], colwiseEvaluation_randomness), api.Sub(1, circuit.PointCol[0])) colwiseinit_cntr := 0 colwiseinit_opening := api.Sub(api.Add(api.Mul(colwiseaddr, colwiseGamma, colwiseGamma), api.Mul(colwisemem, colwiseGamma), colwiseinit_cntr), colwiseTau) - // _, err = RunZKWhir(api, arthur, uapi, sc, circuit.SparkA.ColFinalMerkle, circuit.SparkA.ColFinalMerkleFirstRound, circuit.WHIRCol, [][]frontend.Variable{{}}, []frontend.Variable{}, colFinalCommitment, - // [][]frontend.Variable{{matrix.ColFinalCounter}}, - // [][]frontend.Variable{colwiseEvaluation_randomness}, - // ) + _, err = RunZKWhir(api, arthur, uapi, sc, circuit.SparkA.ColFinalMerkle, circuit.SparkA.ColFinalMerkleFirstRound, circuit.WHIRCol, [][]frontend.Variable{{}}, []frontend.Variable{}, colFinalCommitment, + [][]frontend.Variable{{matrix.ColFinalCounter}}, + [][]frontend.Variable{colwiseEvaluation_randomness}, + ) + if err != nil { + return err + } colwisefinal_opening := api.Sub(api.Add(api.Mul(colwiseaddr, colwiseGamma, colwiseGamma), api.Mul(colwisemem, colwiseGamma), matrix.ColFinalCounter), colwiseTau) colwiseevaluated_value := api.Add(api.Mul(colwiseinit_opening, api.Sub(1, colwiseLast_randomness)), api.Mul(colwisefinal_opening, colwiseLast_randomness)) api.AssertIsEqual(colwiseInitFinalGpaResult.lastSumcheckValue, colwiseevaluated_value) // Colwise RS WS - colwisegpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, 8+2) + colwisegpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, matrix.LogANumTerms+2) if err != nil { return err } @@ -799,11 +831,13 @@ func sparkSingleMatrix( api.AssertIsEqual(colwisegpaResultRSWS.lastSumcheckValue, 
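// The colwise check mirrors the rowwise one; only the memory term changes:
// PointCol carries one extra coordinate, so colwisemem above scales
// eq(PointCol[1:], rand) by (1 - PointCol[0]).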
colwisersws_evaluated_value) - // _, err = RunZKWhir(api, arthur, uapi, sc, matrix.ColwiseMerkle, matrix.ColwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, colwiseCommitment, - // [][]frontend.Variable{{matrix.ColRSAddressEvaluation}, {matrix.ColRSValueEvaluation}, {matrix.ColRSTimestampEvaluation}}, - // [][]frontend.Variable{colwisersws_evaluation_randomness}, - // ) - _ = colwisersws_evaluation_randomness + _, err = RunZKWhir(api, arthur, uapi, sc, matrix.ColwiseMerkle, matrix.ColwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, colwiseCommitment, + [][]frontend.Variable{{matrix.ColRSAddressEvaluation}, {matrix.ColRSValueEvaluation}, {matrix.ColRSTimestampEvaluation}}, + [][]frontend.Variable{colwisersws_evaluation_randomness}, + ) + if err != nil { + return err + } api.AssertIsEqual(api.Mul(colwiseClaimedInit, colwiseClaimedWS), api.Mul(colwiseClaimedRS, colwiseClaimedFinal)) diff --git a/recursive-verifier/app/circuit/common.go b/recursive-verifier/app/circuit/common.go index 015af622..34d47dad 100644 --- a/recursive-verifier/app/circuit/common.go +++ b/recursive-verifier/app/circuit/common.go @@ -130,6 +130,12 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, var colRSValueEvaluation []Fp256 var colRSTimestampEvaluation []Fp256 + var claimedA Fp256 + var claimedB Fp256 + var claimedC Fp256 + var pointRow []Fp256 + var pointCol []Fp256 + for _, op := range spark_io.Ops { switch op.Kind { case gnarkNimue.Hint: @@ -261,6 +267,51 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, return fmt.Errorf("failed to deserialize col_rs_timestamp_claimed_evaluation : %w", err) } colRSTimestampEvaluation = append(colRSTimestampEvaluation, temp) + case "claimed_a": + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &claimedA, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize row_rs_address_claimed_evaluation : %w", err) + } + case "claimed_b": + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &claimedB, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize row_rs_address_claimed_evaluation : %w", err) + } + case "claimed_c": + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &claimedC, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize row_rs_address_claimed_evaluation : %w", err) + } + case "point_row": + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &pointRow, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize row_rs_address_claimed_evaluation : %w", err) + } + case "point_col": + _, err = arkSerialize.CanonicalDeserializeWithMode( + bytes.NewReader(sparkConfig.Transcript[start:end]), + &pointCol, + false, false, + ) + if err != nil { + return fmt.Errorf("failed to deserialize row_rs_address_claimed_evaluation : %w", err) + } } if err != nil { @@ -322,10 +373,14 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, var ccolwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRB3, &sparkMerklePaths, &sparkStirAnswers) hints := Hints{ + pointRow: pointRow, + pointCol: pointCol, + witnessHints: witnessData, spartanHidingHint: hidingSpartanData, AHints: SparkMatrixHints{ + claimed: 
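// Hint parsing order must mirror the prover's transcript: claimed_a,
// claimed_b, claimed_c, point_row and point_col precede the per-matrix data.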
claimedA, sparkSumcheckData: asparkSumcheckData, rowFinalMerkle: arowFinal, rowwiseSparkMerkle: arowwiseSparkMerkle, @@ -346,6 +401,7 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, }, BHints: SparkMatrixHints{ + claimed: claimedB, sparkSumcheckData: bsparkSumcheckData, rowFinalMerkle: browFinal, rowwiseSparkMerkle: browwiseSparkMerkle, @@ -366,6 +422,7 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, }, CHints: SparkMatrixHints{ + claimed: claimedC, sparkSumcheckData: csparkSumcheckData, rowFinalMerkle: crowFinal, rowwiseSparkMerkle: crowwiseSparkMerkle, diff --git a/recursive-verifier/app/circuit/types.go b/recursive-verifier/app/circuit/types.go index 22b67cf5..f1c97f99 100644 --- a/recursive-verifier/app/circuit/types.go +++ b/recursive-verifier/app/circuit/types.go @@ -107,6 +107,9 @@ type Config struct { } type Hints struct { + pointRow []Fp256 + pointCol []Fp256 + witnessHints ZKHint spartanHidingHint ZKHint @@ -167,6 +170,8 @@ type Commitment struct { } type SPARKMatrixData struct { + Claimed frontend.Variable + WHIRA3 WHIRParams LogANumTerms int @@ -199,6 +204,8 @@ type SPARKMatrixData struct { } type SparkMatrixHints struct { + claimed Fp256 + sparkSumcheckData ZKHint rowFinalMerkle ZKHint rowwiseSparkMerkle ZKHint diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index 63a37a6e..082c8be6 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -38,6 +38,12 @@ fn main() -> Result<()> { let io = IOPattern::from_string(spark_proof.io_pattern.clone()); let mut arthur = io.to_verifier_state(&spark_proof.transcript); + let claimed_a: FieldElement = arthur.hint()?; + let claimed_b: FieldElement = arthur.hint()?; + let claimed_c: FieldElement = arthur.hint()?; + let point_row: Vec = arthur.hint()?; + let point_col: Vec = arthur.hint()?; + verify_spark_single_matrix( &spark_proof.whir_params.row, &spark_proof.whir_params.col, @@ -119,15 +125,15 @@ pub fn verify_spark_single_matrix( num_nonzero_terms, )); - // a_spark_sumcheck_statement_verifier.add_constraint( - // Weights::evaluation(MultilinearPoint(randomness.clone())), - // final_folds[0] + - // final_folds[1] * a_sumcheck_commitment.batching_randomness + - // final_folds[2] * a_sumcheck_commitment.batching_randomness * a_sumcheck_commitment.batching_randomness, - // ); + a_spark_sumcheck_statement_verifier.add_constraint( + Weights::evaluation(MultilinearPoint(randomness.clone())), + final_folds[0] + + final_folds[1] * a_sumcheck_commitment.batching_randomness + + final_folds[2] * a_sumcheck_commitment.batching_randomness * a_sumcheck_commitment.batching_randomness, + ); - // let a_spark_sumcheck_verifier = Verifier::new(num_nonzero_term_batched3_config); - // a_spark_sumcheck_verifier.verify(arthur, &a_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?; + let a_spark_sumcheck_verifier = Verifier::new(num_nonzero_term_batched3_config); + a_spark_sumcheck_verifier.verify(arthur, &a_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?; // Matrix A - Rowwise @@ -156,8 +162,6 @@ pub fn verify_spark_single_matrix( let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; let final_cntr: FieldElement = arthur.hint()?; - println!("Final cntr {:?}", final_cntr); //Reilabs Debug: - let mut final_cntr_statement = Statement::::new(next_power_of_two(num_rows)); final_cntr_statement.add_constraint( @@ -165,10 +169,10 @@ pub fn verify_spark_single_matrix( 
final_cntr, ); - // let final_cntr_verifier = Verifier::new(row_config); - // final_cntr_verifier - // .verify(arthur, &a_row_finalts_commitment, &final_cntr_statement) - // .context("while verifying WHIR")?; + let final_cntr_verifier = Verifier::new(row_config); + final_cntr_verifier + .verify(arthur, &a_row_finalts_commitment, &final_cntr_statement) + .context("while verifying WHIR")?; let final_adr = calculate_adr(&evaluation_randomness.to_vec()); let final_mem = calculate_eq( @@ -216,7 +220,7 @@ pub fn verify_spark_single_matrix( rs_timestamp * a_rowwise_commitment.batching_randomness * a_rowwise_commitment.batching_randomness, ); - // a_spark_sumcheck_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; + a_spark_sumcheck_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); @@ -257,10 +261,10 @@ pub fn verify_spark_single_matrix( final_cntr, ); - // let final_cntr_verifier = Verifier::new(col_config); - // final_cntr_verifier - // .verify(arthur, &a_col_finalts_commitment, &final_cntr_statement) - // .context("while verifying WHIR")?; + let final_cntr_verifier = Verifier::new(col_config); + final_cntr_verifier + .verify(arthur, &a_col_finalts_commitment, &final_cntr_statement) + .context("while verifying WHIR")?; let final_adr = calculate_adr(&evaluation_randomness.to_vec()); let final_mem = calculate_eq( @@ -297,10 +301,6 @@ pub fn verify_spark_single_matrix( let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; - println!("{:?}", rs_opening); //Reilabs Debug: - println!("{:?}", rs_adr); //Reilabs Debug: - println!("{:?}", evaluated_value); //Reilabs Debug: - println!("{:?}", gpa_result.a_last_sumcheck_value); //Reilabs Debug: ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); let mut a_spark_colwise_statement_verifier = Statement::::new(next_power_of_two( @@ -314,7 +314,7 @@ pub fn verify_spark_single_matrix( rs_timestamp * a_colwise_commitment.batching_randomness * a_colwise_commitment.batching_randomness, ); - // a_spark_sumcheck_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?; + a_spark_sumcheck_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?; ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); diff --git a/spark-prover/src/gpa.rs b/spark-prover/src/gpa.rs index cfa1273e..724a1850 100644 --- a/spark-prover/src/gpa.rs +++ b/spark-prover/src/gpa.rs @@ -54,7 +54,6 @@ pub fn run_gpa( fn calculate_binary_multiplication_tree( array_to_prove: Vec, ) -> Vec> { - println!("{:?}", array_to_prove.len()); assert!(array_to_prove.len() == 1 << next_power_of_two(array_to_prove.len())); let mut layers = vec![]; let mut current_layer = array_to_prove; diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index 0b626e94..e402ca00 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -1,6 +1,6 @@ use { anyhow::{Context, Result}, - provekit_common::{file::write, utils::next_power_of_two, gnark::WHIRConfigGnark}, + provekit_common::{file::write, gnark::WHIRConfigGnark, utils::next_power_of_two}, spark_prover::{ memory::{calculate_e_values_for_r1cs, calculate_memory}, spark::prove_spark_for_single_matrix, @@ -10,7 +10,7 @@ use { }, whir::create_whir_configs, }, - std::{fs::File, io::Write, mem}, + std::{fs::File, io::Write, mem}, whir::whir::utils::HintSerialize, }; fn main() -> 
Result<()> { @@ -24,11 +24,19 @@ fn main() -> Result<()> { let request = deserialize_request("spark-prover/request.json") .context("Error: Failed to deserialize the request object")?; - let memory = calculate_memory(request.point_to_evaluate); + let memory = calculate_memory(request.point_to_evaluate.clone()); let e_values = calculate_e_values_for_r1cs(&memory, &r1cs); let io_pattern = create_io_pattern(&r1cs, &spark_whir_configs); let mut merlin = io_pattern.to_prover_state(); + merlin.hint(&request.claimed_values.a)?; + merlin.hint(&request.claimed_values.b)?; + merlin.hint(&request.claimed_values.c)?; + + merlin.hint(&request.point_to_evaluate.row)?; + merlin.hint(&request.point_to_evaluate.col)?; + + prove_spark_for_single_matrix( &mut merlin, spark_r1cs.a, @@ -84,14 +92,11 @@ fn main() -> Result<()> { log_a_num_terms: next_power_of_two(r1cs.a.num_entries()), log_b_num_terms: next_power_of_two(r1cs.b.num_entries()), log_c_num_terms: next_power_of_two(r1cs.c.num_entries()), - claimed_value_for_a: request.claimed_values.a, }; let mut gnark_spark_proof_file = File::create("spark-prover/gnark_spark_proof.json") .context("Error: Failed to create the spark proof file")?; - println!("IO{:?}", spark_proof_gnark.io_pattern); //Reilabs Debug: - gnark_spark_proof_file .write_all(serde_json::to_string(&spark_proof_gnark).unwrap().as_bytes()) .expect("Writing spark gnark parameters to a file failed"); diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index 2f9ee86f..b4b7b92c 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -71,19 +71,17 @@ pub fn prove_spark_for_single_matrix( run_spark_sumcheck(merlin, mles, claimed_value)?; let mut sumcheck_statement = Statement::::new(folding_randomness.len()); - - println!("Sch lasts {:?}", sumcheck_final_folds); let claimed_batched_value = sumcheck_final_folds[0] + sumcheck_final_folds[1] * sumcheck_witness.batching_randomness + sumcheck_final_folds[2] * sumcheck_witness.batching_randomness * sumcheck_witness.batching_randomness; - // sumcheck_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value); + sumcheck_statement.add_constraint( + Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value); - // let sumcheck_prover = Prover::new(batched_config.clone()); - // sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?; + let sumcheck_prover = Prover::new(batched_config.clone()); + sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?; // Rowwise @@ -122,13 +120,13 @@ pub fn prove_spark_for_single_matrix( .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); merlin.hint(&final_row_eval)?; - // produce_whir_proof( - // merlin, - // MultilinearPoint(evaluation_randomness.to_vec()), - // final_row_eval, - // whir_configs.row.clone(), - // final_row_ts_witness, - // )?; + produce_whir_proof( + merlin, + MultilinearPoint(evaluation_randomness.to_vec()), + final_row_eval, + whir_configs.row.clone(), + final_row_ts_witness, + )?; // // Rowwise RS WS GPA @@ -180,11 +178,11 @@ pub fn prove_spark_for_single_matrix( assert!(claimed_rowwise_eval == rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); - // rowwise_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); + rowwise_statement.add_constraint( + 
Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); - // let sumcheck_prover = Prover::new(batched_config.clone()); - // sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?; + let sumcheck_prover = Prover::new(batched_config.clone()); + sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?; // Colwise @@ -223,13 +221,13 @@ pub fn prove_spark_for_single_matrix( .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); merlin.hint(&final_col_eval)?; - // produce_whir_proof( - // merlin, - // MultilinearPoint(evaluation_randomness.to_vec()), - // final_col_eval, - // whir_configs.col.clone(), - // final_col_ts_witness, - // )?; + produce_whir_proof( + merlin, + MultilinearPoint(evaluation_randomness.to_vec()), + final_col_eval, + whir_configs.col.clone(), + final_col_ts_witness, + )?; // // Colwise RS WS GPA @@ -281,11 +279,11 @@ pub fn prove_spark_for_single_matrix( assert!(claimed_colwise_eval == colwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); - // colwise_statement.add_constraint( - // Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval); + colwise_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval); - // let sumcheck_prover = Prover::new(batched_config.clone()); - // sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; + let sumcheck_prover = Prover::new(batched_config.clone()); + sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; Ok(()) } diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs index e2a0aae0..b0b892e0 100644 --- a/spark-prover/src/utilities/iopattern/mod.rs +++ b/spark-prover/src/utilities/iopattern/mod.rs @@ -32,6 +32,13 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { let mut io = IOPattern::new("💥"); // Matrix A + io = io + .hint("claimed_a") + .hint("claimed_b") + .hint("claimed_c") + .hint("point_row") + .hint("point_col"); + io = io .commit_statement(&configs.a_3batched) @@ -40,8 +47,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { .commit_statement(&configs.row) .commit_statement(&configs.col) .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) - .hint("sumcheck_last_folds"); - // .add_whir_proof(&configs.a_3batched); + .hint("sumcheck_last_folds") + .add_whir_proof(&configs.a_3batched); // Rowwise @@ -53,8 +60,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { } io = io - .hint("row_final_counter_claimed_evaluation"); - // .add_whir_proof(&configs.row); + .hint("row_final_counter_claimed_evaluation") + .add_whir_proof(&configs.row); // Can I send all hints once in struct? 
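The `*_final_counter_*` and `*_rs_*` hints threaded through this IO pattern back an offline memory-checking argument: each access to the eq-table memory is fingerprinted as addr·gamma² + value·gamma + timestamp − tau, and soundness reduces to the grand-product identity Init·WS = RS·Final, asserted as `claimed_init * claimed_ws == claimed_rs * claimed_final` in both prover and verifier. A self-contained sketch of that invariant over a toy 61-bit prime (modulus, memory contents and trace are illustrative only, not the crate's API):

```rust
const P: u128 = 2_305_843_009_213_693_951; // 2^61 - 1, a toy modulus

fn mul(a: u128, b: u128) -> u128 { a * b % P }

// Same shape as init_opening / final_opening: addr*gamma^2 + value*gamma + ts - tau.
fn fingerprint(addr: u128, value: u128, ts: u128, gamma: u128, tau: u128) -> u128 {
    (mul(mul(addr, gamma), gamma) + mul(value, gamma) + ts + P - tau) % P
}

fn main() {
    let mem = [7u128, 11, 13, 17]; // read-only table, e.g. eq(point_row, x)
    let trace = [2usize, 0, 2, 3, 0]; // addresses read by the nonzero entries
    let (gamma, tau) = (5u128, 9u128); // verifier challenges, fixed for the demo

    let mut ctr = [0u128; 4]; // per-address access counters
    let (mut init, mut rs, mut ws, mut fin) = (1u128, 1u128, 1u128, 1u128);

    for (a, &v) in mem.iter().enumerate() {
        init = mul(init, fingerprint(a as u128, v, 0, gamma, tau));
    }
    for &a in &trace {
        rs = mul(rs, fingerprint(a as u128, mem[a], ctr[a], gamma, tau)); // read set
        ctr[a] += 1;
        ws = mul(ws, fingerprint(a as u128, mem[a], ctr[a], gamma, tau)); // write set
    }
    for (a, &v) in mem.iter().enumerate() {
        fin = mul(fin, fingerprint(a as u128, v, ctr[a], gamma, tau));
    }
    // The multiset identity the row/col GPAs enforce:
    assert_eq!(mul(init, ws), mul(rs, fin));
}
```

Randomly sampled gamma and tau make the product equality a sound proxy for multiset equality of the access tuples, which is why only the final counters and the RS/WS openings need to be hinted.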
@@ -66,10 +73,10 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { io = io .hint("row_rs_address_claimed_evaluation") .hint("row_rs_value_claimed_evaluation") - .hint("row_rs_timestamp_claimed_evaluation"); - // .add_whir_proof(&configs.a_3batched); + .hint("row_rs_timestamp_claimed_evaluation") + .add_whir_proof(&configs.a_3batched); - // // Colwise + // Colwise io = io.add_tau_and_gamma(); @@ -79,8 +86,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { } io = io - .hint("col_final_counter_claimed_evaluation"); - // .add_whir_proof(&configs.col); + .hint("col_final_counter_claimed_evaluation") + .add_whir_proof(&configs.col); for i in 0..=next_power_of_two(r1cs.a.num_entries()) { io = io.add_sumcheck_polynomials(i); @@ -90,10 +97,10 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { io = io .hint("col_rs_address_claimed_evaluation") .hint("col_rs_value_claimed_evaluation") - .hint("col_rs_timestamp_claimed_evaluation"); - // .add_whir_proof(&configs.a_3batched); + .hint("col_rs_timestamp_claimed_evaluation") + .add_whir_proof(&configs.a_3batched); - // // Matrix B + // Matrix B io = io .commit_statement(&configs.b_3batched) @@ -102,8 +109,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { .commit_statement(&configs.row) .commit_statement(&configs.col) .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) - .hint("sumcheck_last_folds"); - // .add_whir_proof(&configs.b_3batched); + .hint("sumcheck_last_folds") + .add_whir_proof(&configs.b_3batched); // Rowwise @@ -115,8 +122,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { } io = io - .hint("row_final_counter_claimed_evaluation"); - // .add_whir_proof(&configs.row); + .hint("row_final_counter_claimed_evaluation") + .add_whir_proof(&configs.row); for i in 0..=next_power_of_two(r1cs.b.num_entries()) { io = io.add_sumcheck_polynomials(i); @@ -126,8 +133,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { io = io .hint("row_rs_address_claimed_evaluation") .hint("row_rs_value_claimed_evaluation") - .hint("row_rs_timestamp_claimed_evaluation"); - // .add_whir_proof(&configs.b_3batched); + .hint("row_rs_timestamp_claimed_evaluation") + .add_whir_proof(&configs.b_3batched); // Colwise @@ -139,8 +146,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { } io = io - .hint("col_final_counter_claimed_evaluation"); - // .add_whir_proof(&configs.col); + .hint("col_final_counter_claimed_evaluation") + .add_whir_proof(&configs.col); for i in 0..=next_power_of_two(r1cs.b.num_entries()) { io = io.add_sumcheck_polynomials(i); @@ -150,10 +157,10 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { io = io .hint("col_rs_address_claimed_evaluation") .hint("col_rs_value_claimed_evaluation") - .hint("col_rs_timestamp_claimed_evaluation"); - // .add_whir_proof(&configs.b_3batched); + .hint("col_rs_timestamp_claimed_evaluation") + .add_whir_proof(&configs.b_3batched); - // // Matrix C + // Matrix C io = io .commit_statement(&configs.c_3batched) @@ -162,8 +169,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { .commit_statement(&configs.row) .commit_statement(&configs.col) .add_sumcheck_polynomials(next_power_of_two(r1cs.c.num_entries())) - .hint("sumcheck_last_folds"); - // .add_whir_proof(&configs.c_3batched); + .hint("sumcheck_last_folds") + 
.add_whir_proof(&configs.c_3batched); // Rowwise @@ -175,8 +182,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { } io = io - .hint("row_final_counter_claimed_evaluation"); - // .add_whir_proof(&configs.row); + .hint("row_final_counter_claimed_evaluation") + .add_whir_proof(&configs.row); for i in 0..=next_power_of_two(r1cs.c.num_entries()) { io = io.add_sumcheck_polynomials(i); @@ -186,8 +193,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { io = io .hint("row_rs_address_claimed_evaluation") .hint("row_rs_value_claimed_evaluation") - .hint("row_rs_timestamp_claimed_evaluation"); - // .add_whir_proof(&configs.c_3batched); + .hint("row_rs_timestamp_claimed_evaluation") + .add_whir_proof(&configs.c_3batched); // Colwise @@ -199,8 +206,8 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { } io = io - .hint("col_final_counter_claimed_evaluation"); - // .add_whir_proof(&configs.col); + .hint("col_final_counter_claimed_evaluation") + .add_whir_proof(&configs.col); for i in 0..=next_power_of_two(r1cs.c.num_entries()) { io = io.add_sumcheck_polynomials(i); @@ -210,7 +217,7 @@ pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { io = io .hint("col_rs_address_claimed_evaluation") .hint("col_rs_value_claimed_evaluation") - .hint("col_rs_timestamp_claimed_evaluation"); - // .add_whir_proof(&configs.c_3batched); + .hint("col_rs_timestamp_claimed_evaluation") + .add_whir_proof(&configs.c_3batched); io } diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index 97729e41..27904b0d 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -67,7 +67,4 @@ pub struct SPARKProofGnark { pub log_a_num_terms: usize, pub log_b_num_terms: usize, pub log_c_num_terms: usize, - // Remove once spark testing is completed - #[serde(with="serde_ark")] - pub claimed_value_for_a: FieldElement, } \ No newline at end of file From c6218eb4e8ea89187c19ee8d26131d5ce62b819c Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Thu, 2 Oct 2025 15:04:34 +0800 Subject: [PATCH 22/34] Bugg: RLC optimization - val not properly committed --- spark-prover/src/bin/spark-verifier.rs | 42 +-- spark-prover/src/main.rs | 296 ++++++++++++++++---- spark-prover/src/spark.rs | 13 +- spark-prover/src/utilities/iopattern/mod.rs | 7 +- spark-prover/src/utilities/mod.rs | 16 +- spark-prover/src/whir.rs | 7 + 6 files changed, 279 insertions(+), 102 deletions(-) diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index 082c8be6..14c5396a 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -38,48 +38,32 @@ fn main() -> Result<()> { let io = IOPattern::from_string(spark_proof.io_pattern.clone()); let mut arthur = io.to_verifier_state(&spark_proof.transcript); - let claimed_a: FieldElement = arthur.hint()?; - let claimed_b: FieldElement = arthur.hint()?; - let claimed_c: FieldElement = arthur.hint()?; let point_row: Vec = arthur.hint()?; let point_col: Vec = arthur.hint()?; + + let mut claimed_values = [FieldElement::from(0); 3]; + arthur.fill_next_scalars(&mut claimed_values)?; - verify_spark_single_matrix( - &spark_proof.whir_params.row, - &spark_proof.whir_params.col, - &spark_proof.whir_params.a_3batched, - spark_proof.matrix_dimensions.num_rows, - spark_proof.matrix_dimensions.num_cols, - spark_proof.matrix_dimensions.a_nonzero_terms, - &mut arthur, - &request, - 
&request.claimed_values.a, - )?; + let mut matrix_batching_randomness = [FieldElement::from(0); 1]; + arthur.fill_challenge_scalars(&mut matrix_batching_randomness)?; + let matrix_batching_randomness = matrix_batching_randomness[0]; + let claimed_value = + claimed_values[0] * matrix_batching_randomness * matrix_batching_randomness + + claimed_values[1] * matrix_batching_randomness + + claimed_values[2]; verify_spark_single_matrix( &spark_proof.whir_params.row, &spark_proof.whir_params.col, - &spark_proof.whir_params.b_3batched, + &spark_proof.whir_params.num_terms_3batched, spark_proof.matrix_dimensions.num_rows, spark_proof.matrix_dimensions.num_cols, - spark_proof.matrix_dimensions.b_nonzero_terms, + spark_proof.matrix_dimensions.nonzero_terms, &mut arthur, &request, - &request.claimed_values.b, + &claimed_value, )?; - verify_spark_single_matrix( - &spark_proof.whir_params.row, - &spark_proof.whir_params.col, - &spark_proof.whir_params.c_3batched, - spark_proof.matrix_dimensions.num_rows, - spark_proof.matrix_dimensions.num_cols, - spark_proof.matrix_dimensions.c_nonzero_terms, - &mut arthur, - &request, - &request.claimed_values.c, - )?; - Ok(()) } diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index e402ca00..faef9438 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -1,77 +1,251 @@ use { - anyhow::{Context, Result}, - provekit_common::{file::write, gnark::WHIRConfigGnark, utils::next_power_of_two}, - spark_prover::{ - memory::{calculate_e_values_for_r1cs, calculate_memory}, - spark::prove_spark_for_single_matrix, + anyhow::{Context, Result}, ark_ff::AdditiveGroup, provekit_common::{file::write, gnark::WHIRConfigGnark, utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, FieldElement, IOPattern, WhirR1CSScheme}, provekit_r1cs_compiler::WhirR1CSSchemeBuilder, spark_prover::{ + memory::{calculate_e_values_for_r1cs, calculate_memory, EValuesForMatrix, Memory}, + spark::{prove_spark_for_single_matrix, run_spark_sumcheck}, utilities::{ - calculate_matrix_dimensions, create_io_pattern, deserialize_r1cs, deserialize_request, - get_spark_r1cs, SPARKProof, SPARKProofGnark, + calculate_matrix_dimensions, create_io_pattern, deserialize_r1cs, deserialize_request, get_spark_r1cs, iopattern::SPARKDomainSeparator, matrix::{COOMatrix, SparkMatrix, TimeStamps}, MatrixDimensions, MatrixDimensionsNew, SPARKProof, SPARKProofGnark }, - whir::create_whir_configs, - }, - std::{fs::File, io::Write, mem}, whir::whir::utils::HintSerialize, + whir::{create_whir_configs, SPARKWHIRConfigsNew}, + }, spongefish::codecs::arkworks_algebra::{FieldDomainSeparator, FieldToUnitSerialize, UnitToField}, std::{collections::BTreeMap, fs::File, io::Write, mem}, whir::{poly_utils::evals::EvaluationsList, whir::{committer::CommitmentWriter, domainsep::WhirDomainSeparator, statement::Statement, utils::HintSerialize}} }; fn main() -> Result<()> { // Run once when receiving the matrix let r1cs = deserialize_r1cs("spark-prover/r1cs.json") .context("Error: Failed to create the R1CS object")?; - let spark_r1cs = get_spark_r1cs(&r1cs); - let spark_whir_configs = create_whir_configs(&r1cs); + + // get combined matrix non-zero value coordinates + + let mut combined_matrix_map: BTreeMap<(usize, usize), FieldElement> = r1cs.a().iter().map(|(coordinate, _)| (coordinate, FieldElement::ZERO)).collect(); + for (coordinate, _) in r1cs.b().iter() { + combined_matrix_map.entry(coordinate).or_insert(FieldElement::ZERO); + } + for (coordinate, _) in r1cs.c().iter() { + 
combined_matrix_map.entry(coordinate).or_insert(FieldElement::ZERO); + } + + // generate padded row and col + + let originial_num_entries = combined_matrix_map.keys().count(); + let padded_num_entries = 1<>(); + + let final_col = read_col_counters + .iter() + .map(|&x| FieldElement::from(x as u64)) + .collect::>(); // Run for each request let request = deserialize_request("spark-prover/request.json") .context("Error: Failed to deserialize the request object")?; let memory = calculate_memory(request.point_to_evaluate.clone()); - let e_values = calculate_e_values_for_r1cs(&memory, &r1cs); - let io_pattern = create_io_pattern(&r1cs, &spark_whir_configs); - let mut merlin = io_pattern.to_prover_state(); - merlin.hint(&request.claimed_values.a)?; - merlin.hint(&request.claimed_values.b)?; - merlin.hint(&request.claimed_values.c)?; + let mut e_rx = Vec::with_capacity(padded_num_entries); + let mut e_ry = Vec::with_capacity(padded_num_entries); + + for (r, c) in combined_matrix_map.keys() { + e_rx.push(memory.eq_rx[*r]); + e_ry.push(memory.eq_ry[*c]); + } + + e_rx.extend(std::iter::repeat(memory.eq_rx[0]).take(to_fill)); + e_ry.extend(std::iter::repeat(memory.eq_ry[0]).take(to_fill)); + + // Create whir config + let row_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.num_constraints()), 1); + let col_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.num_witnesses()), 1); + let num_terms_3batched_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 3); + + // Create io_pattern + let mut io = IOPattern::new("💥"); + + // Matrix A + io = io + .hint("point_row") + .hint("point_col") + .add_claimed_evaluations(); + + + io = io + .commit_statement(&num_terms_3batched_config) + .commit_statement(&num_terms_3batched_config) + .commit_statement(&num_terms_3batched_config) + .commit_statement(&row_config) + .commit_statement(&col_config) + .add_sumcheck_polynomials(next_power_of_two(padded_num_entries)) + .hint("sumcheck_last_folds") + .add_whir_proof(&num_terms_3batched_config); + + // Rowwise + + io = io.add_tau_and_gamma(); + + for i in 0..=next_power_of_two(r1cs.num_constraints()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } + + io = io + .hint("row_final_counter_claimed_evaluation") + .add_whir_proof(&row_config); + + // Can I send all hints once in struct? 
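The point of this patch is the `matrix_batching_randomness` step below: A, B and C need not be proved separately. After the verifier samples r, the prover runs SPARK once on the combined matrix M = r²·A + r·B + C (this patch assigns r² to A, r to B and 1 to C; patch 23 flips the ordering), and by linearity the combined claim is r²·a + r·b + c. A minimal sketch of that linearity over a toy prime, with small dense matrices standing in for the sparse `combined_matrix_map` entries (all names and values illustrative):

```rust
const P: u128 = 2_305_843_009_213_693_951; // 2^61 - 1, a toy modulus

fn mul(a: u128, b: u128) -> u128 { a * b % P }

// A fixed linear functional <M, W> stands in for the MLE evaluation M~(point),
// since evaluation at a point is linear in the matrix entries.
fn eval(m: &[u128; 4], w: &[u128; 4]) -> u128 {
    m.iter().zip(w).fold(0, |acc, (&mi, &wi)| (acc + mul(mi, wi)) % P)
}

fn main() {
    let (a, b, c) = ([1u128, 2, 3, 4], [5u128, 0, 6, 0], [0u128, 7, 0, 8]);
    let w = [9u128, 10, 11, 12]; // fixed evaluation functional
    let r = 1_234_567u128;       // verifier's matrix batching randomness
    let r2 = mul(r, r);

    // Combined matrix M = r^2*A + r*B + C, entrywise.
    let mut m = [0u128; 4];
    for i in 0..4 {
        m[i] = (mul(r2, a[i]) + mul(r, b[i]) + c[i]) % P;
    }
    // Linearity: evaluating M equals the RLC of the three per-matrix claims.
    let lhs = eval(&m, &w);
    let rhs = (mul(r2, eval(&a, &w)) + mul(r, eval(&b, &w)) + eval(&c, &w)) % P;
    assert_eq!(lhs, rhs);
}
```

This is what lets the verifier below replace the three per-matrix `verify_spark_single_matrix` calls with a single call on the combined claim, at the cost of one extra challenge.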
+ + for i in 0..=next_power_of_two(padded_num_entries) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } + + io = io + .hint("row_rs_address_claimed_evaluation") + .hint("row_rs_value_claimed_evaluation") + .hint("row_rs_timestamp_claimed_evaluation") + .add_whir_proof(&num_terms_3batched_config); + + // Colwise + + io = io.add_tau_and_gamma(); + + for i in 0..=next_power_of_two(r1cs.num_witnesses()) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } + + io = io + .hint("col_final_counter_claimed_evaluation") + .add_whir_proof(&col_config); + + for i in 0..=next_power_of_two(padded_num_entries) { + io = io.add_sumcheck_polynomials(i); + io = io.add_line(); + } + + io = io + .hint("col_rs_address_claimed_evaluation") + .hint("col_rs_value_claimed_evaluation") + .hint("col_rs_timestamp_claimed_evaluation") + .add_whir_proof(&num_terms_3batched_config); + + + // Prover + + let mut merlin = io.to_prover_state(); merlin.hint(&request.point_to_evaluate.row)?; merlin.hint(&request.point_to_evaluate.col)?; + // Calculate the RLC of the matrices + merlin.add_scalars(&[request.claimed_values.a, request.claimed_values.b, request.claimed_values.c])?; + let mut matrix_batching_randomness = [FieldElement::ZERO; 1]; + merlin.fill_challenge_scalars(&mut matrix_batching_randomness)?; + let matrix_batching_randomness = matrix_batching_randomness[0]; + let matrix_batching_randomness_sq = matrix_batching_randomness * matrix_batching_randomness; - prove_spark_for_single_matrix( - &mut merlin, - spark_r1cs.a, - &memory, - e_values.a, - request.claimed_values.a, - &spark_whir_configs, - &spark_whir_configs.a_3batched, - )?; + for (coordinate, value) in r1cs.a().iter() { + combined_matrix_map.entry(coordinate).and_modify(|cur| *cur += value * matrix_batching_randomness_sq); + } - prove_spark_for_single_matrix( - &mut merlin, - spark_r1cs.b, - &memory, - e_values.b, - request.claimed_values.b, - &spark_whir_configs, - &spark_whir_configs.b_3batched, - )?; + for (coordinate, value) in r1cs.b().iter() { + combined_matrix_map.entry(coordinate).and_modify(|cur| *cur += value * matrix_batching_randomness); + } + + for (coordinate, value) in r1cs.c().iter() { + combined_matrix_map.entry(coordinate).and_modify(|cur| *cur += value ); + } + let mut val = Vec::with_capacity(padded_num_entries); + for value in combined_matrix_map.values() { + val.push(*value); + } + val.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); + + let claimed_value = request.claimed_values.a * matrix_batching_randomness_sq + + request.claimed_values.b * matrix_batching_randomness + + request.claimed_values.c; + + let spark_matrix = SparkMatrix { + coo: COOMatrix{ + row, + col, + val, + }, + timestamps: TimeStamps { + read_row, + read_col, + final_row, + final_col, + } + }; + + + let e_values = EValuesForMatrix { + e_rx, + e_ry, + }; + + let configs = SPARKWHIRConfigsNew { + row: row_config, + col: col_config, + num_terms_3batched: num_terms_3batched_config, + }; + prove_spark_for_single_matrix( &mut merlin, - spark_r1cs.c, + spark_matrix, &memory, - e_values.c, - request.claimed_values.c, - &spark_whir_configs, - &spark_whir_configs.c_3batched, + e_values, + claimed_value, + &configs, )?; let spark_proof = SPARKProof { transcript: merlin.narg_string().to_vec(), - io_pattern: String::from_utf8(io_pattern.as_bytes().to_vec()).unwrap(), - whir_params: spark_whir_configs, - matrix_dimensions: calculate_matrix_dimensions(&r1cs), + io_pattern: String::from_utf8(io.as_bytes().to_vec()).unwrap(), + whir_params: 
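// consolidated SPARKWHIRConfigsNew bundle for the single RLC-combined matrix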
configs, + matrix_dimensions: MatrixDimensionsNew { + num_rows: r1cs.num_constraints(), + num_cols: r1cs.num_witnesses(), + nonzero_terms: originial_num_entries, + }, }; let mut spark_proof_file = File::create("spark-prover/spark_proof.json") @@ -81,25 +255,25 @@ fn main() -> Result<()> { .write_all(serde_json::to_string(&spark_proof).unwrap().as_bytes()) .expect("Writing gnark parameters to a file failed"); - let spark_proof_gnark = SPARKProofGnark { - transcript: spark_proof.transcript, - io_pattern: spark_proof.io_pattern, - whir_row: WHIRConfigGnark::new(&spark_proof.whir_params.row), - whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col), - whir_a3: WHIRConfigGnark::new(&spark_proof.whir_params.a_3batched), - whir_b3: WHIRConfigGnark::new(&spark_proof.whir_params.b_3batched), - whir_c3: WHIRConfigGnark::new(&spark_proof.whir_params.c_3batched), - log_a_num_terms: next_power_of_two(r1cs.a.num_entries()), - log_b_num_terms: next_power_of_two(r1cs.b.num_entries()), - log_c_num_terms: next_power_of_two(r1cs.c.num_entries()), - }; + // let spark_proof_gnark = SPARKProofGnark { + // transcript: spark_proof.transcript, + // io_pattern: spark_proof.io_pattern, + // whir_row: WHIRConfigGnark::new(&spark_proof.whir_params.row), + // whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col), + // whir_a3: WHIRConfigGnark::new(&spark_proof.whir_params.a_3batched), + // whir_b3: WHIRConfigGnark::new(&spark_proof.whir_params.b_3batched), + // whir_c3: WHIRConfigGnark::new(&spark_proof.whir_params.c_3batched), + // log_a_num_terms: next_power_of_two(padded_num_entries), + // log_b_num_terms: next_power_of_two(r1cs.b.num_entries()), + // log_c_num_terms: next_power_of_two(r1cs.c.num_entries()), + // }; - let mut gnark_spark_proof_file = File::create("spark-prover/gnark_spark_proof.json") - .context("Error: Failed to create the spark proof file")?; + // let mut gnark_spark_proof_file = File::create("spark-prover/gnark_spark_proof.json") + // .context("Error: Failed to create the spark proof file")?; - gnark_spark_proof_file - .write_all(serde_json::to_string(&spark_proof_gnark).unwrap().as_bytes()) - .expect("Writing spark gnark parameters to a file failed"); + // gnark_spark_proof_file + // .write_all(serde_json::to_string(&spark_proof_gnark).unwrap().as_bytes()) + // .expect("Writing spark gnark parameters to a file failed"); Ok(()) } diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index b4b7b92c..6c092ebd 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -3,7 +3,7 @@ use { gpa::run_gpa, memory::{EValuesForMatrix, Memory}, utilities::matrix::SparkMatrix, - whir::{commit_to_vector, produce_whir_proof, SPARKWHIRConfigs}, + whir::{commit_to_vector, produce_whir_proof, SPARKWHIRConfigsNew}, }, anyhow::{ensure, Result}, itertools::izip, @@ -31,12 +31,11 @@ pub fn prove_spark_for_single_matrix( memory: &Memory, e_values: EValuesForMatrix, claimed_value: FieldElement, - whir_configs: &SPARKWHIRConfigs, - batched_config: &WhirConfig, + whir_configs: &SPARKWHIRConfigsNew, ) -> Result<()> { let row_committer = CommitmentWriter::new(whir_configs.row.clone()); let col_committer = CommitmentWriter::new(whir_configs.col.clone()); - let batched_committer = CommitmentWriter::new(batched_config.clone()); + let batched_committer = CommitmentWriter::new(whir_configs.num_terms_3batched.clone()); let sumcheck_witness = batched_committer.commit_batch(merlin, &[ EvaluationsList::new(matrix.coo.val.clone()).to_coeffs(), @@ -80,7 +79,7 @@ pub fn 
prove_spark_for_single_matrix( sumcheck_statement.add_constraint( Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value); - let sumcheck_prover = Prover::new(batched_config.clone()); + let sumcheck_prover = Prover::new(whir_configs.num_terms_3batched.clone()); sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?; // Rowwise @@ -181,7 +180,7 @@ pub fn prove_spark_for_single_matrix( rowwise_statement.add_constraint( Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); - let sumcheck_prover = Prover::new(batched_config.clone()); + let sumcheck_prover = Prover::new(whir_configs.num_terms_3batched.clone()); sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?; // Colwise @@ -282,7 +281,7 @@ pub fn prove_spark_for_single_matrix( colwise_statement.add_constraint( Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval); - let sumcheck_prover = Prover::new(batched_config.clone()); + let sumcheck_prover = Prover::new(whir_configs.num_terms_3batched.clone()); sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; Ok(()) diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs index b0b892e0..3657a3f6 100644 --- a/spark-prover/src/utilities/iopattern/mod.rs +++ b/spark-prover/src/utilities/iopattern/mod.rs @@ -10,8 +10,8 @@ use { pub trait SPARKDomainSeparator { fn add_tau_and_gamma(self) -> Self; - fn add_line(self) -> Self; + fn add_claimed_evaluations(self) -> Self; } impl SPARKDomainSeparator for IOPattern @@ -26,6 +26,11 @@ where self.add_scalars(2, "gpa line") .challenge_scalars(1, "gpa line random") } + + fn add_claimed_evaluations(self) -> Self { + self.add_scalars(3, "claimed evaluations") + .challenge_scalars(1, "matrix combination randomness") + } } pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index 27904b0d..1456532f 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -1,7 +1,7 @@ -mod iopattern; +pub mod iopattern; pub mod matrix; use { - crate::whir::SPARKWHIRConfigs, + crate::whir::{SPARKWHIRConfigs, SPARKWHIRConfigsNew}, anyhow::{Context, Result}, provekit_common::{ gnark::WHIRConfigGnark, spark::SPARKRequest, utils::{next_power_of_two, serde_ark, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq}, FieldElement, HydratedSparseMatrix, WhirConfig, R1CS @@ -32,8 +32,8 @@ pub fn deserialize_request(path_str: &str) -> Result { pub struct SPARKProof { pub transcript: Vec, pub io_pattern: String, - pub whir_params: SPARKWHIRConfigs, - pub matrix_dimensions: MatrixDimensions, + pub whir_params: SPARKWHIRConfigsNew, + pub matrix_dimensions: MatrixDimensionsNew, } #[derive(Serialize, Deserialize)] @@ -45,6 +45,14 @@ pub struct MatrixDimensions { pub c_nonzero_terms: usize, } + +#[derive(Serialize, Deserialize)] +pub struct MatrixDimensionsNew { + pub num_rows: usize, + pub num_cols: usize, + pub nonzero_terms: usize, +} + pub fn calculate_matrix_dimensions(r1cs: &R1CS) -> MatrixDimensions { MatrixDimensions { num_rows: r1cs.a.num_rows, diff --git a/spark-prover/src/whir.rs b/spark-prover/src/whir.rs index d6dbad23..899aa98c 100644 --- a/spark-prover/src/whir.rs +++ b/spark-prover/src/whir.rs @@ -46,6 +46,13 @@ pub struct SPARKWHIRConfigs { pub c_3batched: WhirConfig, } +#[derive(Serialize, Deserialize)] +pub struct 
SPARKWHIRConfigsNew { + pub row: WhirConfig, + pub col: WhirConfig, + pub num_terms_3batched: WhirConfig, +} + pub fn create_whir_configs(r1cs: &R1CS) -> SPARKWHIRConfigs { SPARKWHIRConfigs { row: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_rows), 1), From 0462090360cbc0760b60b96e2070c9698da40da3 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Thu, 2 Oct 2025 16:43:39 +0800 Subject: [PATCH 23/34] Rust rlc prover/verifier prototype --- spark-prover/src/bin/spark-verifier.rs | 59 +++++++++++-------- spark-prover/src/main.rs | 72 +++++++++++++++++++----- spark-prover/src/spark.rs | 52 ++++++++++++----- spark-prover/src/utilities/matrix/mod.rs | 14 +++++ spark-prover/src/whir.rs | 1 + 5 files changed, 150 insertions(+), 48 deletions(-) diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index 14c5396a..c8b05b3e 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -7,7 +7,7 @@ use { sumcheck::{calculate_eq, eval_cubic_poly}, }, FieldElement, IOPattern, WhirConfig }, - spark_prover::utilities::SPARKProof, + spark_prover::{utilities::SPARKProof, whir::SPARKWHIRConfigsNew}, spongefish::{ codecs::arkworks_algebra::{FieldToUnitDeserialize, UnitToField}, VerifierState, @@ -47,15 +47,15 @@ fn main() -> Result<()> { let mut matrix_batching_randomness = [FieldElement::from(0); 1]; arthur.fill_challenge_scalars(&mut matrix_batching_randomness)?; let matrix_batching_randomness = matrix_batching_randomness[0]; + let claimed_value = - claimed_values[0] * matrix_batching_randomness * matrix_batching_randomness + claimed_values[0] + claimed_values[1] * matrix_batching_randomness - + claimed_values[2]; + + claimed_values[2] * matrix_batching_randomness * matrix_batching_randomness; verify_spark_single_matrix( - &spark_proof.whir_params.row, - &spark_proof.whir_params.col, - &spark_proof.whir_params.num_terms_3batched, + &matrix_batching_randomness, + &spark_proof.whir_params, spark_proof.matrix_dimensions.num_rows, spark_proof.matrix_dimensions.num_cols, spark_proof.matrix_dimensions.nonzero_terms, @@ -68,9 +68,8 @@ fn main() -> Result<()> { } pub fn verify_spark_single_matrix( - row_config: &WhirConfig, - col_config: &WhirConfig, - num_nonzero_term_batched3_config: &WhirConfig, + matrix_batching_randomness: &FieldElement, + whir_params: &SPARKWHIRConfigsNew, num_rows: usize, num_cols: usize, num_nonzero_terms: usize, @@ -78,14 +77,15 @@ pub fn verify_spark_single_matrix( request: &SPARKRequest, claimed_value: &FieldElement, ) -> Result<()> { - let commitment_reader_row = CommitmentReader::new(row_config); - let commitment_reader_col = CommitmentReader::new(col_config); + let commitment_reader_row = CommitmentReader::new(&whir_params.row); + let commitment_reader_col = CommitmentReader::new(&whir_params.col); // Matrix A - let a_3batched_commitment_reader = CommitmentReader::new(num_nonzero_term_batched3_config); + let a_3batched_commitment_reader = CommitmentReader::new(&whir_params.num_terms_3batched); + let a_5batched_commitment_reader = CommitmentReader::new(&whir_params.num_terms_5batched); - let a_sumcheck_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; + let a_sumcheck_commitment = a_5batched_commitment_reader.parse_commitment(arthur)?; let a_rowwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; let a_colwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; @@ -103,20 +103,33 @@ pub fn verify_spark_single_matrix( let 
final_folds: Vec = arthur.hint()?; - assert!(a_last_sumcheck_value == final_folds[0] * final_folds[1] * final_folds[2]); + let claimed_val = + final_folds[0] + + final_folds[1] * matrix_batching_randomness + + final_folds[2] * matrix_batching_randomness * matrix_batching_randomness; + assert!(a_last_sumcheck_value == claimed_val * final_folds[3] * final_folds[4]); let mut a_spark_sumcheck_statement_verifier = Statement::::new(next_power_of_two( num_nonzero_terms, )); + let mut batching_randomness = Vec::with_capacity(5); + let mut cur = FieldElement::from(1); + for _ in 0..5 { + batching_randomness.push(cur); + cur *= a_sumcheck_commitment.batching_randomness; + } + a_spark_sumcheck_statement_verifier.add_constraint( Weights::evaluation(MultilinearPoint(randomness.clone())), - final_folds[0] + - final_folds[1] * a_sumcheck_commitment.batching_randomness + - final_folds[2] * a_sumcheck_commitment.batching_randomness * a_sumcheck_commitment.batching_randomness, + final_folds[0] * batching_randomness[0] + + final_folds[1] * batching_randomness[1] + + final_folds[2] * batching_randomness[2] + + final_folds[3] * batching_randomness[3] + + final_folds[4] * batching_randomness[4] ); - let a_spark_sumcheck_verifier = Verifier::new(num_nonzero_term_batched3_config); + let a_spark_sumcheck_verifier = Verifier::new(&whir_params.num_terms_5batched); a_spark_sumcheck_verifier.verify(arthur, &a_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?; // Matrix A - Rowwise @@ -153,7 +166,7 @@ pub fn verify_spark_single_matrix( final_cntr, ); - let final_cntr_verifier = Verifier::new(row_config); + let final_cntr_verifier = Verifier::new(&whir_params.row); final_cntr_verifier .verify(arthur, &a_row_finalts_commitment, &final_cntr_statement) .context("while verifying WHIR")?; @@ -204,7 +217,8 @@ pub fn verify_spark_single_matrix( rs_timestamp * a_rowwise_commitment.batching_randomness * a_rowwise_commitment.batching_randomness, ); - a_spark_sumcheck_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; + let a_rowwise_verifier = Verifier::new(&whir_params.num_terms_3batched); + a_rowwise_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); @@ -245,7 +259,7 @@ pub fn verify_spark_single_matrix( final_cntr, ); - let final_cntr_verifier = Verifier::new(col_config); + let final_cntr_verifier = Verifier::new(&whir_params.col); final_cntr_verifier .verify(arthur, &a_col_finalts_commitment, &final_cntr_statement) .context("while verifying WHIR")?; @@ -298,7 +312,8 @@ pub fn verify_spark_single_matrix( rs_timestamp * a_colwise_commitment.batching_randomness * a_colwise_commitment.batching_randomness, ); - a_spark_sumcheck_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?; + let a_colwise_verifier = Verifier::new(&whir_params.num_terms_3batched); + a_colwise_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?; ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index faef9438..aea0003c 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -1,9 +1,9 @@ use { - anyhow::{Context, Result}, ark_ff::AdditiveGroup, provekit_common::{file::write, gnark::WHIRConfigGnark, utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, FieldElement, IOPattern, WhirR1CSScheme}, provekit_r1cs_compiler::WhirR1CSSchemeBuilder, spark_prover::{ 
+ anyhow::{Context, Result}, ark_ff::AdditiveGroup, ark_std::rand::seq::index, provekit_common::{file::write, gnark::WHIRConfigGnark, utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, FieldElement, IOPattern, WhirR1CSScheme}, provekit_r1cs_compiler::WhirR1CSSchemeBuilder, spark_prover::{ memory::{calculate_e_values_for_r1cs, calculate_memory, EValuesForMatrix, Memory}, spark::{prove_spark_for_single_matrix, run_spark_sumcheck}, utilities::{ - calculate_matrix_dimensions, create_io_pattern, deserialize_r1cs, deserialize_request, get_spark_r1cs, iopattern::SPARKDomainSeparator, matrix::{COOMatrix, SparkMatrix, TimeStamps}, MatrixDimensions, MatrixDimensionsNew, SPARKProof, SPARKProofGnark + calculate_matrix_dimensions, create_io_pattern, deserialize_r1cs, deserialize_request, get_spark_r1cs, iopattern::SPARKDomainSeparator, matrix::{COOMatrix, COOMatrixNew, SparkMatrix, SparkMatrixNew, TimeStamps}, MatrixDimensions, MatrixDimensionsNew, SPARKProof, SPARKProofGnark }, whir::{create_whir_configs, SPARKWHIRConfigsNew}, }, spongefish::codecs::arkworks_algebra::{FieldDomainSeparator, FieldToUnitSerialize, UnitToField}, std::{collections::BTreeMap, fs::File, io::Write, mem}, whir::{poly_utils::evals::EvaluationsList, whir::{committer::CommitmentWriter, domainsep::WhirDomainSeparator, statement::Statement, utils::HintSerialize}} @@ -41,7 +41,46 @@ fn main() -> Result<()> { row.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); col.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); + // generate val vectors + let mut val_a = vec![FieldElement::ZERO; padded_num_entries]; + let mut val_b = vec![FieldElement::ZERO; padded_num_entries]; + let mut val_c = vec![FieldElement::ZERO; padded_num_entries]; + + let a_binding = r1cs.a(); + let b_binding = r1cs.b(); + let c_binding = r1cs.c(); + + let mut a_iter = a_binding.iter(); + let mut b_iter = b_binding.iter(); + let mut c_iter = c_binding.iter(); + let mut a_cur = a_iter.next(); + let mut b_cur = b_iter.next(); + let mut c_cur = c_iter.next(); + + for (index, coordinate) in combined_matrix_map.keys().enumerate() { + if let Some((coord, value)) = a_cur { + if coord == *coordinate { + val_a[index] = value; + a_cur = a_iter.next(); + } + } + + if let Some((coord, value)) = b_cur { + if coord == *coordinate { + val_b[index] = value; + b_cur = b_iter.next(); + } + } + + if let Some((coord, value)) = c_cur { + if coord == *coordinate { + val_c[index] = value; + c_cur = c_iter.next(); + } + } + } + // generate padded timestamps let mut read_row_counters = vec![0; r1cs.num_constraints()]; @@ -94,6 +133,7 @@ fn main() -> Result<()> { let row_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.num_constraints()), 1); let col_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.num_witnesses()), 1); let num_terms_3batched_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 3); + let num_terms_5batched_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 5); // Create io_pattern let mut io = IOPattern::new("💥"); @@ -106,14 +146,14 @@ fn main() -> Result<()> { io = io - .commit_statement(&num_terms_3batched_config) + .commit_statement(&num_terms_5batched_config) .commit_statement(&num_terms_3batched_config) .commit_statement(&num_terms_3batched_config) .commit_statement(&row_config) .commit_statement(&col_config) .add_sumcheck_polynomials(next_power_of_two(padded_num_entries)) .hint("sumcheck_last_folds") - 
.add_whir_proof(&num_terms_3batched_config); + .add_whir_proof(&num_terms_5batched_config); // Rowwise @@ -173,7 +213,7 @@ fn main() -> Result<()> { merlin.hint(&request.point_to_evaluate.row)?; merlin.hint(&request.point_to_evaluate.col)?; - // Calculate the RLC of the matrices + // Calculate the RLC of the matrices (can be also calculated from rlc of val_a, val_b, val_c) merlin.add_scalars(&[request.claimed_values.a, request.claimed_values.b, request.claimed_values.c])?; let mut matrix_batching_randomness = [FieldElement::ZERO; 1]; merlin.fill_challenge_scalars(&mut matrix_batching_randomness)?; @@ -181,7 +221,7 @@ fn main() -> Result<()> { let matrix_batching_randomness_sq = matrix_batching_randomness * matrix_batching_randomness; for (coordinate, value) in r1cs.a().iter() { - combined_matrix_map.entry(coordinate).and_modify(|cur| *cur += value * matrix_batching_randomness_sq); + combined_matrix_map.entry(coordinate).and_modify(|cur| *cur += value); } for (coordinate, value) in r1cs.b().iter() { @@ -189,7 +229,7 @@ fn main() -> Result<()> { } for (coordinate, value) in r1cs.c().iter() { - combined_matrix_map.entry(coordinate).and_modify(|cur| *cur += value ); + combined_matrix_map.entry(coordinate).and_modify(|cur| *cur += value * matrix_batching_randomness_sq); } let mut val = Vec::with_capacity(padded_num_entries); @@ -198,15 +238,21 @@ fn main() -> Result<()> { } val.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); - let claimed_value = request.claimed_values.a * matrix_batching_randomness_sq - + request.claimed_values.b * matrix_batching_randomness - + request.claimed_values.c; + let claimed_value = + request.claimed_values.a + + request.claimed_values.b * matrix_batching_randomness + + request.claimed_values.c * matrix_batching_randomness_sq; + + // Bundle values - let spark_matrix = SparkMatrix { - coo: COOMatrix{ + let spark_matrix = SparkMatrixNew { + coo: COOMatrixNew{ row, col, val, + val_a, + val_b, + val_c, }, timestamps: TimeStamps { read_row, @@ -216,7 +262,6 @@ fn main() -> Result<()> { } }; - let e_values = EValuesForMatrix { e_rx, e_ry, @@ -226,6 +271,7 @@ fn main() -> Result<()> { row: row_config, col: col_config, num_terms_3batched: num_terms_3batched_config, + num_terms_5batched: num_terms_5batched_config, }; prove_spark_for_single_matrix( diff --git a/spark-prover/src/spark.rs b/spark-prover/src/spark.rs index 6c092ebd..d59ff127 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/spark.rs @@ -2,7 +2,7 @@ use { crate::{ gpa::run_gpa, memory::{EValuesForMatrix, Memory}, - utilities::matrix::SparkMatrix, + utilities::matrix::{SparkMatrix, SparkMatrixNew}, whir::{commit_to_vector, produce_whir_proof, SPARKWHIRConfigsNew}, }, anyhow::{ensure, Result}, @@ -20,14 +20,14 @@ use { ProverState, }, whir::{ - poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, + poly_utils::{evals::EvaluationsList, fold, multilinear::MultilinearPoint}, whir::{committer::CommitmentWriter, prover::Prover, statement::{Statement, Weights}, utils::HintSerialize}, }, }; pub fn prove_spark_for_single_matrix( merlin: &mut ProverState, - matrix: SparkMatrix, + matrix: SparkMatrixNew, memory: &Memory, e_values: EValuesForMatrix, claimed_value: FieldElement, @@ -35,21 +35,24 @@ pub fn prove_spark_for_single_matrix( ) -> Result<()> { let row_committer = CommitmentWriter::new(whir_configs.row.clone()); let col_committer = CommitmentWriter::new(whir_configs.col.clone()); - let batched_committer = CommitmentWriter::new(whir_configs.num_terms_3batched.clone()); + let 
batched3_committer = CommitmentWriter::new(whir_configs.num_terms_3batched.clone()); + let batched5_committer = CommitmentWriter::new(whir_configs.num_terms_5batched.clone()); - let sumcheck_witness = batched_committer.commit_batch(merlin, &[ - EvaluationsList::new(matrix.coo.val.clone()).to_coeffs(), + let sumcheck_witness = batched5_committer.commit_batch(merlin, &[ + EvaluationsList::new(matrix.coo.val_a.clone()).to_coeffs(), + EvaluationsList::new(matrix.coo.val_b.clone()).to_coeffs(), + EvaluationsList::new(matrix.coo.val_c.clone()).to_coeffs(), EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(), EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(), ])?; - let rowwise_witness = batched_committer.commit_batch(merlin, &[ + let rowwise_witness = batched3_committer.commit_batch(merlin, &[ EvaluationsList::new(matrix.coo.row.clone()).to_coeffs(), EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(), EvaluationsList::new(matrix.timestamps.read_row.clone()).to_coeffs(), ])?; - let colwise_witness = batched_committer.commit_batch(merlin, &[ + let colwise_witness = batched3_committer.commit_batch(merlin, &[ EvaluationsList::new(matrix.coo.col.clone()).to_coeffs(), EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(), EvaluationsList::new(matrix.timestamps.read_col.clone()).to_coeffs(), @@ -69,17 +72,41 @@ pub fn prove_spark_for_single_matrix( let (sumcheck_final_folds, folding_randomness) = run_spark_sumcheck(merlin, mles, claimed_value)?; + let val_a_eval = EvaluationsList::new(matrix.coo.val_a.clone()) + .evaluate(&MultilinearPoint(folding_randomness.to_vec().clone())); + let val_b_eval = EvaluationsList::new(matrix.coo.val_b.clone()) + .evaluate(&MultilinearPoint(folding_randomness.to_vec().clone())); + let val_c_eval = EvaluationsList::new(matrix.coo.val_c.clone()) + .evaluate(&MultilinearPoint(folding_randomness.to_vec().clone())); + + merlin.hint::>(&[val_a_eval, val_b_eval, val_c_eval, sumcheck_final_folds[1], sumcheck_final_folds[2]].to_vec())?; + let mut sumcheck_statement = Statement::::new(folding_randomness.len()); + + let mut batching_randomness = Vec::with_capacity(5); + let mut cur = FieldElement::from(1); + for _ in 0..5 { + batching_randomness.push(cur); + cur *= sumcheck_witness.batching_randomness; + } + + let claimed_batched_value = - sumcheck_final_folds[0] + - sumcheck_final_folds[1] * sumcheck_witness.batching_randomness + - sumcheck_final_folds[2] * sumcheck_witness.batching_randomness * sumcheck_witness.batching_randomness; + val_a_eval * batching_randomness[0] + + val_b_eval * batching_randomness[1] + + val_c_eval * batching_randomness[2] + + sumcheck_final_folds[1] * batching_randomness[3] + + sumcheck_final_folds[2] * batching_randomness[4]; + + println!("{:?}", batching_randomness); //Reilabs Debug: + println!("{:?}", claimed_batched_value); //Reilabs Debug: + println!("{:?}", sumcheck_witness.batched_poly().evaluate(&MultilinearPoint(folding_randomness.to_vec().clone()))); //Reilabs Debug: sumcheck_statement.add_constraint( Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value); - let sumcheck_prover = Prover::new(whir_configs.num_terms_3batched.clone()); + let sumcheck_prover = Prover::new(whir_configs.num_terms_5batched.clone()); sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?; // Rowwise @@ -354,7 +381,6 @@ pub fn run_spark_sumcheck( let folded_v1 = m1[0] + (m1[1] - m1[0]) * sumcheck_randomness[0]; let folded_v2 = m2[0] + (m2[1] - m2[0]) * sumcheck_randomness[0]; - merlin.hint::>(&[folded_v0, 
folded_v1, folded_v2].to_vec())?;
 
     Ok((
         [folded_v0, folded_v1, folded_v2],
         sumcheck_randomness_accumulator,
diff --git a/spark-prover/src/utilities/matrix/mod.rs b/spark-prover/src/utilities/matrix/mod.rs
index 2950b718..e3d19521 100644
--- a/spark-prover/src/utilities/matrix/mod.rs
+++ b/spark-prover/src/utilities/matrix/mod.rs
@@ -13,12 +13,26 @@ pub struct SparkMatrix {
     pub timestamps: TimeStamps,
 }
 #[derive(Debug)]
+pub struct SparkMatrixNew {
+    pub coo: COOMatrixNew,
+    pub timestamps: TimeStamps,
+}
+#[derive(Debug)]
 pub struct COOMatrix {
     pub row: Vec<FieldElement>,
     pub col: Vec<FieldElement>,
     pub val: Vec<FieldElement>,
 }
 #[derive(Debug)]
+pub struct COOMatrixNew {
+    pub row: Vec<FieldElement>,
+    pub col: Vec<FieldElement>,
+    pub val: Vec<FieldElement>,
+    pub val_a: Vec<FieldElement>,
+    pub val_b: Vec<FieldElement>,
+    pub val_c: Vec<FieldElement>,
+}
+#[derive(Debug)]
 pub struct TimeStamps {
     pub read_row: Vec<FieldElement>,
     pub read_col: Vec<FieldElement>,
diff --git a/spark-prover/src/whir.rs b/spark-prover/src/whir.rs
index 899aa98c..fe3f0072 100644
--- a/spark-prover/src/whir.rs
+++ b/spark-prover/src/whir.rs
@@ -51,6 +51,7 @@ pub struct SPARKWHIRConfigsNew {
     pub row: WhirConfig,
     pub col: WhirConfig,
     pub num_terms_3batched: WhirConfig,
+    pub num_terms_5batched: WhirConfig,
 }
 
 pub fn create_whir_configs(r1cs: &R1CS) -> SPARKWHIRConfigs {

From dedc0dc3b951876f02922711b2bbe403040595bf Mon Sep 17 00:00:00 2001
From: Batmend Batsaikhan
Date: Fri, 3 Oct 2025 14:38:01 +0800
Subject: [PATCH 24/34] working rlc

---
 recursive-verifier/app/circuit/circuit.go     | 245 ++++--------------
 recursive-verifier/app/circuit/common.go      |  60 +----
 recursive-verifier/app/circuit/types.go       |  18 +-
 spark-prover/Cargo.toml                       |   2 +-
 spark-prover/src/bin/generate_test_r1cs.rs    |  10 +-
 spark-prover/src/bin/generate_test_request.rs |  13 +-
 spark-prover/src/bin/spark-verifier.rs        |  52 ++--
 spark-prover/src/main.rs                      |  38 ++-
 spark-prover/src/utilities/mod.rs             |  13 +-
 9 files changed, 148 insertions(+), 303 deletions(-)

diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go
index 28c764b4..3db6a053 100644
--- a/recursive-verifier/app/circuit/circuit.go
+++ b/recursive-verifier/app/circuit/circuit.go
@@ -1,7 +1,6 @@
 package circuit
 
 import (
-	"fmt"
 	"log"
 	"os"
 
@@ -43,10 +42,10 @@ type Circuit struct {
 	IO []byte
 
 	UseSpark        bool
-	SPARKTranscript []uints.U8 `gnark:",public"`
+	SPARKTranscript []uints.U8
 	SPARKIO         []byte
 
-	Transcript []uints.U8 `gnark:",public"`
+	Transcript []uints.U8
 	WHIRRow    WHIRParams
 	WHIRCol    WHIRParams
@@ -54,8 +53,6 @@ type Circuit struct {
 	PointCol []frontend.Variable
 
 	SparkA SPARKMatrixData
-	SparkB SPARKMatrixData
-	SparkC SPARKMatrixData
 }
 
 func (circuit *Circuit) Define(api frontend.API) error {
 		if err != nil {
 			return err
 		}
-
-		err = sparkSingleMatrix(
-			api,
-			arthur,
-			uapi,
-			sc,
-			circuit.SparkB,
-			circuit,
-		)
-		if err != nil {
-			return err
-		}
-
-		err = sparkSingleMatrix(
-			api,
-			arthur,
-			uapi,
-			sc,
-			circuit.SparkC,
-			circuit,
-		)
-		if err != nil {
-			return err
-		}
 	} else {
 		matrixExtensionEvals := evaluateR1CSMatrixExtension(api, circuit, spartanSumcheckRand, whirFoldingRandomness)
@@ -177,23 +150,13 @@ func verifyCircuit(
 	witnessLinearStatementEvaluations[1] = typeConverters.LimbsToBigIntMod(deferred[2].Limbs)
 	witnessLinearStatementEvaluations[2] = typeConverters.LimbsToBigIntMod(deferred[3].Limbs)
 
-	acontSparkSumcheckLast := make([]frontend.Variable, 3)
-	asparkSumcheckLast := make([]frontend.Variable, 3)
+	acontSparkSumcheckLast := make([]frontend.Variable, 5)
+	asparkSumcheckLast := make([]frontend.Variable, 5)
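	// NOTE (editor): a sketch of the new hint layout, inferred from the
	// sparkSingleMatrix changes later in this patch. With the three matrices
	// batched into a single RLC'd SPARK instance, the prover hints five
	// claimed evaluations instead of three:
	//
	//   asparkSumcheckLast[0..2] = val_a, val_b, val_c at the sumcheck point
	//   asparkSumcheckLast[3..4] = e_rx, e_ry at the same point
	//
	// and the in-circuit sumcheck identity becomes, with
	// r = matrixCombinationRandomness[0]:
	//
	//   lastEval == (val_a + r*val_b + r^2*val_c) * e_rx * e_ry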
asparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[0].Limbs) asparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[1].Limbs) asparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[2].Limbs) - - bcontSparkSumcheckLast := make([]frontend.Variable, 3) - bsparkSumcheckLast := make([]frontend.Variable, 3) - bsparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(hints.BHints.sparkClaimedEvaluations[0].Limbs) - bsparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(hints.BHints.sparkClaimedEvaluations[1].Limbs) - bsparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(hints.BHints.sparkClaimedEvaluations[2].Limbs) - - ccontSparkSumcheckLast := make([]frontend.Variable, 3) - csparkSumcheckLast := make([]frontend.Variable, 3) - csparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(hints.CHints.sparkClaimedEvaluations[0].Limbs) - csparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(hints.CHints.sparkClaimedEvaluations[1].Limbs) - csparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(hints.CHints.sparkClaimedEvaluations[2].Limbs) + asparkSumcheckLast[3] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[3].Limbs) + asparkSumcheckLast[4] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[4].Limbs) contPointRow := make([]frontend.Variable, len(hints.pointRow)) pointRow := make([]frontend.Variable, len(hints.pointRow)) @@ -207,8 +170,6 @@ func verifyCircuit( pointCol[i] = typeConverters.LimbsToBigIntMod(hints.pointCol[i].Limbs) } - fmt.Print(bsparkSumcheckLast) - fSums, gSums := parseClaimedEvaluations(claimedEvaluations, true) matrixA := make([]MatrixCell, len(internedR1CS.A.Values)) @@ -283,7 +244,7 @@ func verifyCircuit( WHIRRow: NewWhirParams(sparkConfig.WHIRRow), WHIRCol: NewWhirParams(sparkConfig.WHIRCol), - LogANumTerms: sparkConfig.LogANumTerms, + LogANumTerms: sparkConfig.LogNumTerms, PointRow: contPointRow, PointCol: contPointCol, @@ -318,76 +279,9 @@ func verifyCircuit( ColwiseMerkleFirstRound: newMerkle(hints.AHints.colwiseSparkMerkle.firstRoundMerklePaths.path, true), ColwiseMerkle: newMerkle(hints.AHints.colwiseSparkMerkle.roundHints, true), - WHIRA3: NewWhirParams(sparkConfig.WHIRA3), - LogANumTerms: sparkConfig.LogANumTerms, - }, - - SparkB: SPARKMatrixData{ - Claimed: typeConverters.LimbsToBigIntMod(hints.BHints.claimed.Limbs), - - SparkSumcheckLast: bcontSparkSumcheckLast, - - RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.BHints.rowFinalCounter.Limbs), - RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSAddressEvaluation.Limbs), - RowRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSValueEvaluation.Limbs), - RowRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSTimestampEvaluation.Limbs), - - ColFinalCounter: typeConverters.LimbsToBigIntMod(hints.BHints.colFinalCounter.Limbs), - ColRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSAddressEvaluation.Limbs), - ColRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSValueEvaluation.Limbs), - ColRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSTimestampEvaluation.Limbs), - - SparkSumcheckFirstRound: newMerkle(hints.BHints.sparkSumcheckData.firstRoundMerklePaths.path, true), - SparkSumcheckMerkle: newMerkle(hints.BHints.sparkSumcheckData.roundHints, true), - - RowFinalMerkleFirstRound: 
newMerkle(hints.BHints.rowFinalMerkle.firstRoundMerklePaths.path, true), - RowFinalMerkle: newMerkle(hints.BHints.rowFinalMerkle.roundHints, true), - - RowwiseMerkleFirstRound: newMerkle(hints.BHints.rowwiseSparkMerkle.firstRoundMerklePaths.path, true), - RowwiseMerkle: newMerkle(hints.BHints.rowwiseSparkMerkle.roundHints, true), - - ColFinalMerkleFirstRound: newMerkle(hints.BHints.colFinalMerkle.firstRoundMerklePaths.path, true), - ColFinalMerkle: newMerkle(hints.BHints.colFinalMerkle.roundHints, true), - - ColwiseMerkleFirstRound: newMerkle(hints.BHints.colwiseSparkMerkle.firstRoundMerklePaths.path, true), - ColwiseMerkle: newMerkle(hints.BHints.colwiseSparkMerkle.roundHints, true), - - WHIRA3: NewWhirParams(sparkConfig.WHIRB3), - LogANumTerms: sparkConfig.LogBNumTerms, - }, - - SparkC: SPARKMatrixData{ - Claimed: typeConverters.LimbsToBigIntMod(hints.CHints.claimed.Limbs), - - SparkSumcheckLast: ccontSparkSumcheckLast, - - RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.CHints.rowFinalCounter.Limbs), - RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSAddressEvaluation.Limbs), - RowRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSValueEvaluation.Limbs), - RowRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSTimestampEvaluation.Limbs), - - ColFinalCounter: typeConverters.LimbsToBigIntMod(hints.CHints.colFinalCounter.Limbs), - ColRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSAddressEvaluation.Limbs), - ColRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSValueEvaluation.Limbs), - ColRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSTimestampEvaluation.Limbs), - - SparkSumcheckFirstRound: newMerkle(hints.CHints.sparkSumcheckData.firstRoundMerklePaths.path, true), - SparkSumcheckMerkle: newMerkle(hints.CHints.sparkSumcheckData.roundHints, true), - - RowFinalMerkleFirstRound: newMerkle(hints.CHints.rowFinalMerkle.firstRoundMerklePaths.path, true), - RowFinalMerkle: newMerkle(hints.CHints.rowFinalMerkle.roundHints, true), - - RowwiseMerkleFirstRound: newMerkle(hints.CHints.rowwiseSparkMerkle.firstRoundMerklePaths.path, true), - RowwiseMerkle: newMerkle(hints.CHints.rowwiseSparkMerkle.roundHints, true), - - ColFinalMerkleFirstRound: newMerkle(hints.CHints.colFinalMerkle.firstRoundMerklePaths.path, true), - ColFinalMerkle: newMerkle(hints.CHints.colFinalMerkle.roundHints, true), - - ColwiseMerkleFirstRound: newMerkle(hints.CHints.colwiseSparkMerkle.firstRoundMerklePaths.path, true), - ColwiseMerkle: newMerkle(hints.CHints.colwiseSparkMerkle.roundHints, true), - - WHIRA3: NewWhirParams(sparkConfig.WHIRC3), - LogANumTerms: sparkConfig.LogCNumTerms, + WHIRA3: NewWhirParams(sparkConfig.WHIR3), + WHIRA5: NewWhirParams(sparkConfig.WHIR5), + LogANumTerms: sparkConfig.LogNumTerms, }, UseSpark: useSpark, @@ -448,7 +342,7 @@ func verifyCircuit( SPARKTranscript: sparkTranscriptT, WHIRRow: NewWhirParams(sparkConfig.WHIRRow), WHIRCol: NewWhirParams(sparkConfig.WHIRCol), - LogANumTerms: sparkConfig.LogANumTerms, + LogANumTerms: sparkConfig.LogNumTerms, PointRow: pointRow, PointCol: pointCol, @@ -483,76 +377,9 @@ func verifyCircuit( ColwiseMerkleFirstRound: newMerkle(hints.AHints.colwiseSparkMerkle.firstRoundMerklePaths.path, false), ColwiseMerkle: newMerkle(hints.AHints.colwiseSparkMerkle.roundHints, false), - WHIRA3: NewWhirParams(sparkConfig.WHIRA3), - LogANumTerms: sparkConfig.LogANumTerms, - }, - - SparkB: SPARKMatrixData{ - Claimed: 
typeConverters.LimbsToBigIntMod(hints.BHints.claimed.Limbs), - - SparkSumcheckLast: bsparkSumcheckLast, - - RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.BHints.rowFinalCounter.Limbs), - RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSAddressEvaluation.Limbs), - RowRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSValueEvaluation.Limbs), - RowRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.rowRSTimestampEvaluation.Limbs), - - ColFinalCounter: typeConverters.LimbsToBigIntMod(hints.BHints.colFinalCounter.Limbs), - ColRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSAddressEvaluation.Limbs), - ColRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSValueEvaluation.Limbs), - ColRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.BHints.colRSTimestampEvaluation.Limbs), - - SparkSumcheckFirstRound: newMerkle(hints.BHints.sparkSumcheckData.firstRoundMerklePaths.path, false), - SparkSumcheckMerkle: newMerkle(hints.BHints.sparkSumcheckData.roundHints, false), - - RowFinalMerkleFirstRound: newMerkle(hints.BHints.rowFinalMerkle.firstRoundMerklePaths.path, false), - RowFinalMerkle: newMerkle(hints.BHints.rowFinalMerkle.roundHints, false), - - RowwiseMerkleFirstRound: newMerkle(hints.BHints.rowwiseSparkMerkle.firstRoundMerklePaths.path, false), - RowwiseMerkle: newMerkle(hints.BHints.rowwiseSparkMerkle.roundHints, false), - - ColFinalMerkleFirstRound: newMerkle(hints.BHints.colFinalMerkle.firstRoundMerklePaths.path, false), - ColFinalMerkle: newMerkle(hints.BHints.colFinalMerkle.roundHints, false), - - ColwiseMerkleFirstRound: newMerkle(hints.BHints.colwiseSparkMerkle.firstRoundMerklePaths.path, false), - ColwiseMerkle: newMerkle(hints.BHints.colwiseSparkMerkle.roundHints, false), - - WHIRA3: NewWhirParams(sparkConfig.WHIRB3), - LogANumTerms: sparkConfig.LogBNumTerms, - }, - - SparkC: SPARKMatrixData{ - Claimed: typeConverters.LimbsToBigIntMod(hints.CHints.claimed.Limbs), - - SparkSumcheckLast: csparkSumcheckLast, - - RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.CHints.rowFinalCounter.Limbs), - RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSAddressEvaluation.Limbs), - RowRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSValueEvaluation.Limbs), - RowRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.rowRSTimestampEvaluation.Limbs), - - ColFinalCounter: typeConverters.LimbsToBigIntMod(hints.CHints.colFinalCounter.Limbs), - ColRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSAddressEvaluation.Limbs), - ColRSValueEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSValueEvaluation.Limbs), - ColRSTimestampEvaluation: typeConverters.LimbsToBigIntMod(hints.CHints.colRSTimestampEvaluation.Limbs), - - SparkSumcheckFirstRound: newMerkle(hints.CHints.sparkSumcheckData.firstRoundMerklePaths.path, false), - SparkSumcheckMerkle: newMerkle(hints.CHints.sparkSumcheckData.roundHints, false), - - RowFinalMerkleFirstRound: newMerkle(hints.CHints.rowFinalMerkle.firstRoundMerklePaths.path, false), - RowFinalMerkle: newMerkle(hints.CHints.rowFinalMerkle.roundHints, false), - - RowwiseMerkleFirstRound: newMerkle(hints.CHints.rowwiseSparkMerkle.firstRoundMerklePaths.path, false), - RowwiseMerkle: newMerkle(hints.CHints.rowwiseSparkMerkle.roundHints, false), - - ColFinalMerkleFirstRound: newMerkle(hints.CHints.colFinalMerkle.firstRoundMerklePaths.path, false), - ColFinalMerkle: 
newMerkle(hints.CHints.colFinalMerkle.roundHints, false), - - ColwiseMerkleFirstRound: newMerkle(hints.CHints.colwiseSparkMerkle.firstRoundMerklePaths.path, false), - ColwiseMerkle: newMerkle(hints.CHints.colwiseSparkMerkle.roundHints, false), - - WHIRA3: NewWhirParams(sparkConfig.WHIRC3), - LogANumTerms: sparkConfig.LogCNumTerms, + WHIRA3: NewWhirParams(sparkConfig.WHIR3), + WHIRA5: NewWhirParams(sparkConfig.WHIR5), + LogANumTerms: sparkConfig.LogNumTerms, }, UseSpark: useSpark, @@ -664,7 +491,24 @@ func sparkSingleMatrix( matrix SPARKMatrixData, circuit *Circuit, ) error { - sumcheckCommitment, err := parseBatchedCommitment(arthur, matrix.WHIRA3) + + claimedEvaluations := make([]frontend.Variable, 3) + if err := arthur.FillNextScalars(claimedEvaluations); err != nil { + return err + } + + matrixCombinationRandomness := make([]frontend.Variable, 1) + if err := arthur.FillChallengeScalars(matrixCombinationRandomness); err != nil { + return err + } + + claimedValue := api.Add( + claimedEvaluations[0], + api.Mul(claimedEvaluations[1], matrixCombinationRandomness[0]), + api.Mul(claimedEvaluations[2], matrixCombinationRandomness[0], matrixCombinationRandomness[0]), + ) + + sumcheckCommitment, err := parseBatchedCommitment(arthur, matrix.WHIRA5) if err != nil { return err } @@ -687,15 +531,26 @@ func sparkSingleMatrix( } // After debug: Change 1 to actual claimed value - sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, matrix.Claimed, matrix.LogANumTerms, 4) + sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, claimedValue, matrix.LogANumTerms, 4) if err != nil { return err } - api.AssertIsEqual(sparkSumcheckLastEval, api.Mul(matrix.SparkSumcheckLast[0], matrix.SparkSumcheckLast[1], matrix.SparkSumcheckLast[2])) + api.Println(sparkSumcheckLastEval) - _, err = RunZKWhir(api, arthur, uapi, sc, matrix.SparkSumcheckMerkle, matrix.SparkSumcheckFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}, {}, {}}, []frontend.Variable{}, sumcheckCommitment, - [][]frontend.Variable{{matrix.SparkSumcheckLast[0]}, {matrix.SparkSumcheckLast[1]}, {matrix.SparkSumcheckLast[2]}}, + _ = sparkSumcheckFoldingRandomness + _ = sparkSumcheckLastEval + + claimedVal := api.Add( + matrix.SparkSumcheckLast[0], + api.Mul(matrix.SparkSumcheckLast[1], matrixCombinationRandomness[0]), + api.Mul(matrix.SparkSumcheckLast[2], matrixCombinationRandomness[0], matrixCombinationRandomness[0]), + ) + + api.AssertIsEqual(sparkSumcheckLastEval, api.Mul(claimedVal, matrix.SparkSumcheckLast[3], matrix.SparkSumcheckLast[4])) + + _, err = RunZKWhir(api, arthur, uapi, sc, matrix.SparkSumcheckMerkle, matrix.SparkSumcheckFirstRound, matrix.WHIRA5, [][]frontend.Variable{{}, {}, {}, {}, {}}, []frontend.Variable{}, sumcheckCommitment, + [][]frontend.Variable{{matrix.SparkSumcheckLast[0]}, {matrix.SparkSumcheckLast[1]}, {matrix.SparkSumcheckLast[2]}, {matrix.SparkSumcheckLast[3]}, {matrix.SparkSumcheckLast[4]}}, [][]frontend.Variable{sparkSumcheckFoldingRandomness}, ) if err != nil { @@ -841,5 +696,11 @@ func sparkSingleMatrix( api.AssertIsEqual(api.Mul(colwiseClaimedInit, colwiseClaimedWS), api.Mul(colwiseClaimedRS, colwiseClaimedFinal)) + _ = sumcheckCommitment + _ = rowwiseCommitment + _ = colwiseCommitment + _ = rowFinalCommitment + _ = colFinalCommitment + return nil } diff --git a/recursive-verifier/app/circuit/common.go b/recursive-verifier/app/circuit/common.go index 34d47dad..4d354d77 100644 --- a/recursive-verifier/app/circuit/common.go +++ 
b/recursive-verifier/app/circuit/common.go @@ -354,23 +354,11 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, var hidingSpartanData = consumeWhirData(config.WHIRConfigHidingSpartan, &merklePaths, &stirAnswers) var witnessData = consumeWhirData(config.WHIRConfigWitness, &merklePaths, &stirAnswers) - var asparkSumcheckData = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) + var asparkSumcheckData = consumeWhirData(sparkConfig.WHIR5, &sparkMerklePaths, &sparkStirAnswers) var arowFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers) - var arowwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) + var arowwiseSparkMerkle = consumeWhirData(sparkConfig.WHIR3, &sparkMerklePaths, &sparkStirAnswers) var acolFinal = consumeWhirData(sparkConfig.WHIRCol, &sparkMerklePaths, &sparkStirAnswers) - var acolwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRA3, &sparkMerklePaths, &sparkStirAnswers) - - var bsparkSumcheckData = consumeWhirData(sparkConfig.WHIRB3, &sparkMerklePaths, &sparkStirAnswers) - var browFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers) - var browwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRB3, &sparkMerklePaths, &sparkStirAnswers) - var bcolFinal = consumeWhirData(sparkConfig.WHIRCol, &sparkMerklePaths, &sparkStirAnswers) - var bcolwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRB3, &sparkMerklePaths, &sparkStirAnswers) - - var csparkSumcheckData = consumeWhirData(sparkConfig.WHIRB3, &sparkMerklePaths, &sparkStirAnswers) - var crowFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers) - var crowwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRB3, &sparkMerklePaths, &sparkStirAnswers) - var ccolFinal = consumeWhirData(sparkConfig.WHIRCol, &sparkMerklePaths, &sparkStirAnswers) - var ccolwiseSparkMerkle = consumeWhirData(sparkConfig.WHIRB3, &sparkMerklePaths, &sparkStirAnswers) + var acolwiseSparkMerkle = consumeWhirData(sparkConfig.WHIR3, &sparkMerklePaths, &sparkStirAnswers) hints := Hints{ pointRow: pointRow, @@ -399,48 +387,6 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, colRSValueEvaluation: colRSValueEvaluation[0], colRSTimestampEvaluation: colRSTimestampEvaluation[0], }, - - BHints: SparkMatrixHints{ - claimed: claimedB, - sparkSumcheckData: bsparkSumcheckData, - rowFinalMerkle: browFinal, - rowwiseSparkMerkle: browwiseSparkMerkle, - colFinalMerkle: bcolFinal, - colwiseSparkMerkle: bcolwiseSparkMerkle, - - sparkClaimedEvaluations: sparkClaimedEvaluations[1], - - rowFinalCounter: rowFinalCounter[1], - rowRSAddressEvaluation: rowRSAddressEvaluation[1], - rowRSValueEvaluation: rowRSValueEvaluation[1], - rowRSTimestampEvaluation: rowRSTimestampEvaluation[1], - - colFinalCounter: colFinalCounter[1], - colRSAddressEvaluation: colRSAddressEvaluation[1], - colRSValueEvaluation: colRSValueEvaluation[1], - colRSTimestampEvaluation: colRSTimestampEvaluation[1], - }, - - CHints: SparkMatrixHints{ - claimed: claimedC, - sparkSumcheckData: csparkSumcheckData, - rowFinalMerkle: crowFinal, - rowwiseSparkMerkle: crowwiseSparkMerkle, - colFinalMerkle: ccolFinal, - colwiseSparkMerkle: ccolwiseSparkMerkle, - - sparkClaimedEvaluations: sparkClaimedEvaluations[2], - - rowFinalCounter: rowFinalCounter[2], - rowRSAddressEvaluation: rowRSAddressEvaluation[2], - rowRSValueEvaluation: rowRSValueEvaluation[2], - rowRSTimestampEvaluation: rowRSTimestampEvaluation[2], - - 
colFinalCounter: colFinalCounter[2], - colRSAddressEvaluation: colRSAddressEvaluation[2], - colRSValueEvaluation: colRSValueEvaluation[2], - colRSTimestampEvaluation: colRSTimestampEvaluation[2], - }, } err = verifyCircuit(deferred, config, sparkConfig, hints, pk, vk, outputCcsPath, claimedEvaluations, r1cs, interner, evaluation) diff --git a/recursive-verifier/app/circuit/types.go b/recursive-verifier/app/circuit/types.go index f1c97f99..c32ddaf3 100644 --- a/recursive-verifier/app/circuit/types.go +++ b/recursive-verifier/app/circuit/types.go @@ -150,16 +150,13 @@ type ClaimedEvaluations struct { // } type SparkConfig struct { - IOPattern string `json:"io_pattern"` - Transcript []byte `json:"transcript"` - WHIRA3 WHIRConfig `json:"whir_a3"` - WHIRB3 WHIRConfig `json:"whir_b3"` - WHIRC3 WHIRConfig `json:"whir_c3"` - WHIRRow WHIRConfig `json:"whir_row"` - WHIRCol WHIRConfig `json:"whir_col"` - LogANumTerms int `json:"log_a_num_terms"` - LogBNumTerms int `json:"log_b_num_terms"` - LogCNumTerms int `json:"log_c_num_terms"` + IOPattern string `json:"io_pattern"` + Transcript []byte `json:"transcript"` + WHIR3 WHIRConfig `json:"whir_3batched"` + WHIR5 WHIRConfig `json:"whir_5batched"` + WHIRRow WHIRConfig `json:"whir_row"` + WHIRCol WHIRConfig `json:"whir_col"` + LogNumTerms int `json:"log_num_terms"` } type Commitment struct { @@ -173,6 +170,7 @@ type SPARKMatrixData struct { Claimed frontend.Variable WHIRA3 WHIRParams + WHIRA5 WHIRParams LogANumTerms int SparkSumcheckLast []frontend.Variable diff --git a/spark-prover/Cargo.toml b/spark-prover/Cargo.toml index 7a488255..06657d15 100644 --- a/spark-prover/Cargo.toml +++ b/spark-prover/Cargo.toml @@ -9,6 +9,7 @@ homepage.workspace = true repository.workspace = true [dependencies] +argh = "0.1.12" provekit-common.workspace = true provekit-r1cs-compiler.workspace = true serde_json.workspace = true @@ -20,7 +21,6 @@ ark-std.workspace = true ark-ff.workspace = true itertools = "0.14.0" - [lints] workspace = true diff --git a/spark-prover/src/bin/generate_test_r1cs.rs b/spark-prover/src/bin/generate_test_r1cs.rs index 41e402f9..53bbe81d 100644 --- a/spark-prover/src/bin/generate_test_r1cs.rs +++ b/spark-prover/src/bin/generate_test_r1cs.rs @@ -7,13 +7,19 @@ fn main() { let mut r1cs = R1CS::new(); r1cs.grow_matrices(256, 256); let interned_1 = r1cs.interner.intern(FieldElement::from(1)); + let interned_2 = r1cs.interner.intern(FieldElement::from(2)); + let interned_3 = r1cs.interner.intern(FieldElement::from(3)); for i in 0..256 { r1cs.a.set(i, i, interned_1); - r1cs.b.set(i, i, interned_1); - r1cs.c.set(i, i, interned_1); + r1cs.b.set(i, i, interned_2); + r1cs.c.set(i, i, interned_3); } + r1cs.a.set(1, 0, interned_1); + r1cs.a.set(2, 0, interned_1); + r1cs.a.set(3, 0, interned_1); + let matrix_json = serde_json::to_string(&r1cs).expect("Error: Failed to serialize R1CS to JSON"); let mut request_file = diff --git a/spark-prover/src/bin/generate_test_request.rs b/spark-prover/src/bin/generate_test_request.rs index 37ab5075..55adcb23 100644 --- a/spark-prover/src/bin/generate_test_request.rs +++ b/spark-prover/src/bin/generate_test_request.rs @@ -4,15 +4,20 @@ use { }; fn main() { + let mut row = vec![FieldElement::from(0); 8]; + let mut col = vec![FieldElement::from(0); 9]; + + row[7] = FieldElement::from(1); + let spark_request = SPARKRequest { point_to_evaluate: Point { - row: vec![FieldElement::from(0); 8], - col: vec![FieldElement::from(0); 9], + row, + col, }, claimed_values: ClaimedValues { a: FieldElement::from(1), - b: FieldElement::from(1), - 
c: FieldElement::from(1), + b: FieldElement::from(0), + c: FieldElement::from(0), }, }; diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index c8b05b3e..b6ade89c 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -1,19 +1,13 @@ use { - anyhow::{ensure, Context, Result}, - ark_std::{One, Zero}, - provekit_common::{ + anyhow::{ensure, Context, Result}, argh::FromArgs, ark_std::{One, Zero}, provekit_common::{ skyscraper::SkyscraperSponge, spark::SPARKRequest, utils::{ next_power_of_two, sumcheck::{calculate_eq, eval_cubic_poly}, }, FieldElement, IOPattern, WhirConfig - }, - spark_prover::{utilities::SPARKProof, whir::SPARKWHIRConfigsNew}, - spongefish::{ + }, spark_prover::{utilities::SPARKProof, whir::SPARKWHIRConfigsNew}, spongefish::{ codecs::arkworks_algebra::{FieldToUnitDeserialize, UnitToField}, VerifierState, - }, - std::fs::{self, File}, - whir::{ + }, std::{fs::{self, File}, path::PathBuf}, whir::{ poly_utils::multilinear::MultilinearPoint, whir::{ committer::CommitmentReader, @@ -21,19 +15,34 @@ use { utils::HintDeserialize, verifier::Verifier, }, - }, + } }; +#[derive(FromArgs)] +#[argh(description="Spark Verifier CLI")] +struct Args { + /// request + #[argh(option)] + request: PathBuf, + + /// proof + #[argh(option)] + proof: PathBuf, +} + + fn main() -> Result<()> { - let spark_proof_json_str = fs::read_to_string("spark-prover/spark_proof.json") - .context("Error: Failed to open the r1cs.json file")?; + let args: Args = argh::from_env(); + + let spark_proof_json_str = fs::read_to_string(args.proof) + .context("Error: Failed to open the proof file")?; let spark_proof: SPARKProof = serde_json::from_str(&spark_proof_json_str) - .context("Error: Failed to deserialize JSON to R1CS")?; + .context("Error: Failed to deserialize proof")?; - let request_json_str = fs::read_to_string("spark-prover/request.json") - .context("Error: Failed to open the r1cs.json file")?; + let request_json_str = fs::read_to_string(args.request) + .context("Error: Failed to open the request file")?; let request: SPARKRequest = serde_json::from_str(&request_json_str) - .context("Error: Failed to deserialize JSON to R1CS")?; + .context("Error: Failed to deserialize request")?; let io = IOPattern::from_string(spark_proof.io_pattern.clone()); let mut arthur = io.to_verifier_state(&spark_proof.transcript); @@ -77,6 +86,7 @@ pub fn verify_spark_single_matrix( request: &SPARKRequest, claimed_value: &FieldElement, ) -> Result<()> { + println!("{:?}", claimed_value); //Reilabs Debug: let commitment_reader_row = CommitmentReader::new(&whir_params.row); let commitment_reader_col = CommitmentReader::new(&whir_params.col); @@ -101,6 +111,8 @@ pub fn verify_spark_single_matrix( ) .context("While verifying SPARK sumcheck")?; + println!("{:?}", a_last_sumcheck_value); //Reilabs Debug: + let final_folds: Vec = arthur.hint()?; let claimed_val = @@ -182,6 +194,8 @@ pub fn verify_spark_single_matrix( let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + final_opening * last_randomness[0]; + println!("Rowwise init {:?}", evaluated_value); //Reilabs Debug: + println!("Rowwise init {:?}", gpa_result.a_last_sumcheck_value); //Reilabs Debug: ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); let gpa_result = gpa_sumcheck_verifier( @@ -204,6 +218,8 @@ pub fn verify_spark_single_matrix( let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; + 
println!("Rowwise rs {:?}", evaluated_value); //Reilabs Debug: + println!("Rowwise rs {:?}", gpa_result.a_last_sumcheck_value); //Reilabs Debug: ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); let mut a_spark_rowwise_statement_verifier = Statement::::new(next_power_of_two( @@ -275,6 +291,8 @@ pub fn verify_spark_single_matrix( let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + final_opening * last_randomness[0]; + println!("Colwise init {:?}", evaluated_value); //Reilabs Debug: + println!("Colwise init {:?}", gpa_result.a_last_sumcheck_value); //Reilabs Debug: ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); // Colwise RS WS GPA @@ -299,6 +317,8 @@ pub fn verify_spark_single_matrix( let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; + println!("Colwise rs {:?}", evaluated_value); //Reilabs Debug: + println!(" {:?}", gpa_result.a_last_sumcheck_value); //Reilabs Debug: ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); let mut a_spark_colwise_statement_verifier = Statement::::new(next_power_of_two( diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index aea0003c..925aa3f0 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -3,10 +3,11 @@ use { memory::{calculate_e_values_for_r1cs, calculate_memory, EValuesForMatrix, Memory}, spark::{prove_spark_for_single_matrix, run_spark_sumcheck}, utilities::{ - calculate_matrix_dimensions, create_io_pattern, deserialize_r1cs, deserialize_request, get_spark_r1cs, iopattern::SPARKDomainSeparator, matrix::{COOMatrix, COOMatrixNew, SparkMatrix, SparkMatrixNew, TimeStamps}, MatrixDimensions, MatrixDimensionsNew, SPARKProof, SPARKProofGnark + calculate_matrix_dimensions, create_io_pattern, deserialize_r1cs, deserialize_request, get_spark_r1cs, iopattern::SPARKDomainSeparator, matrix::{COOMatrix, COOMatrixNew, SparkMatrix, SparkMatrixNew, TimeStamps}, MatrixDimensions, MatrixDimensionsNew, SPARKProof, SPARKProofGnark, SPARKProofGnarkNew }, whir::{create_whir_configs, SPARKWHIRConfigsNew}, }, spongefish::codecs::arkworks_algebra::{FieldDomainSeparator, FieldToUnitSerialize, UnitToField}, std::{collections::BTreeMap, fs::File, io::Write, mem}, whir::{poly_utils::evals::EvaluationsList, whir::{committer::CommitmentWriter, domainsep::WhirDomainSeparator, statement::Statement, utils::HintSerialize}} + argh::FromArgs, }; fn main() -> Result<()> { @@ -301,25 +302,22 @@ fn main() -> Result<()> { .write_all(serde_json::to_string(&spark_proof).unwrap().as_bytes()) .expect("Writing gnark parameters to a file failed"); - // let spark_proof_gnark = SPARKProofGnark { - // transcript: spark_proof.transcript, - // io_pattern: spark_proof.io_pattern, - // whir_row: WHIRConfigGnark::new(&spark_proof.whir_params.row), - // whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col), - // whir_a3: WHIRConfigGnark::new(&spark_proof.whir_params.a_3batched), - // whir_b3: WHIRConfigGnark::new(&spark_proof.whir_params.b_3batched), - // whir_c3: WHIRConfigGnark::new(&spark_proof.whir_params.c_3batched), - // log_a_num_terms: next_power_of_two(padded_num_entries), - // log_b_num_terms: next_power_of_two(r1cs.b.num_entries()), - // log_c_num_terms: next_power_of_two(r1cs.c.num_entries()), - // }; - - // let mut gnark_spark_proof_file = File::create("spark-prover/gnark_spark_proof.json") - // .context("Error: Failed to create the spark proof file")?; - - // gnark_spark_proof_file - // 
.write_all(serde_json::to_string(&spark_proof_gnark).unwrap().as_bytes()) - // .expect("Writing spark gnark parameters to a file failed"); + let spark_proof_gnark = SPARKProofGnarkNew { + transcript: spark_proof.transcript, + io_pattern: spark_proof.io_pattern, + whir_row: WHIRConfigGnark::new(&spark_proof.whir_params.row), + whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col), + whir_3batched: WHIRConfigGnark::new(&spark_proof.whir_params.num_terms_3batched), + whir_5batched: WHIRConfigGnark::new(&spark_proof.whir_params.num_terms_5batched), + log_num_terms: next_power_of_two(padded_num_entries), + }; + + let mut gnark_spark_proof_file = File::create("spark-prover/gnark_spark_proof.json") + .context("Error: Failed to create the spark proof file")?; + + gnark_spark_proof_file + .write_all(serde_json::to_string(&spark_proof_gnark).unwrap().as_bytes()) + .expect("Writing spark gnark parameters to a file failed"); Ok(()) } diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index 1456532f..5cb347f6 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -75,4 +75,15 @@ pub struct SPARKProofGnark { pub log_a_num_terms: usize, pub log_b_num_terms: usize, pub log_c_num_terms: usize, -} \ No newline at end of file +} + +#[derive(Serialize, Deserialize)] +pub struct SPARKProofGnarkNew { + pub transcript: Vec, + pub io_pattern: String, + pub whir_row: WHIRConfigGnark, + pub whir_col: WHIRConfigGnark, + pub whir_3batched: WHIRConfigGnark, + pub whir_5batched: WHIRConfigGnark, + pub log_num_terms: usize, +} From c8135bb44f948aaa6bcf5f5fcbc61bc9eb54b768 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Fri, 3 Oct 2025 15:37:18 +0800 Subject: [PATCH 25/34] Cleanup rust prover --- provekit/common/src/gnark.rs | 10 +- provekit/common/src/lib.rs | 4 +- provekit/common/src/spark.rs | 8 +- provekit/prover/src/whir_r1cs.rs | 29 ++- spark-prover/src/bin/generate_test_request.rs | 14 +- spark-prover/src/bin/spark-verifier.rs | 203 ++++++++-------- spark-prover/src/lib.rs | 4 - spark-prover/src/main.rs | 143 ++++++----- spark-prover/src/memory.rs | 61 ----- spark-prover/src/{ => utilities}/gpa.rs | 6 +- spark-prover/src/utilities/iopattern.rs | 26 ++ spark-prover/src/utilities/iopattern/mod.rs | 228 ------------------ spark-prover/src/utilities/matrix.rs | 23 ++ spark-prover/src/utilities/matrix/mod.rs | 117 --------- spark-prover/src/utilities/memory.rs | 33 +++ spark-prover/src/utilities/mod.rs | 44 ++-- spark-prover/src/{ => utilities}/spark.rs | 132 +++++----- spark-prover/src/{ => utilities}/whir.rs | 41 +--- 18 files changed, 423 insertions(+), 703 deletions(-) delete mode 100644 spark-prover/src/memory.rs rename spark-prover/src/{ => utilities}/gpa.rs (98%) create mode 100644 spark-prover/src/utilities/iopattern.rs delete mode 100644 spark-prover/src/utilities/iopattern/mod.rs create mode 100644 spark-prover/src/utilities/matrix.rs delete mode 100644 spark-prover/src/utilities/matrix/mod.rs create mode 100644 spark-prover/src/utilities/memory.rs rename spark-prover/src/{ => utilities}/spark.rs (78%) rename spark-prover/src/{ => utilities}/whir.rs (55%) diff --git a/provekit/common/src/gnark.rs b/provekit/common/src/gnark.rs index 4e3540eb..998b8789 100644 --- a/provekit/common/src/gnark.rs +++ b/provekit/common/src/gnark.rs @@ -1,6 +1,8 @@ -use serde::{Deserialize, Serialize}; -use crate::WhirConfig; -use ark_poly::EvaluationDomain; +use { + crate::WhirConfig, + ark_poly::EvaluationDomain, + serde::{Deserialize, Serialize}, 
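    // NOTE (editor): part of the "Cleanup rust prover" commit; the separate
    // `use` statements above are folded into one nested tree, matching the
    // import style applied throughout this commit.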
+}; #[derive(Debug, Serialize, Deserialize)] @@ -71,4 +73,4 @@ impl WHIRConfigGnark { batch_size: whir_params.batch_size, } } -} \ No newline at end of file +} diff --git a/provekit/common/src/lib.rs b/provekit/common/src/lib.rs index e8358c81..9197d647 100644 --- a/provekit/common/src/lib.rs +++ b/provekit/common/src/lib.rs @@ -1,14 +1,14 @@ pub mod file; +pub mod gnark; mod interner; mod noir_proof_scheme; mod r1cs; pub mod skyscraper; +pub mod spark; mod sparse_matrix; pub mod utils; mod whir_r1cs; pub mod witness; -pub mod spark; -pub mod gnark; use crate::interner::{InternedFieldElement, Interner}; pub use { diff --git a/provekit/common/src/spark.rs b/provekit/common/src/spark.rs index 1921a7af..f89ec611 100644 --- a/provekit/common/src/spark.rs +++ b/provekit/common/src/spark.rs @@ -1,7 +1,7 @@ -use serde::{Deserialize, Serialize}; -use crate::FieldElement; -use crate::utils::serde_ark; - +use { + crate::{utils::serde_ark, FieldElement}, + serde::{Deserialize, Serialize}, +}; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct Point { diff --git a/provekit/prover/src/whir_r1cs.rs b/provekit/prover/src/whir_r1cs.rs index 5356e7e4..65458535 100644 --- a/provekit/prover/src/whir_r1cs.rs +++ b/provekit/prover/src/whir_r1cs.rs @@ -1,6 +1,12 @@ use { - anyhow::{ensure, Result}, ark_ff::UniformRand, ark_std::{One, Zero}, provekit_common::{ - file::write, skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge}, spark::{self, ClaimedValues, Point, SPARKRequest}, utils::{ + anyhow::{ensure, Result}, + ark_ff::UniformRand, + ark_std::{One, Zero}, + provekit_common::{ + file::write, + skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge}, + spark::{self, ClaimedValues, Point, SPARKRequest}, + utils::{ pad_to_power_of_two, sumcheck::{ calculate_evaluations_over_boolean_hypercube_for_eq, @@ -9,11 +15,16 @@ use { }, zk_utils::{create_masked_polynomial, generate_random_multilinear_polynomial}, HALF, - }, FieldElement, IOPattern, SparseMatrix, WhirConfig, WhirR1CSProof, WhirR1CSScheme, R1CS - }, spongefish::{ + }, + FieldElement, IOPattern, SparseMatrix, WhirConfig, WhirR1CSProof, WhirR1CSScheme, R1CS, + }, + spongefish::{ codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, ProverState, - }, std::{fs::File, io::Write}, tracing::{info, instrument, warn}, whir::{ + }, + std::{fs::File, io::Write}, + tracing::{info, instrument, warn}, + whir::{ poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, whir::{ committer::{CommitmentWriter, Witness}, @@ -22,7 +33,7 @@ use { statement::{Statement, Weights}, utils::HintSerialize, }, - } + }, }; pub trait WhirR1CSProver { @@ -88,16 +99,16 @@ impl WhirR1CSProver for WhirR1CSScheme { let transcript = merlin.narg_string().to_vec(); - let spark_request: SPARKRequest = SPARKRequest { + let spark_request: SPARKRequest = SPARKRequest { point_to_evaluate: Point { row: alpha, col: whir_randomness.0, }, - claimed_values: ClaimedValues { + claimed_values: ClaimedValues { a: deferred_evaluations[0], b: deferred_evaluations[1], c: deferred_evaluations[2], - } + }, }; let mut spark_request_file = File::create("spark_request.json")?; // Creates or truncates the spark_request_file diff --git a/spark-prover/src/bin/generate_test_request.rs b/spark-prover/src/bin/generate_test_request.rs index 55adcb23..fdb562de 100644 --- a/spark-prover/src/bin/generate_test_request.rs +++ b/spark-prover/src/bin/generate_test_request.rs @@ -1,19 +1,19 @@ use { - provekit_common::{spark::{ClaimedValues, Point, SPARKRequest}, FieldElement}, + 
provekit_common::{ + spark::{ClaimedValues, Point, SPARKRequest}, + FieldElement, + }, std::{fs::File, io::Write}, }; fn main() { let mut row = vec![FieldElement::from(0); 8]; - let mut col = vec![FieldElement::from(0); 9]; + let col = vec![FieldElement::from(0); 9]; row[7] = FieldElement::from(1); - + let spark_request = SPARKRequest { - point_to_evaluate: Point { - row, - col, - }, + point_to_evaluate: Point { row, col }, claimed_values: ClaimedValues { a: FieldElement::from(1), b: FieldElement::from(0), diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index b6ade89c..da04734e 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -1,13 +1,26 @@ use { - anyhow::{ensure, Context, Result}, argh::FromArgs, ark_std::{One, Zero}, provekit_common::{ - skyscraper::SkyscraperSponge, spark::SPARKRequest, utils::{ + anyhow::{ensure, Context, Result}, + argh::FromArgs, + ark_std::{One, Zero}, + provekit_common::{ + skyscraper::SkyscraperSponge, + spark::SPARKRequest, + utils::{ next_power_of_two, sumcheck::{calculate_eq, eval_cubic_poly}, - }, FieldElement, IOPattern, WhirConfig - }, spark_prover::{utilities::SPARKProof, whir::SPARKWHIRConfigsNew}, spongefish::{ + }, + FieldElement, IOPattern, + }, + spark_prover::utilities::{whir::SPARKWHIRConfigsNew, SPARKProof}, + spongefish::{ codecs::arkworks_algebra::{FieldToUnitDeserialize, UnitToField}, VerifierState, - }, std::{fs::{self, File}, path::PathBuf}, whir::{ + }, + std::{ + fs::{self}, + path::PathBuf, + }, + whir::{ poly_utils::multilinear::MultilinearPoint, whir::{ committer::CommitmentReader, @@ -15,11 +28,11 @@ use { utils::HintDeserialize, verifier::Verifier, }, - } + }, }; #[derive(FromArgs)] -#[argh(description="Spark Verifier CLI")] +#[argh(description = "Spark Verifier CLI")] struct Args { /// request #[argh(option)] @@ -30,37 +43,35 @@ struct Args { proof: PathBuf, } - fn main() -> Result<()> { let args: Args = argh::from_env(); - - let spark_proof_json_str = fs::read_to_string(args.proof) - .context("Error: Failed to open the proof file")?; + + let spark_proof_json_str = + fs::read_to_string(args.proof).context("Error: Failed to open the proof file")?; let spark_proof: SPARKProof = serde_json::from_str(&spark_proof_json_str) .context("Error: Failed to deserialize proof")?; - let request_json_str = fs::read_to_string(args.request) - .context("Error: Failed to open the request file")?; - let request: SPARKRequest = serde_json::from_str(&request_json_str) - .context("Error: Failed to deserialize request")?; + let request_json_str = + fs::read_to_string(args.request).context("Error: Failed to open the request file")?; + let request: SPARKRequest = + serde_json::from_str(&request_json_str).context("Error: Failed to deserialize request")?; let io = IOPattern::from_string(spark_proof.io_pattern.clone()); let mut arthur = io.to_verifier_state(&spark_proof.transcript); - let point_row: Vec = arthur.hint()?; - let point_col: Vec = arthur.hint()?; - + let _point_row: Vec = arthur.hint()?; + let _point_col: Vec = arthur.hint()?; + let mut claimed_values = [FieldElement::from(0); 3]; arthur.fill_next_scalars(&mut claimed_values)?; let mut matrix_batching_randomness = [FieldElement::from(0); 1]; arthur.fill_challenge_scalars(&mut matrix_batching_randomness)?; let matrix_batching_randomness = matrix_batching_randomness[0]; - - let claimed_value = - claimed_values[0] - + claimed_values[1] * matrix_batching_randomness - + claimed_values[2] * 
matrix_batching_randomness * matrix_batching_randomness; + + let claimed_value = claimed_values[0] + + claimed_values[1] * matrix_batching_randomness + + claimed_values[2] * matrix_batching_randomness * matrix_batching_randomness; verify_spark_single_matrix( &matrix_batching_randomness, @@ -68,7 +79,7 @@ fn main() -> Result<()> { spark_proof.matrix_dimensions.num_rows, spark_proof.matrix_dimensions.num_cols, spark_proof.matrix_dimensions.nonzero_terms, - &mut arthur, + &mut arthur, &request, &claimed_value, )?; @@ -86,10 +97,9 @@ pub fn verify_spark_single_matrix( request: &SPARKRequest, claimed_value: &FieldElement, ) -> Result<()> { - println!("{:?}", claimed_value); //Reilabs Debug: let commitment_reader_row = CommitmentReader::new(&whir_params.row); let commitment_reader_col = CommitmentReader::new(&whir_params.col); - + // Matrix A let a_3batched_commitment_reader = CommitmentReader::new(&whir_params.num_terms_3batched); @@ -98,32 +108,25 @@ pub fn verify_spark_single_matrix( let a_sumcheck_commitment = a_5batched_commitment_reader.parse_commitment(arthur)?; let a_rowwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; let a_colwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; - + let a_row_finalts_commitment = commitment_reader_row.parse_commitment(arthur).unwrap(); let a_col_finalts_commitment = commitment_reader_col.parse_commitment(arthur).unwrap(); - // Matrix A - Sumcheck - - let (randomness, a_last_sumcheck_value) = run_sumcheck_verifier_spark( - arthur, - next_power_of_two(num_nonzero_terms), - *claimed_value, - ) - .context("While verifying SPARK sumcheck")?; + // Matrix A - Sumcheck - println!("{:?}", a_last_sumcheck_value); //Reilabs Debug: + let (randomness, a_last_sumcheck_value) = + run_sumcheck_verifier_spark(arthur, next_power_of_two(num_nonzero_terms), *claimed_value) + .context("While verifying SPARK sumcheck")?; let final_folds: Vec = arthur.hint()?; - let claimed_val = - final_folds[0] + - final_folds[1] * matrix_batching_randomness + - final_folds[2] * matrix_batching_randomness * matrix_batching_randomness; + let claimed_val = final_folds[0] + + final_folds[1] * matrix_batching_randomness + + final_folds[2] * matrix_batching_randomness * matrix_batching_randomness; assert!(a_last_sumcheck_value == claimed_val * final_folds[3] * final_folds[4]); - let mut a_spark_sumcheck_statement_verifier = Statement::::new(next_power_of_two( - num_nonzero_terms, - )); + let mut a_spark_sumcheck_statement_verifier = + Statement::::new(next_power_of_two(num_nonzero_terms)); let mut batching_randomness = Vec::with_capacity(5); let mut cur = FieldElement::from(1); @@ -134,27 +137,28 @@ pub fn verify_spark_single_matrix( a_spark_sumcheck_statement_verifier.add_constraint( Weights::evaluation(MultilinearPoint(randomness.clone())), - final_folds[0] * batching_randomness[0] + - final_folds[1] * batching_randomness[1] + - final_folds[2] * batching_randomness[2] + - final_folds[3] * batching_randomness[3] + - final_folds[4] * batching_randomness[4] + final_folds[0] * batching_randomness[0] + + final_folds[1] * batching_randomness[1] + + final_folds[2] * batching_randomness[2] + + final_folds[3] * batching_randomness[3] + + final_folds[4] * batching_randomness[4], ); let a_spark_sumcheck_verifier = Verifier::new(&whir_params.num_terms_5batched); - a_spark_sumcheck_verifier.verify(arthur, &a_sumcheck_commitment, &a_spark_sumcheck_statement_verifier)?; + a_spark_sumcheck_verifier.verify( + arthur, + &a_sumcheck_commitment, + 
&a_spark_sumcheck_statement_verifier, + )?; - // Matrix A - Rowwise + // Matrix A - Rowwise let mut tau_and_gamma = [FieldElement::from(0); 2]; arthur.fill_challenge_scalars(&mut tau_and_gamma)?; let tau = tau_and_gamma[0]; let gamma = tau_and_gamma[1]; - let gpa_result = gpa_sumcheck_verifier( - arthur, - next_power_of_two(num_rows) + 2, - )?; + let gpa_result = gpa_sumcheck_verifier(arthur, next_power_of_two(num_rows) + 2)?; let claimed_init = gpa_result.claimed_values[0]; let claimed_final = gpa_result.claimed_values[1]; @@ -171,8 +175,7 @@ pub fn verify_spark_single_matrix( let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; let final_cntr: FieldElement = arthur.hint()?; - let mut final_cntr_statement = - Statement::::new(next_power_of_two(num_rows)); + let mut final_cntr_statement = Statement::::new(next_power_of_two(num_rows)); final_cntr_statement.add_constraint( Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), final_cntr, @@ -194,14 +197,9 @@ pub fn verify_spark_single_matrix( let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + final_opening * last_randomness[0]; - println!("Rowwise init {:?}", evaluated_value); //Reilabs Debug: - println!("Rowwise init {:?}", gpa_result.a_last_sumcheck_value); //Reilabs Debug: ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - let gpa_result = gpa_sumcheck_verifier( - arthur, - next_power_of_two(num_nonzero_terms) + 2, - )?; + let gpa_result = gpa_sumcheck_verifier(arthur, next_power_of_two(num_nonzero_terms) + 2)?; let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); @@ -213,28 +211,32 @@ pub fn verify_spark_single_matrix( let rs_timestamp: FieldElement = arthur.hint()?; let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; - let ws_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; - - let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) - + ws_opening * last_randomness[0]; + let ws_opening = + rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; + + let evaluated_value = + rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; - println!("Rowwise rs {:?}", evaluated_value); //Reilabs Debug: - println!("Rowwise rs {:?}", gpa_result.a_last_sumcheck_value); //Reilabs Debug: ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - let mut a_spark_rowwise_statement_verifier = Statement::::new(next_power_of_two( - num_nonzero_terms, - )); + let mut a_spark_rowwise_statement_verifier = + Statement::::new(next_power_of_two(num_nonzero_terms)); a_spark_rowwise_statement_verifier.add_constraint( Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - rs_adr + - rs_mem * a_rowwise_commitment.batching_randomness + - rs_timestamp * a_rowwise_commitment.batching_randomness * a_rowwise_commitment.batching_randomness, + rs_adr + + rs_mem * a_rowwise_commitment.batching_randomness + + rs_timestamp + * a_rowwise_commitment.batching_randomness + * a_rowwise_commitment.batching_randomness, ); let a_rowwise_verifier = Verifier::new(&whir_params.num_terms_3batched); - a_rowwise_verifier.verify(arthur, &a_rowwise_commitment, &a_spark_rowwise_statement_verifier)?; + a_rowwise_verifier.verify( + arthur, + &a_rowwise_commitment, + &a_spark_rowwise_statement_verifier, + )?; ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); @@ -247,10 
+249,7 @@ pub fn verify_spark_single_matrix( // Colwise Init Final GPA - let gpa_result = gpa_sumcheck_verifier( - arthur, - next_power_of_two(num_cols) + 2, - )?; + let gpa_result = gpa_sumcheck_verifier(arthur, next_power_of_two(num_cols) + 2)?; let claimed_init = gpa_result.claimed_values[0]; let claimed_final = gpa_result.claimed_values[1]; @@ -268,8 +267,7 @@ pub fn verify_spark_single_matrix( let final_cntr: FieldElement = arthur.hint()?; - let mut final_cntr_statement = - Statement::::new(next_power_of_two(num_cols)); + let mut final_cntr_statement = Statement::::new(next_power_of_two(num_cols)); final_cntr_statement.add_constraint( Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), final_cntr, @@ -291,16 +289,11 @@ pub fn verify_spark_single_matrix( let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + final_opening * last_randomness[0]; - println!("Colwise init {:?}", evaluated_value); //Reilabs Debug: - println!("Colwise init {:?}", gpa_result.a_last_sumcheck_value); //Reilabs Debug: ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); // Colwise RS WS GPA - let gpa_result = gpa_sumcheck_verifier( - arthur, - next_power_of_two(num_nonzero_terms) + 2, - )?; + let gpa_result = gpa_sumcheck_verifier(arthur, next_power_of_two(num_nonzero_terms) + 2)?; let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); @@ -312,28 +305,32 @@ pub fn verify_spark_single_matrix( let rs_timestamp: FieldElement = arthur.hint()?; let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; - let ws_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; - - let evaluated_value = rs_opening * (FieldElement::one() - last_randomness[0]) - + ws_opening * last_randomness[0]; + let ws_opening = + rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; + + let evaluated_value = + rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; - println!("Colwise rs {:?}", evaluated_value); //Reilabs Debug: - println!(" {:?}", gpa_result.a_last_sumcheck_value); //Reilabs Debug: ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - let mut a_spark_colwise_statement_verifier = Statement::::new(next_power_of_two( - num_nonzero_terms, - )); + let mut a_spark_colwise_statement_verifier = + Statement::::new(next_power_of_two(num_nonzero_terms)); a_spark_colwise_statement_verifier.add_constraint( Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - rs_adr + - rs_mem * a_colwise_commitment.batching_randomness + - rs_timestamp * a_colwise_commitment.batching_randomness * a_colwise_commitment.batching_randomness, + rs_adr + + rs_mem * a_colwise_commitment.batching_randomness + + rs_timestamp + * a_colwise_commitment.batching_randomness + * a_colwise_commitment.batching_randomness, ); let a_colwise_verifier = Verifier::new(&whir_params.num_terms_3batched); - a_colwise_verifier.verify(arthur, &a_colwise_commitment, &a_spark_colwise_statement_verifier)?; + a_colwise_verifier.verify( + arthur, + &a_colwise_commitment, + &a_spark_colwise_statement_verifier, + )?; ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); @@ -372,7 +369,7 @@ pub fn gpa_sumcheck_verifier( arthur: &mut VerifierState, height_of_binary_tree: usize, ) -> Result { - let mut prev_rand = Vec::::new(); + let mut prev_rand; let mut rand = Vec::::new(); let mut claimed_values = [FieldElement::from(0); 2]; 
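    // NOTE (editor): a sketch of the verifier loop below, reading it as a
    // layered grand-product argument (GPA). Each of the
    // `height_of_binary_tree` rounds folds one cubic round polynomial with
    // `eval_cubic_poly`, draws fresh challenges into `rand`, and ties the
    // previous layer's point to the new one via `calculate_eq(prev_rand,
    // rand)`; the two-entry line `l` reduces a layer's two child claims to a
    // single claim on the next layer. On exit, `claimed_values` holds the two
    // leaf claims (init/final or RS/WS), which each caller combines with the
    // last challenge as
    //
    //     evaluated_value = c0 * (1 - r_last) + c1 * r_last
    //
    // before comparing against the hinted openings, as done for each of the
    // four GPA instances in verify_spark_single_matrix.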
let mut l = [FieldElement::from(0); 2]; @@ -431,9 +428,9 @@ pub fn gpa_sumcheck_verifier( } pub struct GPASumcheckResult { - pub claimed_values: Vec, + pub claimed_values: Vec, pub a_last_sumcheck_value: FieldElement, - pub randomness: Vec, + pub randomness: Vec, } pub fn eval_linear_poly(poly: &[FieldElement], point: &FieldElement) -> FieldElement { diff --git a/spark-prover/src/lib.rs b/spark-prover/src/lib.rs index c2e08d50..89db1662 100644 --- a/spark-prover/src/lib.rs +++ b/spark-prover/src/lib.rs @@ -1,5 +1 @@ -pub mod gpa; -pub mod memory; -pub mod spark; pub mod utilities; -pub mod whir; diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index 925aa3f0..968cf740 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -1,38 +1,56 @@ use { - anyhow::{Context, Result}, ark_ff::AdditiveGroup, ark_std::rand::seq::index, provekit_common::{file::write, gnark::WHIRConfigGnark, utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, FieldElement, IOPattern, WhirR1CSScheme}, provekit_r1cs_compiler::WhirR1CSSchemeBuilder, spark_prover::{ - memory::{calculate_e_values_for_r1cs, calculate_memory, EValuesForMatrix, Memory}, - spark::{prove_spark_for_single_matrix, run_spark_sumcheck}, - utilities::{ - calculate_matrix_dimensions, create_io_pattern, deserialize_r1cs, deserialize_request, get_spark_r1cs, iopattern::SPARKDomainSeparator, matrix::{COOMatrix, COOMatrixNew, SparkMatrix, SparkMatrixNew, TimeStamps}, MatrixDimensions, MatrixDimensionsNew, SPARKProof, SPARKProofGnark, SPARKProofGnarkNew - }, - whir::{create_whir_configs, SPARKWHIRConfigsNew}, - }, spongefish::codecs::arkworks_algebra::{FieldDomainSeparator, FieldToUnitSerialize, UnitToField}, std::{collections::BTreeMap, fs::File, io::Write, mem}, whir::{poly_utils::evals::EvaluationsList, whir::{committer::CommitmentWriter, domainsep::WhirDomainSeparator, statement::Statement, utils::HintSerialize}} - argh::FromArgs, + anyhow::{Context, Result}, + ark_ff::AdditiveGroup, + provekit_common::{ + gnark::WHIRConfigGnark, + utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, + FieldElement, IOPattern, WhirR1CSScheme, + }, + provekit_r1cs_compiler::WhirR1CSSchemeBuilder, + spark_prover::utilities::{ + deserialize_r1cs, deserialize_request, + iopattern::SPARKDomainSeparator, + matrix::{COOMatrix, SparkMatrix, TimeStamps}, + memory::{calculate_memory, EValuesForMatrix}, + spark::prove_spark_for_single_matrix, + whir::SPARKWHIRConfigsNew, + MatrixDimensionsNew, SPARKProof, SPARKProofGnarkNew, + }, + spongefish::codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, + std::{collections::BTreeMap, fs::File, io::Write}, + whir::whir::{domainsep::WhirDomainSeparator, utils::HintSerialize}, }; fn main() -> Result<()> { - // Run once when receiving the matrix let r1cs = deserialize_r1cs("spark-prover/r1cs.json") .context("Error: Failed to create the R1CS object")?; // get combined matrix non-zero value coordinates - let mut combined_matrix_map: BTreeMap<(usize, usize), FieldElement> = r1cs.a().iter().map(|(coordinate, _)| (coordinate, FieldElement::ZERO)).collect(); + let mut combined_matrix_map: BTreeMap<(usize, usize), FieldElement> = r1cs + .a() + .iter() + .map(|(coordinate, _)| (coordinate, FieldElement::ZERO)) + .collect(); for (coordinate, _) in r1cs.b().iter() { - combined_matrix_map.entry(coordinate).or_insert(FieldElement::ZERO); + combined_matrix_map + .entry(coordinate) + .or_insert(FieldElement::ZERO); } for (coordinate, _) in r1cs.c().iter() { - 
combined_matrix_map.entry(coordinate).or_insert(FieldElement::ZERO); + combined_matrix_map + .entry(coordinate) + .or_insert(FieldElement::ZERO); } // generate padded row and col let originial_num_entries = combined_matrix_map.keys().count(); - let padded_num_entries = 1< Result<()> { col.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); // generate val vectors + let mut val_a = vec![FieldElement::ZERO; padded_num_entries]; let mut val_b = vec![FieldElement::ZERO; padded_num_entries]; let mut val_c = vec![FieldElement::ZERO; padded_num_entries]; @@ -54,7 +73,7 @@ fn main() -> Result<()> { let mut a_iter = a_binding.iter(); let mut b_iter = b_binding.iter(); let mut c_iter = c_binding.iter(); - + let mut a_cur = a_iter.next(); let mut b_cur = b_iter.next(); let mut c_cur = c_iter.next(); @@ -83,7 +102,7 @@ fn main() -> Result<()> { } // generate padded timestamps - + let mut read_row_counters = vec![0; r1cs.num_constraints()]; let mut read_col_counters = vec![0; r1cs.num_witnesses()]; let mut read_row = Vec::with_capacity(padded_num_entries); @@ -126,15 +145,20 @@ fn main() -> Result<()> { e_rx.push(memory.eq_rx[*r]); e_ry.push(memory.eq_ry[*c]); } - + e_rx.extend(std::iter::repeat(memory.eq_rx[0]).take(to_fill)); e_ry.extend(std::iter::repeat(memory.eq_ry[0]).take(to_fill)); - + // Create whir config - let row_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.num_constraints()), 1); - let col_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.num_witnesses()), 1); - let num_terms_3batched_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 3); - let num_terms_5batched_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 5); + + let row_config = + WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.num_constraints()), 1); + let col_config = + WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.num_witnesses()), 1); + let num_terms_3batched_config = + WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 3); + let num_terms_5batched_config = + WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 5); // Create io_pattern let mut io = IOPattern::new("💥"); @@ -145,7 +169,6 @@ fn main() -> Result<()> { .hint("point_col") .add_claimed_evaluations(); - io = io .commit_statement(&num_terms_5batched_config) .commit_statement(&num_terms_3batched_config) @@ -155,7 +178,7 @@ fn main() -> Result<()> { .add_sumcheck_polynomials(next_power_of_two(padded_num_entries)) .hint("sumcheck_last_folds") .add_whir_proof(&num_terms_5batched_config); - + // Rowwise io = io.add_tau_and_gamma(); @@ -169,8 +192,6 @@ fn main() -> Result<()> { .hint("row_final_counter_claimed_evaluation") .add_whir_proof(&row_config); - // Can I send all hints once in struct? 
- for i in 0..=next_power_of_two(padded_num_entries) { io = io.add_sumcheck_polynomials(i); io = io.add_line(); @@ -206,7 +227,6 @@ fn main() -> Result<()> { .hint("col_rs_timestamp_claimed_evaluation") .add_whir_proof(&num_terms_3batched_config); - // Prover let mut merlin = io.to_prover_state(); @@ -214,23 +234,34 @@ fn main() -> Result<()> { merlin.hint(&request.point_to_evaluate.row)?; merlin.hint(&request.point_to_evaluate.col)?; - // Calculate the RLC of the matrices (can be also calculated from rlc of val_a, val_b, val_c) - merlin.add_scalars(&[request.claimed_values.a, request.claimed_values.b, request.claimed_values.c])?; + // Calculate the RLC of the matrices + // Note: can be also calculated from rlc of val_a, val_b, val_c + merlin.add_scalars(&[ + request.claimed_values.a, + request.claimed_values.b, + request.claimed_values.c, + ])?; let mut matrix_batching_randomness = [FieldElement::ZERO; 1]; merlin.fill_challenge_scalars(&mut matrix_batching_randomness)?; let matrix_batching_randomness = matrix_batching_randomness[0]; let matrix_batching_randomness_sq = matrix_batching_randomness * matrix_batching_randomness; for (coordinate, value) in r1cs.a().iter() { - combined_matrix_map.entry(coordinate).and_modify(|cur| *cur += value); + combined_matrix_map + .entry(coordinate) + .and_modify(|cur| *cur += value); } for (coordinate, value) in r1cs.b().iter() { - combined_matrix_map.entry(coordinate).and_modify(|cur| *cur += value * matrix_batching_randomness); + combined_matrix_map + .entry(coordinate) + .and_modify(|cur| *cur += value * matrix_batching_randomness); } for (coordinate, value) in r1cs.c().iter() { - combined_matrix_map.entry(coordinate).and_modify(|cur| *cur += value * matrix_batching_randomness_sq); + combined_matrix_map + .entry(coordinate) + .and_modify(|cur| *cur += value * matrix_batching_randomness_sq); } let mut val = Vec::with_capacity(padded_num_entries); @@ -239,15 +270,14 @@ fn main() -> Result<()> { } val.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); - let claimed_value = - request.claimed_values.a + - request.claimed_values.b * matrix_batching_randomness + - request.claimed_values.c * matrix_batching_randomness_sq; + let claimed_value = request.claimed_values.a + + request.claimed_values.b * matrix_batching_randomness + + request.claimed_values.c * matrix_batching_randomness_sq; - // Bundle values + // - let spark_matrix = SparkMatrixNew { - coo: COOMatrixNew{ + let spark_matrix = SparkMatrix { + coo: COOMatrix { row, col, val, @@ -260,21 +290,18 @@ fn main() -> Result<()> { read_col, final_row, final_col, - } + }, }; - let e_values = EValuesForMatrix { - e_rx, - e_ry, - }; + let e_values = EValuesForMatrix { e_rx, e_ry }; let configs = SPARKWHIRConfigsNew { - row: row_config, - col: col_config, + row: row_config, + col: col_config, num_terms_3batched: num_terms_3batched_config, num_terms_5batched: num_terms_5batched_config, }; - + prove_spark_for_single_matrix( &mut merlin, spark_matrix, @@ -289,8 +316,8 @@ fn main() -> Result<()> { io_pattern: String::from_utf8(io.as_bytes().to_vec()).unwrap(), whir_params: configs, matrix_dimensions: MatrixDimensionsNew { - num_rows: r1cs.num_constraints(), - num_cols: r1cs.num_witnesses(), + num_rows: r1cs.num_constraints(), + num_cols: r1cs.num_witnesses(), nonzero_terms: originial_num_entries, }, }; @@ -303,10 +330,10 @@ fn main() -> Result<()> { .expect("Writing gnark parameters to a file failed"); let spark_proof_gnark = SPARKProofGnarkNew { - transcript: spark_proof.transcript, - io_pattern: 
spark_proof.io_pattern, - whir_row: WHIRConfigGnark::new(&spark_proof.whir_params.row), - whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col), + transcript: spark_proof.transcript, + io_pattern: spark_proof.io_pattern, + whir_row: WHIRConfigGnark::new(&spark_proof.whir_params.row), + whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col), whir_3batched: WHIRConfigGnark::new(&spark_proof.whir_params.num_terms_3batched), whir_5batched: WHIRConfigGnark::new(&spark_proof.whir_params.num_terms_5batched), log_num_terms: next_power_of_two(padded_num_entries), @@ -316,7 +343,11 @@ fn main() -> Result<()> { .context("Error: Failed to create the spark proof file")?; gnark_spark_proof_file - .write_all(serde_json::to_string(&spark_proof_gnark).unwrap().as_bytes()) + .write_all( + serde_json::to_string(&spark_proof_gnark) + .unwrap() + .as_bytes(), + ) .expect("Writing spark gnark parameters to a file failed"); Ok(()) diff --git a/spark-prover/src/memory.rs b/spark-prover/src/memory.rs deleted file mode 100644 index 9d7ebab0..00000000 --- a/spark-prover/src/memory.rs +++ /dev/null @@ -1,61 +0,0 @@ -use { - provekit_common::{ - spark::Point, utils::{next_power_of_two, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq}, FieldElement, HydratedSparseMatrix, R1CS - }, -}; - -#[derive(Debug)] -pub struct Memory { - pub eq_rx: Vec, - pub eq_ry: Vec, -} - -#[derive(Debug)] -pub struct EValuesForMatrix { - pub e_rx: Vec, - pub e_ry: Vec, -} - -#[derive(Debug)] -pub struct EValues { - pub a: EValuesForMatrix, - pub b: EValuesForMatrix, - pub c: EValuesForMatrix, -} - -pub fn calculate_memory(point_to_evaluate: Point) -> Memory { - Memory { - eq_rx: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.row), - eq_ry: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.col[1..]).iter().map(|x| *x * (FieldElement::from(1) - point_to_evaluate.col[0])).collect(), - } -} - -pub fn calculate_e_values_for_r1cs(memory: &Memory, r1cs: &R1CS) -> EValues { - EValues { - a: calculate_e_values_for_matrix(memory, &r1cs.a()), - b: calculate_e_values_for_matrix(memory, &r1cs.b()), - c: calculate_e_values_for_matrix(memory, &r1cs.c()), - } -} - -pub fn calculate_e_values_for_matrix( - memory: &Memory, - matrix: &HydratedSparseMatrix, -) -> EValuesForMatrix { - let mut e_rx = Vec::::new(); - let mut e_ry = Vec::::new(); - - for ((r, c), _) in matrix.iter() { - e_rx.push(memory.eq_rx[r]); - e_ry.push(memory.eq_ry[c]); - } - - let to_pad = (1< Self; + fn add_line(self) -> Self; + fn add_claimed_evaluations(self) -> Self; +} + +impl SPARKDomainSeparator for IOPattern +where + IOPattern: FieldDomainSeparator, +{ + fn add_tau_and_gamma(self) -> Self { + self.challenge_scalars(2, "tau and gamma") + } + + fn add_line(self) -> Self { + self.add_scalars(2, "gpa line") + .challenge_scalars(1, "gpa line random") + } + + fn add_claimed_evaluations(self) -> Self { + self.add_scalars(3, "claimed evaluations") + .challenge_scalars(1, "matrix combination randomness") + } +} diff --git a/spark-prover/src/utilities/iopattern/mod.rs b/spark-prover/src/utilities/iopattern/mod.rs deleted file mode 100644 index 3657a3f6..00000000 --- a/spark-prover/src/utilities/iopattern/mod.rs +++ /dev/null @@ -1,228 +0,0 @@ -use { - crate::whir::SPARKWHIRConfigs, - provekit_common::{ - utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, - FieldElement, IOPattern, R1CS, - }, - spongefish::codecs::arkworks_algebra::FieldDomainSeparator, - whir::whir::domainsep::WhirDomainSeparator, -}; - -pub trait 
SPARKDomainSeparator { - fn add_tau_and_gamma(self) -> Self; - fn add_line(self) -> Self; - fn add_claimed_evaluations(self) -> Self; -} - -impl SPARKDomainSeparator for IOPattern -where - IOPattern: FieldDomainSeparator, -{ - fn add_tau_and_gamma(self) -> Self { - self.challenge_scalars(2, "tau and gamma") - } - - fn add_line(self) -> Self { - self.add_scalars(2, "gpa line") - .challenge_scalars(1, "gpa line random") - } - - fn add_claimed_evaluations(self) -> Self { - self.add_scalars(3, "claimed evaluations") - .challenge_scalars(1, "matrix combination randomness") - } -} - -pub fn create_io_pattern(r1cs: &R1CS, configs: &SPARKWHIRConfigs) -> IOPattern { - let mut io = IOPattern::new("💥"); - - // Matrix A - io = io - .hint("claimed_a") - .hint("claimed_b") - .hint("claimed_c") - .hint("point_row") - .hint("point_col"); - - - io = io - .commit_statement(&configs.a_3batched) - .commit_statement(&configs.a_3batched) - .commit_statement(&configs.a_3batched) - .commit_statement(&configs.row) - .commit_statement(&configs.col) - .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) - .hint("sumcheck_last_folds") - .add_whir_proof(&configs.a_3batched); - - // Rowwise - - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.a.num_rows) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("row_final_counter_claimed_evaluation") - .add_whir_proof(&configs.row); - - // Can I send all hints once in struct? - - for i in 0..=next_power_of_two(r1cs.a.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("row_rs_address_claimed_evaluation") - .hint("row_rs_value_claimed_evaluation") - .hint("row_rs_timestamp_claimed_evaluation") - .add_whir_proof(&configs.a_3batched); - - // Colwise - - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.a.num_cols) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("col_final_counter_claimed_evaluation") - .add_whir_proof(&configs.col); - - for i in 0..=next_power_of_two(r1cs.a.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("col_rs_address_claimed_evaluation") - .hint("col_rs_value_claimed_evaluation") - .hint("col_rs_timestamp_claimed_evaluation") - .add_whir_proof(&configs.a_3batched); - - // Matrix B - - io = io - .commit_statement(&configs.b_3batched) - .commit_statement(&configs.b_3batched) - .commit_statement(&configs.b_3batched) - .commit_statement(&configs.row) - .commit_statement(&configs.col) - .add_sumcheck_polynomials(next_power_of_two(r1cs.a.num_entries())) - .hint("sumcheck_last_folds") - .add_whir_proof(&configs.b_3batched); - - // Rowwise - - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.b.num_rows) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("row_final_counter_claimed_evaluation") - .add_whir_proof(&configs.row); - - for i in 0..=next_power_of_two(r1cs.b.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("row_rs_address_claimed_evaluation") - .hint("row_rs_value_claimed_evaluation") - .hint("row_rs_timestamp_claimed_evaluation") - .add_whir_proof(&configs.b_3batched); - - // Colwise - - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.b.num_cols) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("col_final_counter_claimed_evaluation") - .add_whir_proof(&configs.col); 
- - for i in 0..=next_power_of_two(r1cs.b.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("col_rs_address_claimed_evaluation") - .hint("col_rs_value_claimed_evaluation") - .hint("col_rs_timestamp_claimed_evaluation") - .add_whir_proof(&configs.b_3batched); - - // Matrix C - - io = io - .commit_statement(&configs.c_3batched) - .commit_statement(&configs.c_3batched) - .commit_statement(&configs.c_3batched) - .commit_statement(&configs.row) - .commit_statement(&configs.col) - .add_sumcheck_polynomials(next_power_of_two(r1cs.c.num_entries())) - .hint("sumcheck_last_folds") - .add_whir_proof(&configs.c_3batched); - - // Rowwise - - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.c.num_rows) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("row_final_counter_claimed_evaluation") - .add_whir_proof(&configs.row); - - for i in 0..=next_power_of_two(r1cs.c.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("row_rs_address_claimed_evaluation") - .hint("row_rs_value_claimed_evaluation") - .hint("row_rs_timestamp_claimed_evaluation") - .add_whir_proof(&configs.c_3batched); - - // Colwise - - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.c.num_cols) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("col_final_counter_claimed_evaluation") - .add_whir_proof(&configs.col); - - for i in 0..=next_power_of_two(r1cs.c.num_entries()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("col_rs_address_claimed_evaluation") - .hint("col_rs_value_claimed_evaluation") - .hint("col_rs_timestamp_claimed_evaluation") - .add_whir_proof(&configs.c_3batched); - io -} diff --git a/spark-prover/src/utilities/matrix.rs b/spark-prover/src/utilities/matrix.rs new file mode 100644 index 00000000..31f02778 --- /dev/null +++ b/spark-prover/src/utilities/matrix.rs @@ -0,0 +1,23 @@ +use provekit_common::FieldElement; + +#[derive(Debug)] +pub struct SparkMatrix { + pub coo: COOMatrix, + pub timestamps: TimeStamps, +} +#[derive(Debug)] +pub struct COOMatrix { + pub row: Vec, + pub col: Vec, + pub val: Vec, + pub val_a: Vec, + pub val_b: Vec, + pub val_c: Vec, +} +#[derive(Debug)] +pub struct TimeStamps { + pub read_row: Vec, + pub read_col: Vec, + pub final_row: Vec, + pub final_col: Vec, +} diff --git a/spark-prover/src/utilities/matrix/mod.rs b/spark-prover/src/utilities/matrix/mod.rs deleted file mode 100644 index e3d19521..00000000 --- a/spark-prover/src/utilities/matrix/mod.rs +++ /dev/null @@ -1,117 +0,0 @@ -use ark_ff::Field; -use provekit_common::{utils::next_power_of_two, FieldElement, HydratedSparseMatrix, SparseMatrix, R1CS}; - -#[derive(Debug)] -pub struct SparkR1CS { - pub a: SparkMatrix, - pub b: SparkMatrix, - pub c: SparkMatrix, -} -#[derive(Debug)] -pub struct SparkMatrix { - pub coo: COOMatrix, - pub timestamps: TimeStamps, -} -#[derive(Debug)] -pub struct SparkMatrixNew { - pub coo: COOMatrixNew, - pub timestamps: TimeStamps, -} -#[derive(Debug)] -pub struct COOMatrix { - pub row: Vec, - pub col: Vec, - pub val: Vec, -} -#[derive(Debug)] -pub struct COOMatrixNew { - pub row: Vec, - pub col: Vec, - pub val: Vec, - pub val_a: Vec, - pub val_b: Vec, - pub val_c: Vec, -} -#[derive(Debug)] -pub struct TimeStamps { - pub read_row: Vec, - pub read_col: Vec, - pub final_row: Vec, - pub final_col: Vec, -} - -pub fn get_spark_r1cs(r1cs: &R1CS) -> SparkR1CS { - SparkR1CS 
{ - a: get_spark_matrix(&r1cs.a()), - b: get_spark_matrix(&r1cs.b()), - c: get_spark_matrix(&r1cs.c()), - } -} - -pub fn get_spark_matrix(matrix: &HydratedSparseMatrix) -> SparkMatrix { - SparkMatrix { - coo: get_coordinate_rep_of_a_matrix(matrix), - timestamps: calculate_timestamps(matrix), - } -} - -pub fn get_coordinate_rep_of_a_matrix(matrix: &HydratedSparseMatrix) -> COOMatrix { - let mut row = Vec::::new(); - let mut col = Vec::::new(); - let mut val = Vec::::new(); - - for ((r, c), value) in matrix.iter() { - row.push(FieldElement::from(r as u64)); - col.push(FieldElement::from(c as u64)); - val.push(value.clone()); - } - - let to_pad = (1< TimeStamps { - let mut read_row_counters = vec![0; matrix.matrix.num_rows]; - let mut read_row = Vec::::new(); - let mut read_col_counters = vec![0; matrix.matrix.num_cols]; - let mut read_col = Vec::::new(); - - for ((r, c), _) in matrix.iter() { - read_row.push(FieldElement::from(read_row_counters[r] as u64)); - read_row_counters[r] += 1; - read_col.push(FieldElement::from(read_col_counters[c] as u64)); - read_col_counters[c] += 1; - } - - let to_pad = (1<>(); - - let final_col = read_col_counters - .iter() - .map(|&x| FieldElement::from(x as u64)) - .collect::>(); - - TimeStamps { - read_row, - read_col, - final_row, - final_col, - } -} diff --git a/spark-prover/src/utilities/memory.rs b/spark-prover/src/utilities/memory.rs new file mode 100644 index 00000000..e3b6420e --- /dev/null +++ b/spark-prover/src/utilities/memory.rs @@ -0,0 +1,33 @@ +use provekit_common::{ + spark::Point, utils::sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq, + FieldElement, +}; + +#[derive(Debug)] +pub struct Memory { + pub eq_rx: Vec, + pub eq_ry: Vec, +} + +#[derive(Debug)] +pub struct EValuesForMatrix { + pub e_rx: Vec, + pub e_ry: Vec, +} + +#[derive(Debug)] +pub struct EValues { + pub a: EValuesForMatrix, + pub b: EValuesForMatrix, + pub c: EValuesForMatrix, +} + +pub fn calculate_memory(point_to_evaluate: Point) -> Memory { + Memory { + eq_rx: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.row), + eq_ry: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.col[1..]) + .iter() + .map(|x| *x * (FieldElement::from(1) - point_to_evaluate.col[0])) + .collect(), + } +} diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index 5cb347f6..6866d56b 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -1,22 +1,27 @@ +pub mod gpa; pub mod iopattern; pub mod matrix; +pub mod memory; +pub mod spark; +pub mod whir; + use { - crate::whir::{SPARKWHIRConfigs, SPARKWHIRConfigsNew}, + crate::utilities::whir::SPARKWHIRConfigsNew, anyhow::{Context, Result}, provekit_common::{ - gnark::WHIRConfigGnark, spark::SPARKRequest, utils::{next_power_of_two, serde_ark, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq}, FieldElement, HydratedSparseMatrix, WhirConfig, R1CS + gnark::WHIRConfigGnark, spark::SPARKRequest, utils::next_power_of_two, R1CS, }, serde::{Deserialize, Serialize}, std::fs, }; -pub use {iopattern::create_io_pattern, matrix::get_spark_r1cs}; pub fn deserialize_r1cs(path_str: &str) -> Result { let json_str = fs::read_to_string(path_str).context("Error: Failed to open the r1cs.json file")?; - let mut r1cs: R1CS = serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS")?; + let mut r1cs: R1CS = + serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS")?; r1cs.grow_matrices( - 1< 
MatrixDimensions { @@ -65,13 +69,13 @@ pub fn calculate_matrix_dimensions(r1cs: &R1CS) -> MatrixDimensions { #[derive(Serialize, Deserialize)] pub struct SPARKProofGnark { - pub transcript: Vec, - pub io_pattern: String, - pub whir_row: WHIRConfigGnark, - pub whir_col: WHIRConfigGnark, - pub whir_a3: WHIRConfigGnark, - pub whir_b3: WHIRConfigGnark, - pub whir_c3: WHIRConfigGnark, + pub transcript: Vec, + pub io_pattern: String, + pub whir_row: WHIRConfigGnark, + pub whir_col: WHIRConfigGnark, + pub whir_a3: WHIRConfigGnark, + pub whir_b3: WHIRConfigGnark, + pub whir_c3: WHIRConfigGnark, pub log_a_num_terms: usize, pub log_b_num_terms: usize, pub log_c_num_terms: usize, @@ -79,10 +83,10 @@ pub struct SPARKProofGnark { #[derive(Serialize, Deserialize)] pub struct SPARKProofGnarkNew { - pub transcript: Vec, - pub io_pattern: String, - pub whir_row: WHIRConfigGnark, - pub whir_col: WHIRConfigGnark, + pub transcript: Vec, + pub io_pattern: String, + pub whir_row: WHIRConfigGnark, + pub whir_col: WHIRConfigGnark, pub whir_3batched: WHIRConfigGnark, pub whir_5batched: WHIRConfigGnark, pub log_num_terms: usize, diff --git a/spark-prover/src/spark.rs b/spark-prover/src/utilities/spark.rs similarity index 78% rename from spark-prover/src/spark.rs rename to spark-prover/src/utilities/spark.rs index d59ff127..7818a3b6 100644 --- a/spark-prover/src/spark.rs +++ b/spark-prover/src/utilities/spark.rs @@ -1,11 +1,11 @@ use { - crate::{ + crate::utilities::{ gpa::run_gpa, + matrix::SparkMatrix, memory::{EValuesForMatrix, Memory}, - utilities::matrix::{SparkMatrix, SparkMatrixNew}, whir::{commit_to_vector, produce_whir_proof, SPARKWHIRConfigsNew}, }, - anyhow::{ensure, Result}, + anyhow::Result, itertools::izip, provekit_common::{ skyscraper::SkyscraperSponge, @@ -13,21 +13,21 @@ use { sumcheck::{eval_cubic_poly, sumcheck_fold_map_reduce}, HALF, }, - FieldElement, WhirConfig, + FieldElement, }, spongefish::{ codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, ProverState, }, whir::{ - poly_utils::{evals::EvaluationsList, fold, multilinear::MultilinearPoint}, - whir::{committer::CommitmentWriter, prover::Prover, statement::{Statement, Weights}, utils::HintSerialize}, + poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, + whir::{committer::CommitmentWriter, utils::HintSerialize}, }, }; pub fn prove_spark_for_single_matrix( merlin: &mut ProverState, - matrix: SparkMatrixNew, + matrix: SparkMatrix, memory: &Memory, e_values: EValuesForMatrix, claimed_value: FieldElement, @@ -58,9 +58,11 @@ pub fn prove_spark_for_single_matrix( EvaluationsList::new(matrix.timestamps.read_col.clone()).to_coeffs(), ])?; - let final_row_ts_witness = commit_to_vector(&row_committer, merlin, matrix.timestamps.final_row.clone()); - let final_col_ts_witness = commit_to_vector(&col_committer, merlin, matrix.timestamps.final_col.clone()); - + let final_row_ts_witness = + commit_to_vector(&row_committer, merlin, matrix.timestamps.final_row.clone()); + let final_col_ts_witness = + commit_to_vector(&col_committer, merlin, matrix.timestamps.final_col.clone()); + // Sumcheck let mles = [ @@ -79,9 +81,16 @@ pub fn prove_spark_for_single_matrix( let val_c_eval = EvaluationsList::new(matrix.coo.val_c.clone()) .evaluate(&MultilinearPoint(folding_randomness.to_vec().clone())); - merlin.hint::>(&[val_a_eval, val_b_eval, val_c_eval, sumcheck_final_folds[1], sumcheck_final_folds[2]].to_vec())?; - - let mut sumcheck_statement = Statement::::new(folding_randomness.len()); + merlin.hint::>( + &[ + val_a_eval, + val_b_eval, + 
val_c_eval, + sumcheck_final_folds[1], + sumcheck_final_folds[2], + ] + .to_vec(), + )?; let mut batching_randomness = Vec::with_capacity(5); let mut cur = FieldElement::from(1); @@ -90,24 +99,19 @@ pub fn prove_spark_for_single_matrix( cur *= sumcheck_witness.batching_randomness; } - - - let claimed_batched_value = - val_a_eval * batching_randomness[0] + - val_b_eval * batching_randomness[1] + - val_c_eval * batching_randomness[2] + - sumcheck_final_folds[1] * batching_randomness[3] + - sumcheck_final_folds[2] * batching_randomness[4]; + let claimed_batched_value = val_a_eval * batching_randomness[0] + + val_b_eval * batching_randomness[1] + + val_c_eval * batching_randomness[2] + + sumcheck_final_folds[1] * batching_randomness[3] + + sumcheck_final_folds[2] * batching_randomness[4]; - println!("{:?}", batching_randomness); //Reilabs Debug: - println!("{:?}", claimed_batched_value); //Reilabs Debug: - println!("{:?}", sumcheck_witness.batched_poly().evaluate(&MultilinearPoint(folding_randomness.to_vec().clone()))); //Reilabs Debug: - - sumcheck_statement.add_constraint( - Weights::evaluation(MultilinearPoint(folding_randomness.clone())), claimed_batched_value); - - let sumcheck_prover = Prover::new(whir_configs.num_terms_5batched.clone()); - sumcheck_prover.prove(merlin, sumcheck_statement, sumcheck_witness)?; + produce_whir_proof( + merlin, + MultilinearPoint(folding_randomness.to_vec()), + claimed_batched_value, + whir_configs.num_terms_5batched.clone(), + sumcheck_witness, + )?; // Rowwise @@ -137,7 +141,7 @@ pub fn prove_spark_for_single_matrix( let final_vec: Vec = izip!(final_address, final_value, final_timestamp) .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) .collect(); - + let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); @@ -186,7 +190,7 @@ pub fn prove_spark_for_single_matrix( let rs_address_eval = EvaluationsList::new(rs_address) .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); merlin.hint(&rs_address_eval)?; - + let rs_value_eval = EvaluationsList::new(rs_value) .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); merlin.hint(&rs_value_eval)?; @@ -195,22 +199,28 @@ pub fn prove_spark_for_single_matrix( .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); merlin.hint(&rs_timestamp_eval)?; - let mut rowwise_statement = Statement::::new(evaluation_randomness.len()); - - let claimed_rowwise_eval = - rs_address_eval + - rs_value_eval * rowwise_witness.batching_randomness + - rs_timestamp_eval * rowwise_witness.batching_randomness * rowwise_witness.batching_randomness; + let claimed_rowwise_eval = rs_address_eval + + rs_value_eval * rowwise_witness.batching_randomness + + rs_timestamp_eval + * rowwise_witness.batching_randomness + * rowwise_witness.batching_randomness; - assert!(claimed_rowwise_eval == rowwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); + assert!( + claimed_rowwise_eval + == rowwise_witness + .batched_poly() + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec())) + ); - rowwise_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_rowwise_eval); - - let sumcheck_prover = Prover::new(whir_configs.num_terms_3batched.clone()); - sumcheck_prover.prove(merlin, rowwise_statement, rowwise_witness)?; + produce_whir_proof( + merlin, + MultilinearPoint(evaluation_randomness.to_vec()), + claimed_rowwise_eval, + 
whir_configs.num_terms_3batched.clone(), + rowwise_witness, + )?; - // Colwise + // Colwise // Colwise Init Final GPA @@ -287,7 +297,7 @@ pub fn prove_spark_for_single_matrix( let rs_address_eval = EvaluationsList::new(rs_address) .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); merlin.hint(&rs_address_eval)?; - + let rs_value_eval = EvaluationsList::new(rs_value) .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); merlin.hint(&rs_value_eval)?; @@ -296,20 +306,26 @@ pub fn prove_spark_for_single_matrix( .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); merlin.hint(&rs_timestamp_eval)?; - let mut colwise_statement = Statement::::new(evaluation_randomness.len()); + let claimed_colwise_eval = rs_address_eval + + rs_value_eval * colwise_witness.batching_randomness + + rs_timestamp_eval + * colwise_witness.batching_randomness + * colwise_witness.batching_randomness; - let claimed_colwise_eval = - rs_address_eval + - rs_value_eval * colwise_witness.batching_randomness + - rs_timestamp_eval * colwise_witness.batching_randomness * colwise_witness.batching_randomness; + assert!( + claimed_colwise_eval + == colwise_witness + .batched_poly() + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec())) + ); - assert!(claimed_colwise_eval == colwise_witness.batched_poly().evaluate(&MultilinearPoint(evaluation_randomness.to_vec()))); - - colwise_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), claimed_colwise_eval); - - let sumcheck_prover = Prover::new(whir_configs.num_terms_3batched.clone()); - sumcheck_prover.prove(merlin, colwise_statement, colwise_witness)?; + produce_whir_proof( + merlin, + MultilinearPoint(evaluation_randomness.to_vec()), + claimed_colwise_eval, + whir_configs.num_terms_3batched.clone(), + colwise_witness, + )?; Ok(()) } diff --git a/spark-prover/src/whir.rs b/spark-prover/src/utilities/whir.rs similarity index 55% rename from spark-prover/src/whir.rs rename to spark-prover/src/utilities/whir.rs index fe3f0072..645e8189 100644 --- a/spark-prover/src/whir.rs +++ b/spark-prover/src/utilities/whir.rs @@ -2,10 +2,8 @@ use { anyhow::{Context, Result}, provekit_common::{ skyscraper::{SkyscraperMerkleConfig, SkyscraperPoW, SkyscraperSponge}, - utils::next_power_of_two, - FieldElement, WhirConfig, WhirR1CSScheme, R1CS, + FieldElement, WhirConfig, }, - provekit_r1cs_compiler::WhirR1CSSchemeBuilder, serde::{Deserialize, Serialize}, spongefish::ProverState, whir::{ @@ -36,35 +34,22 @@ pub fn commit_to_vector( #[derive(Serialize, Deserialize)] pub struct SPARKWHIRConfigs { - pub row: WhirConfig, - pub col: WhirConfig, - pub a: WhirConfig, - pub b: WhirConfig, - pub c: WhirConfig, - pub a_3batched: WhirConfig, - pub b_3batched: WhirConfig, - pub c_3batched: WhirConfig, + pub row: WhirConfig, + pub col: WhirConfig, + pub a: WhirConfig, + pub b: WhirConfig, + pub c: WhirConfig, + pub a_3batched: WhirConfig, + pub b_3batched: WhirConfig, + pub c_3batched: WhirConfig, } #[derive(Serialize, Deserialize)] pub struct SPARKWHIRConfigsNew { - pub row: WhirConfig, - pub col: WhirConfig, - pub num_terms_3batched: WhirConfig, - pub num_terms_5batched: WhirConfig, -} - -pub fn create_whir_configs(r1cs: &R1CS) -> SPARKWHIRConfigs { - SPARKWHIRConfigs { - row: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_rows), 1), - col: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_cols), 1), - a: 
WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 1), - b: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.b.num_entries()), 1), - c: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.c.num_entries()), 1), - a_3batched: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.a.num_entries()), 3), - b_3batched: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.b.num_entries()), 3), - c_3batched: WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.c.num_entries()), 3), - } + pub row: WhirConfig, + pub col: WhirConfig, + pub num_terms_3batched: WhirConfig, + pub num_terms_5batched: WhirConfig, } pub fn produce_whir_proof( From a0b8b8d09d9356b8fe56ae53a679e314381e4cca Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Fri, 3 Oct 2025 16:33:05 +0800 Subject: [PATCH 26/34] cleanup circuit --- recursive-verifier/app/circuit/circuit.go | 89 ++++++++++------------- recursive-verifier/app/circuit/common.go | 20 ++--- recursive-verifier/app/circuit/mt.go | 6 -- recursive-verifier/app/circuit/types.go | 11 +-- 4 files changed, 50 insertions(+), 76 deletions(-) diff --git a/recursive-verifier/app/circuit/circuit.go b/recursive-verifier/app/circuit/circuit.go index 3db6a053..bb5c20ab 100644 --- a/recursive-verifier/app/circuit/circuit.go +++ b/recursive-verifier/app/circuit/circuit.go @@ -19,7 +19,6 @@ import ( ) type Circuit struct { - // Inputs WitnessLinearStatementEvaluations []frontend.Variable HidingSpartanLinearStatementEvaluations []frontend.Variable LogNumConstraints int @@ -32,13 +31,11 @@ type Circuit struct { WitnessMerkle Merkle WitnessFirstRound Merkle WHIRParamsWitness WHIRParams - // Is this not used? - WHIRParamsHidingSpartan WHIRParams + WHIRParamsHidingSpartan WHIRParams MatrixA []MatrixCell MatrixB []MatrixCell MatrixC []MatrixCell - // Public Input IO []byte UseSpark bool @@ -46,13 +43,14 @@ type Circuit struct { SPARKIO []byte Transcript []uints.U8 - WHIRRow WHIRParams - WHIRCol WHIRParams + + WHIRRow WHIRParams + WHIRCol WHIRParams PointRow []frontend.Variable PointCol []frontend.Variable - SparkA SPARKMatrixData + SparkRLC SPARKMatrixData } func (circuit *Circuit) Define(api frontend.API) error { @@ -106,7 +104,7 @@ func (circuit *Circuit) Define(api frontend.API) error { arthur, uapi, sc, - circuit.SparkA, + circuit.SparkRLC, circuit, ) if err != nil { @@ -150,13 +148,13 @@ func verifyCircuit( witnessLinearStatementEvaluations[1] = typeConverters.LimbsToBigIntMod(deferred[2].Limbs) witnessLinearStatementEvaluations[2] = typeConverters.LimbsToBigIntMod(deferred[3].Limbs) - acontSparkSumcheckLast := make([]frontend.Variable, 5) - asparkSumcheckLast := make([]frontend.Variable, 5) - asparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[0].Limbs) - asparkSumcheckLast[1] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[1].Limbs) - asparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[2].Limbs) - asparkSumcheckLast[3] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[3].Limbs) - asparkSumcheckLast[4] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[4].Limbs) + contSparkSumcheckLast := make([]frontend.Variable, 5) + sparkSumcheckLast := make([]frontend.Variable, 5) + sparkSumcheckLast[0] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[0].Limbs) + sparkSumcheckLast[1] = 
typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[1].Limbs) + sparkSumcheckLast[2] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[2].Limbs) + sparkSumcheckLast[3] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[3].Limbs) + sparkSumcheckLast[4] = typeConverters.LimbsToBigIntMod(hints.AHints.sparkClaimedEvaluations[4].Limbs) contPointRow := make([]frontend.Variable, len(hints.pointRow)) pointRow := make([]frontend.Variable, len(hints.pointRow)) @@ -218,7 +216,7 @@ func verifyCircuit( } useSpark := evaluation == "spark" - // + var circuit = Circuit{ IO: []byte(cfg.IOPattern), Transcript: contTranscript, @@ -249,10 +247,10 @@ func verifyCircuit( PointRow: contPointRow, PointCol: contPointCol, - SparkA: SPARKMatrixData{ + SparkRLC: SPARKMatrixData{ Claimed: typeConverters.LimbsToBigIntMod(hints.AHints.claimed.Limbs), - SparkSumcheckLast: acontSparkSumcheckLast, + SparkSumcheckLast: contSparkSumcheckLast, RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.AHints.rowFinalCounter.Limbs), RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.rowRSAddressEvaluation.Limbs), @@ -279,9 +277,10 @@ func verifyCircuit( ColwiseMerkleFirstRound: newMerkle(hints.AHints.colwiseSparkMerkle.firstRoundMerklePaths.path, true), ColwiseMerkle: newMerkle(hints.AHints.colwiseSparkMerkle.roundHints, true), - WHIRA3: NewWhirParams(sparkConfig.WHIR3), - WHIRA5: NewWhirParams(sparkConfig.WHIR5), - LogANumTerms: sparkConfig.LogNumTerms, + WHIR3: NewWhirParams(sparkConfig.WHIR3), + WHIR5: NewWhirParams(sparkConfig.WHIR5), + + LogNumTerms: sparkConfig.LogNumTerms, }, UseSpark: useSpark, @@ -347,10 +346,10 @@ func verifyCircuit( PointRow: pointRow, PointCol: pointCol, - SparkA: SPARKMatrixData{ + SparkRLC: SPARKMatrixData{ Claimed: typeConverters.LimbsToBigIntMod(hints.AHints.claimed.Limbs), - SparkSumcheckLast: asparkSumcheckLast, + SparkSumcheckLast: sparkSumcheckLast, RowFinalCounter: typeConverters.LimbsToBigIntMod(hints.AHints.rowFinalCounter.Limbs), RowRSAddressEvaluation: typeConverters.LimbsToBigIntMod(hints.AHints.rowRSAddressEvaluation.Limbs), @@ -377,9 +376,10 @@ func verifyCircuit( ColwiseMerkleFirstRound: newMerkle(hints.AHints.colwiseSparkMerkle.firstRoundMerklePaths.path, false), ColwiseMerkle: newMerkle(hints.AHints.colwiseSparkMerkle.roundHints, false), - WHIRA3: NewWhirParams(sparkConfig.WHIR3), - WHIRA5: NewWhirParams(sparkConfig.WHIR5), - LogANumTerms: sparkConfig.LogNumTerms, + WHIR3: NewWhirParams(sparkConfig.WHIR3), + WHIR5: NewWhirParams(sparkConfig.WHIR5), + + LogNumTerms: sparkConfig.LogNumTerms, }, UseSpark: useSpark, @@ -491,7 +491,6 @@ func sparkSingleMatrix( matrix SPARKMatrixData, circuit *Circuit, ) error { - claimedEvaluations := make([]frontend.Variable, 3) if err := arthur.FillNextScalars(claimedEvaluations); err != nil { return err @@ -508,15 +507,15 @@ func sparkSingleMatrix( api.Mul(claimedEvaluations[2], matrixCombinationRandomness[0], matrixCombinationRandomness[0]), ) - sumcheckCommitment, err := parseBatchedCommitment(arthur, matrix.WHIRA5) + sumcheckCommitment, err := parseBatchedCommitment(arthur, matrix.WHIR5) if err != nil { return err } - rowwiseCommitment, err := parseBatchedCommitment(arthur, matrix.WHIRA3) + rowwiseCommitment, err := parseBatchedCommitment(arthur, matrix.WHIR3) if err != nil { return err } - colwiseCommitment, err := parseBatchedCommitment(arthur, matrix.WHIRA3) + colwiseCommitment, err := parseBatchedCommitment(arthur, matrix.WHIR3) if err != nil { return err } @@ -530,17 
+529,11 @@ func sparkSingleMatrix( return err } - // After debug: Change 1 to actual claimed value - sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, claimedValue, matrix.LogANumTerms, 4) + sparkSumcheckFoldingRandomness, sparkSumcheckLastEval, err := runSumcheck(api, arthur, claimedValue, matrix.LogNumTerms, 4) if err != nil { return err } - api.Println(sparkSumcheckLastEval) - - _ = sparkSumcheckFoldingRandomness - _ = sparkSumcheckLastEval - claimedVal := api.Add( matrix.SparkSumcheckLast[0], api.Mul(matrix.SparkSumcheckLast[1], matrixCombinationRandomness[0]), @@ -549,7 +542,7 @@ func sparkSingleMatrix( api.AssertIsEqual(sparkSumcheckLastEval, api.Mul(claimedVal, matrix.SparkSumcheckLast[3], matrix.SparkSumcheckLast[4])) - _, err = RunZKWhir(api, arthur, uapi, sc, matrix.SparkSumcheckMerkle, matrix.SparkSumcheckFirstRound, matrix.WHIRA5, [][]frontend.Variable{{}, {}, {}, {}, {}}, []frontend.Variable{}, sumcheckCommitment, + _, err = RunZKWhir(api, arthur, uapi, sc, matrix.SparkSumcheckMerkle, matrix.SparkSumcheckFirstRound, matrix.WHIR5, [][]frontend.Variable{{}, {}, {}, {}, {}}, []frontend.Variable{}, sumcheckCommitment, [][]frontend.Variable{{matrix.SparkSumcheckLast[0]}, {matrix.SparkSumcheckLast[1]}, {matrix.SparkSumcheckLast[2]}, {matrix.SparkSumcheckLast[3]}, {matrix.SparkSumcheckLast[4]}}, [][]frontend.Variable{sparkSumcheckFoldingRandomness}, ) @@ -566,7 +559,6 @@ func sparkSingleMatrix( tau := tauGammaTemp[0] gamma := tauGammaTemp[1] - // Change this debug statement gpaResult, err := gpaSumcheckVerifier(api, arthur, len(circuit.PointRow)+2) if err != nil { return err @@ -598,8 +590,7 @@ func sparkSingleMatrix( api.AssertIsEqual(gpaResult.lastSumcheckValue, evaluated_value) - // Change this after debug - gpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, matrix.LogANumTerms+2) + gpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, matrix.LogNumTerms+2) if err != nil { return err } @@ -617,7 +608,7 @@ func sparkSingleMatrix( api.AssertIsEqual(gpaResultRSWS.lastSumcheckValue, rsws_evaluated_value) - _, err = RunZKWhir(api, arthur, uapi, sc, matrix.RowwiseMerkle, matrix.RowwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, rowwiseCommitment, + _, err = RunZKWhir(api, arthur, uapi, sc, matrix.RowwiseMerkle, matrix.RowwiseMerkleFirstRound, matrix.WHIR3, [][]frontend.Variable{{}}, []frontend.Variable{}, rowwiseCommitment, [][]frontend.Variable{{matrix.RowRSAddressEvaluation}, {matrix.RowRSValueEvaluation}, {matrix.RowRSTimestampEvaluation}}, [][]frontend.Variable{rsws_evaluation_randomness}, ) @@ -636,7 +627,6 @@ func sparkSingleMatrix( colwiseTau := colwiseTauGammaTemp[0] colwiseGamma := colwiseTauGammaTemp[1] - // Change this debug statement colwiseInitFinalGpaResult, err := gpaSumcheckVerifier(api, arthur, len(circuit.PointCol)-1+2) if err != nil { return err @@ -655,7 +645,7 @@ func sparkSingleMatrix( colwiseinit_opening := api.Sub(api.Add(api.Mul(colwiseaddr, colwiseGamma, colwiseGamma), api.Mul(colwisemem, colwiseGamma), colwiseinit_cntr), colwiseTau) - _, err = RunZKWhir(api, arthur, uapi, sc, circuit.SparkA.ColFinalMerkle, circuit.SparkA.ColFinalMerkleFirstRound, circuit.WHIRCol, [][]frontend.Variable{{}}, []frontend.Variable{}, colFinalCommitment, + _, err = RunZKWhir(api, arthur, uapi, sc, circuit.SparkRLC.ColFinalMerkle, circuit.SparkRLC.ColFinalMerkleFirstRound, circuit.WHIRCol, [][]frontend.Variable{{}}, []frontend.Variable{}, colFinalCommitment, 
[][]frontend.Variable{{matrix.ColFinalCounter}}, [][]frontend.Variable{colwiseEvaluation_randomness}, ) @@ -668,7 +658,8 @@ func sparkSingleMatrix( api.AssertIsEqual(colwiseInitFinalGpaResult.lastSumcheckValue, colwiseevaluated_value) // Colwise RS WS - colwisegpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, matrix.LogANumTerms+2) + + colwisegpaResultRSWS, err := gpaSumcheckVerifier(api, arthur, matrix.LogNumTerms+2) if err != nil { return err } @@ -686,7 +677,7 @@ func sparkSingleMatrix( api.AssertIsEqual(colwisegpaResultRSWS.lastSumcheckValue, colwisersws_evaluated_value) - _, err = RunZKWhir(api, arthur, uapi, sc, matrix.ColwiseMerkle, matrix.ColwiseMerkleFirstRound, matrix.WHIRA3, [][]frontend.Variable{{}}, []frontend.Variable{}, colwiseCommitment, + _, err = RunZKWhir(api, arthur, uapi, sc, matrix.ColwiseMerkle, matrix.ColwiseMerkleFirstRound, matrix.WHIR3, [][]frontend.Variable{{}}, []frontend.Variable{}, colwiseCommitment, [][]frontend.Variable{{matrix.ColRSAddressEvaluation}, {matrix.ColRSValueEvaluation}, {matrix.ColRSTimestampEvaluation}}, [][]frontend.Variable{colwisersws_evaluation_randomness}, ) @@ -696,11 +687,5 @@ func sparkSingleMatrix( api.AssertIsEqual(api.Mul(colwiseClaimedInit, colwiseClaimedWS), api.Mul(colwiseClaimedRS, colwiseClaimedFinal)) - _ = sumcheckCommitment - _ = rowwiseCommitment - _ = colwiseCommitment - _ = rowFinalCommitment - _ = colFinalCommitment - return nil } diff --git a/recursive-verifier/app/circuit/common.go b/recursive-verifier/app/circuit/common.go index 4d354d77..9ef59215 100644 --- a/recursive-verifier/app/circuit/common.go +++ b/recursive-verifier/app/circuit/common.go @@ -354,11 +354,11 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, var hidingSpartanData = consumeWhirData(config.WHIRConfigHidingSpartan, &merklePaths, &stirAnswers) var witnessData = consumeWhirData(config.WHIRConfigWitness, &merklePaths, &stirAnswers) - var asparkSumcheckData = consumeWhirData(sparkConfig.WHIR5, &sparkMerklePaths, &sparkStirAnswers) - var arowFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers) - var arowwiseSparkMerkle = consumeWhirData(sparkConfig.WHIR3, &sparkMerklePaths, &sparkStirAnswers) - var acolFinal = consumeWhirData(sparkConfig.WHIRCol, &sparkMerklePaths, &sparkStirAnswers) - var acolwiseSparkMerkle = consumeWhirData(sparkConfig.WHIR3, &sparkMerklePaths, &sparkStirAnswers) + var sparkSumcheckData = consumeWhirData(sparkConfig.WHIR5, &sparkMerklePaths, &sparkStirAnswers) + var rowFinal = consumeWhirData(sparkConfig.WHIRRow, &sparkMerklePaths, &sparkStirAnswers) + var rowwiseSparkMerkle = consumeWhirData(sparkConfig.WHIR3, &sparkMerklePaths, &sparkStirAnswers) + var colFinal = consumeWhirData(sparkConfig.WHIRCol, &sparkMerklePaths, &sparkStirAnswers) + var colwiseSparkMerkle = consumeWhirData(sparkConfig.WHIR3, &sparkMerklePaths, &sparkStirAnswers) hints := Hints{ pointRow: pointRow, @@ -369,11 +369,11 @@ func PrepareAndVerifyCircuit(config Config, sparkConfig SparkConfig, r1cs R1CS, AHints: SparkMatrixHints{ claimed: claimedA, - sparkSumcheckData: asparkSumcheckData, - rowFinalMerkle: arowFinal, - rowwiseSparkMerkle: arowwiseSparkMerkle, - colFinalMerkle: acolFinal, - colwiseSparkMerkle: acolwiseSparkMerkle, + sparkSumcheckData: sparkSumcheckData, + rowFinalMerkle: rowFinal, + rowwiseSparkMerkle: rowwiseSparkMerkle, + colFinalMerkle: colFinal, + colwiseSparkMerkle: colwiseSparkMerkle, sparkClaimedEvaluations: sparkClaimedEvaluations[0], diff --git 
a/recursive-verifier/app/circuit/mt.go b/recursive-verifier/app/circuit/mt.go index ce5b41d0..4930c399 100644 --- a/recursive-verifier/app/circuit/mt.go +++ b/recursive-verifier/app/circuit/mt.go @@ -7,12 +7,6 @@ import ( "github.com/consensys/gnark/std/math/uints" ) -func newCombinedMerkle(hint ZKHint, isContainer bool) CombinedMerkle { - return CombinedMerkle{ - firstRound: newMerkle(hint.firstRoundMerklePaths.path, isContainer), - mainRounds: newMerkle(hint.roundHints, isContainer), - } -} func newMerkle( hint Hint, isContainer bool, diff --git a/recursive-verifier/app/circuit/types.go b/recursive-verifier/app/circuit/types.go index c32ddaf3..a9406e19 100644 --- a/recursive-verifier/app/circuit/types.go +++ b/recursive-verifier/app/circuit/types.go @@ -83,11 +83,6 @@ type Merkle struct { AuthPaths [][][]frontend.Variable } -type CombinedMerkle struct { - firstRound Merkle - mainRounds Merkle -} - // Other types type ProofObject struct { StatementValuesAtRandomPoint []Fp256 `json:"statement_values_at_random_point"` @@ -169,9 +164,9 @@ type Commitment struct { type SPARKMatrixData struct { Claimed frontend.Variable - WHIRA3 WHIRParams - WHIRA5 WHIRParams - LogANumTerms int + WHIR3 WHIRParams + WHIR5 WHIRParams + LogNumTerms int SparkSumcheckLast []frontend.Variable From 96d53cb41247f4d3fd5c150ae51854178e2adb00 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Fri, 3 Oct 2025 17:50:28 +0800 Subject: [PATCH 27/34] Cleanup --- spark-prover/README.md | 15 +++++++++++++++ spark-prover/src/main.rs | 30 ++++++++++++++++++------------ spark-prover/src/utilities/mod.rs | 10 +++++----- 3 files changed, 38 insertions(+), 17 deletions(-) diff --git a/spark-prover/README.md b/spark-prover/README.md index b9e5f93e..94afe423 100644 --- a/spark-prover/README.md +++ b/spark-prover/README.md @@ -1,6 +1,21 @@ # SPARK Experimental Rust prover and gnark recursive prover circuit will be implemented and optimized here. +## Running +``` +noirup --version v1.0.0-beta.11 +cd noir-examples/noir-passport-examples/complete_age_check +nargo compile +cargo run --release --bin provekit-cli prepare ./target/complete_age_check.json -o ./noir-proof-scheme.nps +cargo run --release --bin provekit-cli prove ./noir-proof-scheme.nps ./Prover.toml -o ./noir-proof.np +cargo run --release --bin provekit-cli generate-gnark-inputs ./noir-proof-scheme.nps ./noir-proof.np +cd ../../.. +cargo run --bin spark-prover -- --r1cs "noir-examples/noir-passport-examples/complete_age_check/r1cs.json" --request "noir-examples/noir-passport-examples/complete_age_check/spark_request.json" +cargo run -p spark-prover --bin spark-verifier -- --proof "spark-prover/spark_proof.json" --request "noir-examples/noir-passport-examples/complete_age_check/spark_request.json" +cd recursive-verifier/cmd/cli +go run . 
--config "../../../noir-examples/noir-passport-examples/complete_age_check/params_for_recursive_verifier" --r1cs "../../../noir-examples/noir-passport-examples/complete_age_check/r1cs.json" --evaluation spark --spark_config "../../../spark-prover/gnark_spark_proof.json" +``` + ## Running SPARK (under development) ```cargo run --bin spark-prover``` diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index 968cf740..282233bd 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -1,13 +1,9 @@ use { - anyhow::{Context, Result}, - ark_ff::AdditiveGroup, - provekit_common::{ + anyhow::{Context, Result}, argh::FromArgs, ark_ff::AdditiveGroup, provekit_common::{ gnark::WHIRConfigGnark, utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, FieldElement, IOPattern, WhirR1CSScheme, - }, - provekit_r1cs_compiler::WhirR1CSSchemeBuilder, - spark_prover::utilities::{ + }, provekit_r1cs_compiler::WhirR1CSSchemeBuilder, spark_prover::utilities::{ deserialize_r1cs, deserialize_request, iopattern::SPARKDomainSeparator, matrix::{COOMatrix, SparkMatrix, TimeStamps}, @@ -15,14 +11,24 @@ use { spark::prove_spark_for_single_matrix, whir::SPARKWHIRConfigsNew, MatrixDimensionsNew, SPARKProof, SPARKProofGnarkNew, - }, - spongefish::codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, - std::{collections::BTreeMap, fs::File, io::Write}, - whir::whir::{domainsep::WhirDomainSeparator, utils::HintSerialize}, + }, spongefish::codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, std::{collections::BTreeMap, fs::File, io::Write, path::PathBuf}, whir::whir::{domainsep::WhirDomainSeparator, utils::HintSerialize} }; +#[derive(FromArgs)] +#[argh(description = "Spark Prover CLI")] +struct Args { + /// r1cs + #[argh(option)] + r1cs: PathBuf, + + /// request + #[argh(option)] + request: PathBuf, +} fn main() -> Result<()> { - let r1cs = deserialize_r1cs("spark-prover/r1cs.json") + let args: Args = argh::from_env(); + + let r1cs = deserialize_r1cs(&args.r1cs) .context("Error: Failed to create the R1CS object")?; // get combined matrix non-zero value coordinates @@ -133,7 +139,7 @@ fn main() -> Result<()> { .collect::>(); // Run for each request - let request = deserialize_request("spark-prover/request.json") + let request = deserialize_request(&args.request) .context("Error: Failed to deserialize the request object")?; let memory = calculate_memory(request.point_to_evaluate.clone()); diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index 6866d56b..8807e63e 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -12,12 +12,12 @@ use { gnark::WHIRConfigGnark, spark::SPARKRequest, utils::next_power_of_two, R1CS, }, serde::{Deserialize, Serialize}, - std::fs, + std::{fs, path::PathBuf}, }; -pub fn deserialize_r1cs(path_str: &str) -> Result { +pub fn deserialize_r1cs(path: &PathBuf) -> Result { let json_str = - fs::read_to_string(path_str).context("Error: Failed to open the r1cs.json file")?; + fs::read_to_string(path).context("Error: Failed to open the r1cs.json file")?; let mut r1cs: R1CS = serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS")?; r1cs.grow_matrices( @@ -27,9 +27,9 @@ pub fn deserialize_r1cs(path_str: &str) -> Result { Ok(r1cs) } -pub fn deserialize_request(path_str: &str) -> Result { +pub fn deserialize_request(path: &PathBuf) -> Result { let json_str = - fs::read_to_string(path_str).context("Error: Failed to open the request.json file")?; + 
fs::read_to_string(path).context("Error: Failed to open the request.json file")?; serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS") } From 8660d5d1e98a92e650b4a19661592cfa0ba3f8ed Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Mon, 6 Oct 2025 13:49:36 +0800 Subject: [PATCH 28/34] Rust format --- spark-prover/src/main.rs | 23 +++++++++++++++-------- spark-prover/src/utilities/mod.rs | 3 +-- 2 files changed, 16 insertions(+), 10 deletions(-) diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs index 282233bd..ff15fb69 100644 --- a/spark-prover/src/main.rs +++ b/spark-prover/src/main.rs @@ -1,9 +1,14 @@ use { - anyhow::{Context, Result}, argh::FromArgs, ark_ff::AdditiveGroup, provekit_common::{ + anyhow::{Context, Result}, + argh::FromArgs, + ark_ff::AdditiveGroup, + provekit_common::{ gnark::WHIRConfigGnark, utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, FieldElement, IOPattern, WhirR1CSScheme, - }, provekit_r1cs_compiler::WhirR1CSSchemeBuilder, spark_prover::utilities::{ + }, + provekit_r1cs_compiler::WhirR1CSSchemeBuilder, + spark_prover::utilities::{ deserialize_r1cs, deserialize_request, iopattern::SPARKDomainSeparator, matrix::{COOMatrix, SparkMatrix, TimeStamps}, @@ -11,7 +16,10 @@ use { spark::prove_spark_for_single_matrix, whir::SPARKWHIRConfigsNew, MatrixDimensionsNew, SPARKProof, SPARKProofGnarkNew, - }, spongefish::codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, std::{collections::BTreeMap, fs::File, io::Write, path::PathBuf}, whir::whir::{domainsep::WhirDomainSeparator, utils::HintSerialize} + }, + spongefish::codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, + std::{collections::BTreeMap, fs::File, io::Write, path::PathBuf}, + whir::whir::{domainsep::WhirDomainSeparator, utils::HintSerialize}, }; #[derive(FromArgs)] @@ -28,8 +36,7 @@ struct Args { fn main() -> Result<()> { let args: Args = argh::from_env(); - let r1cs = deserialize_r1cs(&args.r1cs) - .context("Error: Failed to create the R1CS object")?; + let r1cs = deserialize_r1cs(&args.r1cs).context("Error: Failed to create the R1CS object")?; // get combined matrix non-zero value coordinates @@ -240,8 +247,8 @@ fn main() -> Result<()> { merlin.hint(&request.point_to_evaluate.row)?; merlin.hint(&request.point_to_evaluate.col)?; - // Calculate the RLC of the matrices - // Note: can be also calculated from rlc of val_a, val_b, val_c + // Calculate the RLC of the matrices + // Note: can be also calculated from rlc of val_a, val_b, val_c merlin.add_scalars(&[ request.claimed_values.a, request.claimed_values.b, @@ -280,7 +287,7 @@ fn main() -> Result<()> { + request.claimed_values.b * matrix_batching_randomness + request.claimed_values.c * matrix_batching_randomness_sq; - // + // let spark_matrix = SparkMatrix { coo: COOMatrix { diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index 8807e63e..d010dec6 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -16,8 +16,7 @@ use { }; pub fn deserialize_r1cs(path: &PathBuf) -> Result { - let json_str = - fs::read_to_string(path).context("Error: Failed to open the r1cs.json file")?; + let json_str = fs::read_to_string(path).context("Error: Failed to open the r1cs.json file")?; let mut r1cs: R1CS = serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS")?; r1cs.grow_matrices( From 962c6ca8cde39078ed08caf38fa6e2ae845600b0 Mon Sep 17 00:00:00 2001 From: shreyas-londhe Date: Thu, 9 Oct 2025 15:37:18 
+0530 Subject: [PATCH 29/34] feat: refactor --- Cargo.toml | 3 +- provekit/spark/Cargo.toml | 33 ++ provekit/spark/README.md | 70 ++++ provekit/spark/src/bin/generate_test_r1cs.rs | 32 ++ .../spark/src/bin/generate_test_request.rs | 33 ++ provekit/spark/src/gpa.rs | 374 +++++++++++++++++ provekit/spark/src/lib.rs | 18 + provekit/spark/src/memory.rs | 375 ++++++++++++++++++ provekit/spark/src/preprocessing.rs | 231 +++++++++++ provekit/spark/src/prover.rs | 331 ++++++++++++++++ provekit/spark/src/sumcheck.rs | 132 ++++++ provekit/spark/src/types.rs | 102 +++++ provekit/spark/src/utils.rs | 64 +++ provekit/spark/src/verifier.rs | 159 ++++++++ tooling/spark-cli/Cargo.toml | 20 + tooling/spark-cli/src/cmd/mod.rs | 2 + tooling/spark-cli/src/cmd/prove.rs | 65 +++ tooling/spark-cli/src/cmd/verify.rs | 40 ++ tooling/spark-cli/src/main.rs | 26 ++ 19 files changed, 2109 insertions(+), 1 deletion(-) create mode 100644 provekit/spark/Cargo.toml create mode 100644 provekit/spark/README.md create mode 100644 provekit/spark/src/bin/generate_test_r1cs.rs create mode 100644 provekit/spark/src/bin/generate_test_request.rs create mode 100644 provekit/spark/src/gpa.rs create mode 100644 provekit/spark/src/lib.rs create mode 100644 provekit/spark/src/memory.rs create mode 100644 provekit/spark/src/preprocessing.rs create mode 100644 provekit/spark/src/prover.rs create mode 100644 provekit/spark/src/sumcheck.rs create mode 100644 provekit/spark/src/types.rs create mode 100644 provekit/spark/src/utils.rs create mode 100644 provekit/spark/src/verifier.rs create mode 100644 tooling/spark-cli/Cargo.toml create mode 100644 tooling/spark-cli/src/cmd/mod.rs create mode 100644 tooling/spark-cli/src/cmd/prove.rs create mode 100644 tooling/spark-cli/src/cmd/verify.rs create mode 100644 tooling/spark-cli/src/main.rs diff --git a/Cargo.toml b/Cargo.toml index bf824f8c..f6760f26 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,10 +10,11 @@ members = [ "provekit/r1cs-compiler", "provekit/prover", "provekit/verifier", + "provekit/spark", "tooling/cli", "tooling/provekit-bench", "tooling/provekit-gnark", - "spark-prover", + "tooling/spark-cli", "tooling/verifier-server", "ntt", ] diff --git a/provekit/spark/Cargo.toml b/provekit/spark/Cargo.toml new file mode 100644 index 00000000..88c21869 --- /dev/null +++ b/provekit/spark/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "provekit-spark" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +provekit-common.workspace = true +provekit-r1cs-compiler.workspace = true +ark-ff.workspace = true +ark-std.workspace = true +anyhow.workspace = true +serde.workspace = true +serde_json.workspace = true +spongefish.workspace = true +whir.workspace = true +itertools = "0.14.0" + +[lints] +workspace = true + +[[bin]] +name = "generate_test_r1cs" +path = "src/bin/generate_test_r1cs.rs" + +[[bin]] +name = "generate_test_request" +path = "src/bin/generate_test_request.rs" + diff --git a/provekit/spark/README.md b/provekit/spark/README.md new file mode 100644 index 00000000..efd783c0 --- /dev/null +++ b/provekit/spark/README.md @@ -0,0 +1,70 @@ +# ProveKit SPARK + +SPARK (Sparse Polynomial Argument of Knowledge) prover and verifier implementation for ProveKit. 
+ +## Structure + +- `src/types.rs` - Type definitions (proof, request, matrices, memory) +- `src/prover.rs` - Prover implementation and trait +- `src/verifier.rs` - Verifier implementation and trait +- `src/preprocessing.rs` - R1CS to SPARK matrix conversion +- `src/sumcheck.rs` - Sumcheck protocol (prover + verifier) +- `src/gpa.rs` - Grand Product Argument (prover + verifier) +- `src/memory.rs` - Memory checking (rowwise + colwise) +- `src/utils.rs` - Utilities (I/O, memory calculation, IO patterns) + +## Usage + +### As a Library + +```rust +use provekit_spark::{ + SPARKProver, SPARKProverScheme, SPARKVerifier, SPARKVerifierScheme, + deserialize_r1cs, deserialize_request, +}; + +// Proving +let r1cs = deserialize_r1cs("path/to/r1cs.json")?; +let request = deserialize_request("path/to/request.json")?; +let scheme = SPARKProverScheme::new_for_r1cs(&r1cs); +let proof = scheme.prove(&r1cs, &request)?; + +// Verifying +let scheme = SPARKVerifierScheme::from_proof(&proof); +scheme.verify(&proof, &request)?; +``` + +### As a CLI + +Use the `spark-cli` tool in `tooling/spark-cli`: + +```bash +# Prove +cargo run -p spark-cli -- prove \ + --r1cs path/to/r1cs.json \ + --request path/to/request.json \ + --output proof.json + +# Verify +cargo run -p spark-cli -- verify \ + --proof proof.json \ + --request request.json +``` + +### Test Utilities + +Generate test R1CS and request files: + +```bash +cargo run -p provekit-spark --bin generate_test_r1cs +cargo run -p provekit-spark --bin generate_test_request +``` + +## Architecture + +The SPARK implementation follows a trait-based design: + +- **SPARKProver**: Trait for proving, implemented by SPARKProverScheme +- **SPARKVerifier**: Trait for verification, implemented by SPARKVerifierScheme + +The prover and verifier share common types and utilities but are otherwise independent, allowing for easy testing and extension. 
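+
+As a sketch, the two trait surfaces mirror each other (signatures as defined
+in `src/prover.rs` and `src/verifier.rs`):
+
+```rust
+pub trait SPARKProver {
+    fn prove(&self, r1cs: &R1CS, request: &SPARKRequest) -> Result<SPARKProof>;
+}
+
+pub trait SPARKVerifier {
+    fn verify(&self, proof: &SPARKProof, request: &SPARKRequest) -> Result<()>;
+}
+```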
diff --git a/provekit/spark/src/bin/generate_test_r1cs.rs b/provekit/spark/src/bin/generate_test_r1cs.rs new file mode 100644 index 00000000..ebf2f034 --- /dev/null +++ b/provekit/spark/src/bin/generate_test_r1cs.rs @@ -0,0 +1,32 @@ +use ::{ + provekit_common::{FieldElement, R1CS}, + std::{fs::File, io::Write}, +}; + +fn main() { + let mut r1cs = R1CS::new(); + r1cs.grow_matrices(256, 256); + let interned_1 = r1cs.interner.intern(FieldElement::from(1)); + let interned_2 = r1cs.interner.intern(FieldElement::from(2)); + let interned_3 = r1cs.interner.intern(FieldElement::from(3)); + + for i in 0..256 { + r1cs.a.set(i, i, interned_1); + r1cs.b.set(i, i, interned_2); + r1cs.c.set(i, i, interned_3); + } + + r1cs.a.set(1, 0, interned_1); + r1cs.a.set(2, 0, interned_1); + r1cs.a.set(3, 0, interned_1); + + let matrix_json = + serde_json::to_string(&r1cs).expect("Error: Failed to serialize R1CS to JSON"); + let mut request_file = + File::create("r1cs.json").expect("Error: Failed to create the r1cs.json file"); + request_file + .write_all(matrix_json.as_bytes()) + .expect("Error: Failed to write JSON data to r1cs.json"); + + println!("Generated r1cs.json"); +} diff --git a/provekit/spark/src/bin/generate_test_request.rs b/provekit/spark/src/bin/generate_test_request.rs new file mode 100644 index 00000000..0a6bffe1 --- /dev/null +++ b/provekit/spark/src/bin/generate_test_request.rs @@ -0,0 +1,33 @@ +use ::{ + provekit_common::{ + spark::{ClaimedValues, Point, SPARKRequest}, + FieldElement, + }, + std::{fs::File, io::Write}, +}; + +fn main() { + let mut row = vec![FieldElement::from(0); 8]; + let col = vec![FieldElement::from(0); 9]; + + row[7] = FieldElement::from(1); + + let spark_request = SPARKRequest { + point_to_evaluate: Point { row, col }, + claimed_values: ClaimedValues { + a: FieldElement::from(1), + b: FieldElement::from(0), + c: FieldElement::from(0), + }, + }; + + let request_json = + serde_json::to_string(&spark_request).expect("Error: Failed to serialize request to JSON"); + let mut request_file = + File::create("request.json").expect("Error: Failed to create the request.json file"); + request_file + .write_all(request_json.as_bytes()) + .expect("Error: Failed to write JSON data to request.json"); + + println!("Generated request.json"); +} diff --git a/provekit/spark/src/gpa.rs b/provekit/spark/src/gpa.rs new file mode 100644 index 00000000..3ce61813 --- /dev/null +++ b/provekit/spark/src/gpa.rs @@ -0,0 +1,374 @@ +use ::{ + provekit_common::{ + skyscraper::SkyscraperSponge, + utils::{ + next_power_of_two, + sumcheck::{ + calculate_eq, calculate_evaluations_over_boolean_hypercube_for_eq, eval_cubic_poly, + sumcheck_fold_map_reduce, + }, + HALF, + }, + FieldElement, + }, + spongefish::{ + codecs::arkworks_algebra::{FieldToUnitDeserialize, FieldToUnitSerialize, UnitToField}, + ProverState, VerifierState, + }, + whir::poly_utils::evals::EvaluationsList, +}; + +/// Runs the Grand Product Argument (GPA) protocol to prove product equality. +/// +/// GPA constructs a binary multiplication tree from `left` and `right` vectors, +/// then uses sumcheck-based proofs to verify that `∏left[i] = ∏right[i]` +/// without revealing the individual values. +/// +/// This is the core primitive for memory checking in SPARK, enabling efficient +/// verification that read and write sets are consistent. 
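+///
+/// For example (illustrative values): with `left = [2, 3, 4, 1]` and
+/// `right = [6, 4, 1, 1]`, both sides multiply to 24, so
+/// `∏left[i] = ∏right[i]` holds and the argument succeeds.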
+/// +/// # Arguments +/// +/// * `merlin` - The prover's Fiat-Shamir transcript +/// * `left` - Initial state vector (must be power-of-2 length) +/// * `right` - Final state vector (must match `left` length) +/// +/// # Returns +/// +/// Vector of challenge randomness accumulated across all sumcheck rounds +/// +/// # Panics +/// +/// Panics if input vectors are not power-of-2 length +pub fn run_gpa( + merlin: &mut ProverState, + left: &[FieldElement], + right: &[FieldElement], +) -> Vec { + let mut concatenated = left.to_vec(); + concatenated.extend_from_slice(right); + let layers = calculate_binary_multiplication_tree(concatenated); + + let mut sumcheck_claim; + let mut line_randomness; + let mut line_evaluations; + let mut accumulated_randomness = Vec::::new(); + + (line_randomness, sumcheck_claim) = add_line_to_transcript(merlin, layers[1].clone()); + + for i in 2..layers.len() { + (line_evaluations, accumulated_randomness) = run_gpa_sumcheck( + merlin, + &line_randomness, + layers[i].clone(), + sumcheck_claim, + accumulated_randomness, + ); + (line_randomness, sumcheck_claim) = + add_line_to_transcript(merlin, line_evaluations.to_vec()); + } + + accumulated_randomness.push(line_randomness[0]); + accumulated_randomness +} + +/// Constructs a binary multiplication tree from the input vector. +/// +/// Each parent node is the product of its two children, forming a complete +/// binary tree where the root is the product of all elements. +/// +/// # Returns +/// +/// Vector of layers, where: +/// - `layers[0]` is the root (single element) +/// - `layers[layers.len()-1]` is the leaf layer (input) +/// +/// # Panics +/// +/// Panics if input length is not a power of two +fn calculate_binary_multiplication_tree( + array_to_prove: Vec, +) -> Vec> { + assert!( + array_to_prove.len() == (1 << next_power_of_two(array_to_prove.len())), + "Input length must be power of two" + ); + + let mut layers = vec![]; + let mut current_layer = array_to_prove; + + while current_layer.len() > 1 { + let next_layer = current_layer + .chunks_exact(2) + .map(|pair| pair[0] * pair[1]) + .collect(); + + layers.push(current_layer); + current_layer = next_layer; + } + + layers.push(current_layer); + layers.reverse(); + layers +} + +/// Adds a line polynomial to the transcript and samples verifier challenge. +/// +/// Converts evaluations to coefficients, commits them to the transcript, +/// then receives a random challenge to bind the prover to this layer. +/// +/// # Returns +/// +/// Tuple of `(challenge, next_sumcheck_claim)` for the following GPA round +fn add_line_to_transcript( + merlin: &mut ProverState, + arr: Vec, +) -> ([FieldElement; 1], FieldElement) { + let evaluations = EvaluationsList::new(arr); + let coeffs = evaluations.to_coeffs(); + let line_poly: &[FieldElement] = coeffs.coeffs(); + + merlin + .add_scalars(line_poly) + .expect("Failed to add line polynomial to transcript"); + + let mut challenge = [FieldElement::from(0); 1]; + merlin + .fill_challenge_scalars(&mut challenge) + .expect("Failed to sample challenge"); + + let next_claim = line_poly[0] + line_poly[1] * challenge[0]; + + (challenge, next_claim) +} + +/// Executes a single sumcheck round within the GPA protocol. +/// +/// This proves the relation: `eq(r, x) · v₀(x) · v₁(x)` sums correctly +/// over the boolean hypercube, where `v₀` and `v₁` are child layers +/// in the multiplication tree. 
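+///
+/// Each round sends a cubic round polynomial `h(x)` in coefficient form; the
+/// transcript enforces the binding `h(0) + h(1) = claim`, and the claim for
+/// the next round becomes `h(challenge)`.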
+/// +/// # Returns +/// +/// Tuple of `(final_evaluations, accumulated_randomness)` for next round +fn run_gpa_sumcheck( + merlin: &mut ProverState, + r: &[FieldElement; 1], + layer: Vec, + mut sumcheck_claim: FieldElement, + mut accumulated_randomness: Vec, +) -> ([FieldElement; 2], Vec) { + let (mut even_layer, mut odd_layer) = split_even_odd(layer); + accumulated_randomness.push(r[0]); + + let mut eq_evaluations = + calculate_evaluations_over_boolean_hypercube_for_eq(&accumulated_randomness); + let mut challenge = [FieldElement::from(0)]; + let mut round_randomness = Vec::::new(); + let mut fold = None; + + loop { + // Evaluate sumcheck polynomial at special points: 0, -1, ∞ + let [eval_at_0, eval_at_neg1, eval_at_inf_over_x3] = sumcheck_fold_map_reduce( + [&mut eq_evaluations, &mut even_layer, &mut odd_layer], + fold, + |[eq, v0, v1]| { + [ + eq.0 * v0.0 * v1.0, + (eq.0 + eq.0 - eq.1) * (v0.0 + v0.0 - v0.1) * (v1.0 + v1.0 - v1.1), + (eq.1 - eq.0) * (v0.1 - v0.0) * (v1.1 - v1.0), + ] + }, + ); + + if fold.is_some() { + eq_evaluations.truncate(eq_evaluations.len() / 2); + even_layer.truncate(even_layer.len() / 2); + odd_layer.truncate(odd_layer.len() / 2); + } + + // Reconstruct cubic polynomial from evaluation points + let poly_coeffs = reconstruct_cubic_from_evaluations( + sumcheck_claim, + eval_at_0, + eval_at_neg1, + eval_at_inf_over_x3, + ); + + // Verify sumcheck binding: h(0) + h(1) = claimed_sum + assert_eq!( + sumcheck_claim, + poly_coeffs[0] + poly_coeffs[0] + poly_coeffs[1] + poly_coeffs[2] + poly_coeffs[3], + "Sumcheck binding check failed" + ); + + merlin + .add_scalars(&poly_coeffs) + .expect("Failed to add polynomial"); + merlin + .fill_challenge_scalars(&mut challenge) + .expect("Failed to sample challenge"); + + fold = Some(challenge[0]); + sumcheck_claim = eval_cubic_poly(&poly_coeffs, &challenge[0]); + round_randomness.push(challenge[0]); + + if eq_evaluations.len() <= 2 { + break; + } + } + + let final_v0 = even_layer[0] + (even_layer[1] - even_layer[0]) * challenge[0]; + let final_v1 = odd_layer[0] + (odd_layer[1] - odd_layer[0]) * challenge[0]; + + ([final_v0, final_v1], round_randomness) +} + +/// Reconstructs cubic polynomial coefficients from special point evaluations. +/// +/// Given evaluations at 0, -1, and ∞/x³, computes the unique cubic polynomial +/// that passes through these points and satisfies the sumcheck binding. +fn reconstruct_cubic_from_evaluations( + binding_value: FieldElement, + at_0: FieldElement, + at_neg1: FieldElement, + at_inf_over_x3: FieldElement, +) -> [FieldElement; 4] { + let mut coeffs = [FieldElement::from(0); 4]; + + coeffs[0] = at_0; + coeffs[2] = HALF * (binding_value + at_neg1 - at_0 - at_0 - at_0); + coeffs[3] = at_inf_over_x3; + coeffs[1] = binding_value - coeffs[0] - coeffs[0] - coeffs[3] - coeffs[2]; + + coeffs +} + +/// Splits vector into even-indexed and odd-indexed elements. +/// +/// Used to separate left/right children in the binary multiplication tree. +fn split_even_odd(input: Vec) -> (Vec, Vec) { + let mut even = Vec::new(); + let mut odd = Vec::new(); + + for (i, item) in input.into_iter().enumerate() { + if i % 2 == 0 { + even.push(item); + } else { + odd.push(item); + } + } + + (even, odd) +} + +/// Result of GPA sumcheck verification containing final randomness and claims. 
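+///
+/// Callers usually split the randomness into the final line challenge and
+/// the evaluation point, as `verify_axis` does with
+/// `gpa_result.randomness.split_at(1)`.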
+pub struct GPASumcheckResult { + /// The two claimed values at the leaves (left and right products) + pub claimed_values: Vec, + /// Final sumcheck evaluation after all rounds + pub a_last_sumcheck_value: FieldElement, + /// Accumulated verifier randomness from all rounds + pub randomness: Vec, +} + +/// Verifies a Grand Product Argument proof from the transcript. +/// +/// This is the verifier's counterpart to [`run_gpa`], checking that the +/// prover's sumcheck proofs are valid without recomputing the multiplication +/// tree. +/// +/// # Arguments +/// +/// * `arthur` - The verifier's transcript state (Fiat-Shamir) +/// * `height_of_binary_tree` - Number of layers in the multiplication tree +/// +/// # Returns +/// +/// [`GPASumcheckResult`] containing verified claims and randomness +pub fn gpa_sumcheck_verifier( + arthur: &mut VerifierState, + height_of_binary_tree: usize, +) -> anyhow::Result { + let mut prev_randomness; + let mut current_randomness = Vec::::new(); + let mut claimed_values = [FieldElement::from(0); 2]; + let mut line_coeffs = [FieldElement::from(0); 2]; + let mut line_challenge = [FieldElement::from(0); 1]; + let mut cubic_coeffs = [FieldElement::from(0); 4]; + let mut sumcheck_challenge = [FieldElement::from(0); 1]; + + arthur.fill_next_scalars(&mut claimed_values)?; + arthur.fill_challenge_scalars(&mut line_challenge)?; + + let mut sumcheck_value = eval_line(&claimed_values, &line_challenge[0]); + current_randomness.push(line_challenge[0]); + prev_randomness = current_randomness; + current_randomness = Vec::new(); + + for layer_idx in 1..height_of_binary_tree - 1 { + for _ in 0..layer_idx { + arthur.fill_next_scalars(&mut cubic_coeffs)?; + arthur.fill_challenge_scalars(&mut sumcheck_challenge)?; + + // Verify sumcheck binding + assert_eq!( + eval_cubic_poly(&cubic_coeffs, &FieldElement::from(0)) + + eval_cubic_poly(&cubic_coeffs, &FieldElement::from(1)), + sumcheck_value, + "Sumcheck verification failed at layer {layer_idx}" + ); + + current_randomness.push(sumcheck_challenge[0]); + sumcheck_value = eval_cubic_poly(&cubic_coeffs, &sumcheck_challenge[0]); + } + + arthur.fill_next_scalars(&mut line_coeffs)?; + arthur.fill_challenge_scalars(&mut line_challenge)?; + + // Verify line polynomial evaluation + let expected_line_value = calculate_eq(&prev_randomness, ¤t_randomness) + * eval_line(&line_coeffs, &FieldElement::from(0)) + * eval_line(&line_coeffs, &FieldElement::from(1)); + assert_eq!( + expected_line_value, sumcheck_value, + "Line evaluation mismatch" + ); + + current_randomness.push(line_challenge[0]); + prev_randomness = current_randomness; + current_randomness = Vec::new(); + sumcheck_value = eval_line(&line_coeffs, &line_challenge[0]); + } + + Ok(GPASumcheckResult { + claimed_values: claimed_values.to_vec(), + a_last_sumcheck_value: sumcheck_value, + randomness: prev_randomness, + }) +} + +/// Evaluates a linear polynomial at a given point. +/// +/// Computes `poly[0] + point * poly[1]` for a degree-1 polynomial. +pub fn eval_line(poly: &[FieldElement], point: &FieldElement) -> FieldElement { + poly[0] + *point * poly[1] +} + +/// Calculates address from binary representation of evaluation point. +/// +/// Interprets the randomness vector as a binary number in reverse order, +/// converting it to the corresponding memory address in the hypercube. 
+///
+/// # Example
+///
+/// `[r₀, r₁, r₂]` → `r₀·2² + r₁·2¹ + r₂·2⁰`
+pub fn calculate_adr(randomness: &[FieldElement]) -> FieldElement {
+    randomness
+        .iter()
+        .rev()
+        .enumerate()
+        .fold(FieldElement::from(0), |acc, (i, &r)| {
+            acc + r * FieldElement::from(1 << i)
+        })
+}
diff --git a/provekit/spark/src/lib.rs b/provekit/spark/src/lib.rs
new file mode 100644
index 00000000..98041ac2
--- /dev/null
+++ b/provekit/spark/src/lib.rs
@@ -0,0 +1,18 @@
+pub mod gpa;
+pub mod memory;
+pub mod preprocessing;
+pub mod prover;
+pub mod sumcheck;
+pub mod types;
+pub mod utils;
+pub mod verifier;
+
+pub use {
+    prover::{SPARKProver, SPARKScheme as SPARKProverScheme},
+    types::{
+        ClaimedValues, MatrixDimensions, Point, SPARKProof, SPARKProofGnark, SPARKRequest,
+        SPARKWHIRConfigs,
+    },
+    utils::{calculate_memory, deserialize_r1cs, deserialize_request},
+    verifier::{SPARKScheme as SPARKVerifierScheme, SPARKVerifier},
+};
diff --git a/provekit/spark/src/memory.rs b/provekit/spark/src/memory.rs
new file mode 100644
index 00000000..b0862130
--- /dev/null
+++ b/provekit/spark/src/memory.rs
@@ -0,0 +1,375 @@
+use {
+    crate::{
+        gpa::{calculate_adr, gpa_sumcheck_verifier, run_gpa},
+        types::{Memory, SPARKWHIRConfigs, SparkMatrix},
+    },
+    anyhow::{ensure, Result},
+    ark_std::One,
+    provekit_common::{
+        skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge},
+        utils::sumcheck::calculate_eq,
+        FieldElement, WhirConfig,
+    },
+    spongefish::{codecs::arkworks_algebra::UnitToField, ProverState, VerifierState},
+    whir::{
+        poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint},
+        whir::{
+            committer::{CommitmentReader, Witness},
+            prover::Prover,
+            statement::{Statement, Weights},
+            utils::{HintDeserialize, HintSerialize},
+            verifier::Verifier,
+        },
+    },
+};
+
+/// Configuration bundle for row/column axis-specific data.
+///
+/// This zero-cost abstraction eliminates code duplication between the
+/// row-wise and column-wise memory checking protocols.
+struct AxisConfig<'a> {
+    eq_memory: &'a [FieldElement],
+    final_timestamp: &'a [FieldElement],
+    read_timestamp: &'a [FieldElement],
+    address: &'a [FieldElement],
+    whir_config: &'a WhirConfig,
+}
+
+/// Proves memory consistency for a single axis (row or column).
+///
+/// Executes two GPAs:
+/// 1. Init-Final GPA: Proves memory state transitions from initialization to
+///    the final state
+/// 2. Read-Write GPA: Proves read-set and write-set timestamps are consistent
+///
+/// This is the core of SPARK's memory checking, ensuring that claimed memory
+/// values match the actual constraint system evaluations.
+#[inline]
+fn prove_axis(
+    merlin: &mut ProverState,
+    config: AxisConfig<'_>,
+    e_values: &[FieldElement],
+    whir_configs: &SPARKWHIRConfigs,
+    final_ts_witness: Witness,
+    axis_witness: Witness,
+) -> Result<()> {
+    let mut tau_and_gamma = [FieldElement::from(0); 2];
+    merlin.fill_challenge_scalars(&mut tau_and_gamma)?;
+    let tau = tau_and_gamma[0];
+    let gamma = tau_and_gamma[1];
+
+    // Construct opening vectors for init/final GPA using Fiat-Shamir challenges.
+ // Each opening encodes (address, value, timestamp) as: a*γ² + v*γ + t - τ + let init_vec: Vec<_> = (0..config.eq_memory.len()) + .map(|i| { + let a = FieldElement::from(i as u64); + let v = config.eq_memory[i]; + // Initial timestamp is always 0 (pre-access state) + a * gamma * gamma + v * gamma - tau + }) + .collect(); + + let final_vec: Vec<_> = (0..config.eq_memory.len()) + .map(|i| { + let a = FieldElement::from(i as u64); + let v = config.eq_memory[i]; + let t = config.final_timestamp[i]; + a * gamma * gamma + v * gamma + t - tau + }) + .collect(); + + let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); + let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + + let final_ts_eval = EvaluationsList::new(config.final_timestamp.to_vec()) + .evaluate(&MultilinearPoint(evaluation_randomness.to_vec())); + merlin.hint(&final_ts_eval)?; + + produce_whir_proof( + merlin, + MultilinearPoint(evaluation_randomness.to_vec()), + final_ts_eval, + config.whir_config.clone(), + final_ts_witness, + )?; + + // RS WS GPA + let rs_vec: Vec<_> = (0..config.address.len()) + .map(|i| { + let a = config.address[i]; + let v = e_values[i]; + let t = config.read_timestamp[i]; + a * gamma * gamma + v * gamma + t - tau + }) + .collect(); + + let ws_vec: Vec<_> = (0..config.address.len()) + .map(|i| { + let a = config.address[i]; + let v = e_values[i]; + let t = config.read_timestamp[i] + FieldElement::from(1); + a * gamma * gamma + v * gamma + t - tau + }) + .collect(); + + let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); + let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); + + let eval_point = MultilinearPoint(evaluation_randomness.to_vec()); + + let rs_address_eval = EvaluationsList::new(config.address.to_vec()).evaluate(&eval_point); + merlin.hint(&rs_address_eval)?; + + let rs_value_eval = EvaluationsList::new(e_values.to_vec()).evaluate(&eval_point); + merlin.hint(&rs_value_eval)?; + + let rs_timestamp_eval = + EvaluationsList::new(config.read_timestamp.to_vec()).evaluate(&eval_point); + merlin.hint(&rs_timestamp_eval)?; + + let br = axis_witness.batching_randomness; + let claimed_eval = rs_address_eval + rs_value_eval * br + rs_timestamp_eval * br * br; + + assert_eq!( + claimed_eval, + axis_witness.batched_poly().evaluate(&eval_point) + ); + + produce_whir_proof( + merlin, + eval_point, + claimed_eval, + whir_configs.num_terms_3batched.clone(), + axis_witness, + )?; + + Ok(()) +} + +/// Proves row-wise memory consistency for the SPARK protocol. 
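+///
+/// Instantiates the generic `prove_axis` with row addresses, row timestamps,
+/// and the `eq_rx` memory; the corresponding verifier checks the multiset
+/// fingerprint identity `init · ws = rs · final`.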
+/// +/// # Arguments +/// * `merlin` - Prover's transcript state +/// * `matrix` - The preprocessed SPARK matrix with COO format and timestamps +/// * `memory` - Pre-computed equality check evaluations +/// * `e_rx` - Row evaluation vector +/// * `whir_configs` - WHIR polynomial commitment configurations +/// * `final_row_ts_witness` - Commitment witness for final row timestamps +/// * `rowwise_witness` - Batched commitment witness for row data +pub fn prove_rowwise( + merlin: &mut ProverState, + matrix: &SparkMatrix, + memory: &Memory, + e_rx: &[FieldElement], + whir_configs: &SPARKWHIRConfigs, + final_row_ts_witness: Witness, + rowwise_witness: Witness, +) -> Result<()> { + prove_axis( + merlin, + AxisConfig { + eq_memory: &memory.eq_rx, + final_timestamp: &matrix.timestamps.final_row, + read_timestamp: &matrix.timestamps.read_row, + address: &matrix.coo.row, + whir_config: &whir_configs.row, + }, + e_rx, + whir_configs, + final_row_ts_witness, + rowwise_witness, + ) +} + +pub fn prove_colwise( + merlin: &mut ProverState, + matrix: &SparkMatrix, + memory: &Memory, + e_ry: &[FieldElement], + whir_configs: &SPARKWHIRConfigs, + final_col_ts_witness: Witness, + colwise_witness: Witness, +) -> Result<()> { + prove_axis( + merlin, + AxisConfig { + eq_memory: &memory.eq_ry, + final_timestamp: &matrix.timestamps.final_col, + read_timestamp: &matrix.timestamps.read_col, + address: &matrix.coo.col, + whir_config: &whir_configs.col, + }, + e_ry, + whir_configs, + final_col_ts_witness, + colwise_witness, + ) +} + +// ============================================================================ +// Verifier - Generic Implementation +// ============================================================================ + +#[inline] +fn verify_axis( + arthur: &mut VerifierState, + num_axis_items: usize, + num_nonzero_terms: usize, + whir_params: &SPARKWHIRConfigs, + whir_config: &WhirConfig, + init_mem_fn: impl Fn(&[FieldElement]) -> FieldElement, +) -> Result<()> { + let commitment_reader = CommitmentReader::new(whir_config); + let a_3batched_reader = CommitmentReader::new(&whir_params.num_terms_3batched); + + let a_axis_commitment = a_3batched_reader.parse_commitment(arthur)?; + let a_finalts_commitment = commitment_reader.parse_commitment(arthur)?; + + let mut tau_and_gamma = [FieldElement::from(0); 2]; + arthur.fill_challenge_scalars(&mut tau_and_gamma)?; + let tau = tau_and_gamma[0]; + let gamma = tau_and_gamma[1]; + + // Init Final GPA + let gpa_result = gpa_sumcheck_verifier( + arthur, + provekit_common::utils::next_power_of_two(num_axis_items) + 2, + )?; + + let claimed_init = gpa_result.claimed_values[0]; + let claimed_final = gpa_result.claimed_values[1]; + let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + + let init_adr = calculate_adr(&evaluation_randomness.to_vec()); + let init_mem = init_mem_fn(&evaluation_randomness.to_vec()); + let init_opening = init_adr * gamma * gamma + init_mem * gamma - tau; + + let final_cntr: FieldElement = arthur.hint()?; + + let mut final_cntr_statement = + Statement::::new(provekit_common::utils::next_power_of_two(num_axis_items)); + final_cntr_statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), + final_cntr, + ); + + let final_cntr_verifier = Verifier::new(whir_config); + final_cntr_verifier.verify(arthur, &a_finalts_commitment, &final_cntr_statement)?; + + let final_adr = calculate_adr(&evaluation_randomness.to_vec()); + let final_mem = 
init_mem_fn(&evaluation_randomness.to_vec()); + let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; + + let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) + + final_opening * last_randomness[0]; + + ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + + // RS WS GPA + let gpa_result = gpa_sumcheck_verifier( + arthur, + provekit_common::utils::next_power_of_two(num_nonzero_terms) + 2, + )?; + + let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); + let claimed_rs = gpa_result.claimed_values[0]; + let claimed_ws = gpa_result.claimed_values[1]; + + let rs_adr: FieldElement = arthur.hint()?; + let rs_mem: FieldElement = arthur.hint()?; + let rs_timestamp: FieldElement = arthur.hint()?; + + let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; + let ws_opening = + rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; + + let evaluated_value = + rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; + + ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); + + let mut statement = Statement::::new(provekit_common::utils::next_power_of_two( + num_nonzero_terms, + )); + + let br = a_axis_commitment.batching_randomness; + statement.add_constraint( + Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec())), + rs_adr + rs_mem * br + rs_timestamp * br * br, + ); + + let verifier = Verifier::new(&whir_params.num_terms_3batched); + verifier.verify(arthur, &a_axis_commitment, &statement)?; + + ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); + + Ok(()) +} + +// ============================================================================ +// Public API - Verifier +// ============================================================================ + +pub fn verify_rowwise( + arthur: &mut VerifierState, + num_rows: usize, + num_nonzero_terms: usize, + whir_params: &SPARKWHIRConfigs, + request: &crate::types::SPARKRequest, + _matrix_batching_randomness: &FieldElement, +) -> Result<()> { + verify_axis( + arthur, + num_rows, + num_nonzero_terms, + whir_params, + &whir_params.row, + |eval_rand| calculate_eq(&request.point_to_evaluate.row, eval_rand), + ) +} + +pub fn verify_colwise( + arthur: &mut VerifierState, + num_cols: usize, + num_nonzero_terms: usize, + whir_params: &SPARKWHIRConfigs, + request: &crate::types::SPARKRequest, + _matrix_batching_randomness: &FieldElement, +) -> Result<()> { + verify_axis( + arthur, + num_cols, + num_nonzero_terms, + whir_params, + &whir_params.col, + |eval_rand| { + calculate_eq(&request.point_to_evaluate.col[1..], eval_rand) + * (FieldElement::from(1) - request.point_to_evaluate.col[0]) + }, + ) +} + +// ============================================================================ +// Helper +// ============================================================================ + +/// Helper to generate and verify a WHIR proof at a specific evaluation point. +/// +/// # Note +/// This is called multiple times during SPARK proving for different polynomial +/// commitments. 
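+///
+/// The statement carries a single evaluation constraint: the committed
+/// polynomial must open to `evaluated_value` at `evaluation_point`.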
+fn produce_whir_proof( + merlin: &mut ProverState, + evaluation_point: MultilinearPoint, + evaluated_value: FieldElement, + config: WhirConfig, + witness: Witness, +) -> Result<()> { + let mut statement = Statement::::new(evaluation_point.num_variables()); + statement.add_constraint(Weights::evaluation(evaluation_point), evaluated_value); + + let prover = Prover::new(config); + prover.prove(merlin, statement, witness)?; + + Ok(()) +} diff --git a/provekit/spark/src/preprocessing.rs b/provekit/spark/src/preprocessing.rs new file mode 100644 index 00000000..5c3a9cf0 --- /dev/null +++ b/provekit/spark/src/preprocessing.rs @@ -0,0 +1,231 @@ +use { + crate::types::{COOMatrix, EValuesForMatrix, Memory, SparkMatrix, TimeStamps}, + anyhow::Result, + ark_ff::AdditiveGroup, + provekit_common::{utils::next_power_of_two, FieldElement, R1CS}, + std::collections::BTreeMap, +}; + +/// Preprocesses R1CS matrices into SPARK's memory-checkable COO format. +/// +/// Combines A, B, C matrix coordinates, pads to power-of-2, and generates +/// read/write timestamps for the memory checking protocol. +pub struct MatrixPreprocessor { + pub row: Vec, + pub col: Vec, + pub val_a: Vec, + pub val_b: Vec, + pub val_c: Vec, + pub read_row: Vec, + pub read_col: Vec, + pub final_row: Vec, + pub final_col: Vec, + pub original_num_entries: usize, + pub padded_num_entries: usize, + /// Union of all non-zero coordinates across A, B, C + combined_matrix_map: BTreeMap<(usize, usize), FieldElement>, +} + +impl MatrixPreprocessor { + /// Constructs preprocessor from R1CS, computing union of matrix + /// coordinates. + /// + /// This one-time preprocessing: + /// - Merges coordinates from A, B, C matrices (some entries may be zero) + /// - Pads to power-of-2 for efficient polynomial operations + /// - Generates memory access timestamps for GPA protocol + pub fn from_r1cs(r1cs: &R1CS) -> Result { + // Union of all non-zero coordinates + let mut combined_matrix_map: BTreeMap<(usize, usize), FieldElement> = r1cs + .a() + .iter() + .map(|(coordinate, _)| (coordinate, FieldElement::ZERO)) + .collect(); + + for (coordinate, _) in r1cs.b().iter() { + combined_matrix_map + .entry(coordinate) + .or_insert(FieldElement::ZERO); + } + + for (coordinate, _) in r1cs.c().iter() { + combined_matrix_map + .entry(coordinate) + .or_insert(FieldElement::ZERO); + } + + let original_num_entries = combined_matrix_map.keys().count(); + let padded_num_entries = 1 << next_power_of_two(original_num_entries); + + let mut row = Vec::with_capacity(padded_num_entries); + let mut col = Vec::with_capacity(padded_num_entries); + + for (r, c) in combined_matrix_map.keys() { + row.push(FieldElement::from(*r as u64)); + col.push(FieldElement::from(*c as u64)); + } + + let to_fill = padded_num_entries - original_num_entries; + row.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); + col.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); + + let mut val_a = vec![FieldElement::ZERO; padded_num_entries]; + let mut val_b = vec![FieldElement::ZERO; padded_num_entries]; + let mut val_c = vec![FieldElement::ZERO; padded_num_entries]; + + // Merge-scan to populate individual matrix values + let a_binding = r1cs.a(); + let b_binding = r1cs.b(); + let c_binding = r1cs.c(); + + let mut a_iter = a_binding.iter(); + let mut b_iter = b_binding.iter(); + let mut c_iter = c_binding.iter(); + + let mut a_cur = a_iter.next(); + let mut b_cur = b_iter.next(); + let mut c_cur = c_iter.next(); + + for (index, coordinate) in combined_matrix_map.keys().enumerate() { + 
if let Some((coord, value)) = a_cur { + if coord == *coordinate { + val_a[index] = value; + a_cur = a_iter.next(); + } + } + + if let Some((coord, value)) = b_cur { + if coord == *coordinate { + val_b[index] = value; + b_cur = b_iter.next(); + } + } + + if let Some((coord, value)) = c_cur { + if coord == *coordinate { + val_c[index] = value; + c_cur = c_iter.next(); + } + } + } + + // Memory timestamps track access order for GPA protocol + let mut read_row_counters = vec![0; r1cs.num_constraints()]; + let mut read_col_counters = vec![0; r1cs.num_witnesses()]; + let mut read_row = Vec::with_capacity(padded_num_entries); + let mut read_col = Vec::with_capacity(padded_num_entries); + + for (r, c) in combined_matrix_map.keys() { + read_row.push(FieldElement::from(read_row_counters[*r] as u64)); + read_col.push(FieldElement::from(read_col_counters[*c] as u64)); + read_row_counters[*r] += 1; + read_col_counters[*c] += 1; + } + + // Padding entries all access row[0], col[0] + for _ in 0..to_fill { + read_row.push(FieldElement::from(read_row_counters[0] as u64)); + read_col.push(FieldElement::from(read_col_counters[0] as u64)); + read_row_counters[0] += 1; + read_col_counters[0] += 1; + } + + let final_row = read_row_counters + .iter() + .map(|&x| FieldElement::from(x as u64)) + .collect::>(); + + let final_col = read_col_counters + .iter() + .map(|&x| FieldElement::from(x as u64)) + .collect::>(); + + Ok(Self { + row, + col, + val_a, + val_b, + val_c, + read_row, + read_col, + final_row, + final_col, + original_num_entries, + padded_num_entries, + combined_matrix_map, + }) + } + + /// Combines A + α·B + α²·C into single SPARK matrix using batching + /// randomness. + pub fn to_spark_matrix( + &self, + r1cs: &R1CS, + matrix_batching_randomness: FieldElement, + ) -> SparkMatrix { + let matrix_batching_randomness_sq = matrix_batching_randomness * matrix_batching_randomness; + + let mut combined_matrix_map = self.combined_matrix_map.clone(); + + for (coordinate, value) in r1cs.a().iter() { + combined_matrix_map.entry(coordinate).and_modify(|cur| { + *cur += value; + }); + } + + for (coordinate, value) in r1cs.b().iter() { + combined_matrix_map.entry(coordinate).and_modify(|cur| { + *cur += value * matrix_batching_randomness; + }); + } + + for (coordinate, value) in r1cs.c().iter() { + combined_matrix_map.entry(coordinate).and_modify(|cur| { + *cur += value * matrix_batching_randomness_sq; + }); + } + + let mut val = Vec::with_capacity(self.padded_num_entries); + for value in combined_matrix_map.values() { + val.push(*value); + } + let to_fill = self.padded_num_entries - self.original_num_entries; + val.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); + + SparkMatrix { + coo: COOMatrix { + row: self.row.clone(), + col: self.col.clone(), + val, + val_a: self.val_a.clone(), + val_b: self.val_b.clone(), + val_c: self.val_c.clone(), + }, + timestamps: TimeStamps { + read_row: self.read_row.clone(), + read_col: self.read_col.clone(), + final_row: self.final_row.clone(), + final_col: self.final_col.clone(), + }, + } + } + + /// Computes row and column evaluation vectors for the combined coordinates. + /// + /// For each entry at (r, c), stores eq(point_row, r) and eq(point_col, c). 
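+    ///
+    /// Padding entries reuse `eq` at index 0, matching the padded `(0, 0)`
+    /// coordinates introduced during preprocessing.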
+ pub fn compute_e_values(&self, memory: &Memory) -> EValuesForMatrix { + let mut e_rx = Vec::with_capacity(self.padded_num_entries); + let mut e_ry = Vec::with_capacity(self.padded_num_entries); + + for (r, c) in self.combined_matrix_map.keys() { + e_rx.push(memory.eq_rx[*r]); + e_ry.push(memory.eq_ry[*c]); + } + + let to_fill = self.padded_num_entries - self.original_num_entries; + e_rx.extend(std::iter::repeat(memory.eq_rx[0]).take(to_fill)); + e_ry.extend(std::iter::repeat(memory.eq_ry[0]).take(to_fill)); + + EValuesForMatrix { e_rx, e_ry } + } +} diff --git a/provekit/spark/src/prover.rs b/provekit/spark/src/prover.rs new file mode 100644 index 00000000..ad067e68 --- /dev/null +++ b/provekit/spark/src/prover.rs @@ -0,0 +1,331 @@ +use { + crate::{ + memory::{prove_colwise, prove_rowwise}, + preprocessing::MatrixPreprocessor, + sumcheck::run_spark_sumcheck, + types::{EValuesForMatrix, MatrixDimensions, SPARKProof, SPARKRequest, SPARKWHIRConfigs}, + utils::{calculate_memory, SPARKDomainSeparator}, + }, + anyhow::Result, + ark_ff::AdditiveGroup, + provekit_common::{ + skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge}, + utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, + FieldElement, IOPattern, WhirR1CSScheme, R1CS, + }, + provekit_r1cs_compiler::WhirR1CSSchemeBuilder, + spongefish::codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, + whir::{ + poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, + whir::{committer::CommitmentWriter, domainsep::WhirDomainSeparator, utils::HintSerialize}, + }, +}; + +/// SPARK proving interface for R1CS constraint systems. +pub trait SPARKProver { + /// Generates a SPARK proof from R1CS and evaluation request. + fn prove(&self, r1cs: &R1CS, request: &SPARKRequest) -> Result; +} + +/// SPARK scheme with pre-configured WHIR parameters and IO pattern. +pub struct SPARKScheme { + pub whir_configs: SPARKWHIRConfigs, + pub io_pattern: IOPattern, + pub matrix_dimensions: MatrixDimensions, +} + +impl SPARKScheme { + /// Configures SPARK scheme for given R1CS dimensions. 
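+    ///
+    /// # Example
+    ///
+    /// A minimal sketch (file paths illustrative):
+    ///
+    /// ```ignore
+    /// let r1cs = deserialize_r1cs("r1cs.json")?;
+    /// let request = deserialize_request("request.json")?;
+    /// let scheme = SPARKScheme::new_for_r1cs(&r1cs);
+    /// let proof = scheme.prove(&r1cs, &request)?;
+    /// ```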
+ pub fn new_for_r1cs(r1cs: &R1CS) -> Self { + let num_rows = r1cs.num_constraints(); + let num_cols = r1cs.num_witnesses(); + + let mut coordinates = std::collections::BTreeSet::new(); + for ((row, col), _) in r1cs.a().iter() { + coordinates.insert((row, col)); + } + for ((row, col), _) in r1cs.b().iter() { + coordinates.insert((row, col)); + } + for ((row, col), _) in r1cs.c().iter() { + coordinates.insert((row, col)); + } + let nonzero_terms = coordinates.len(); + let padded_num_entries = 1 << next_power_of_two(nonzero_terms); + + let row_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(num_rows), 1); + let col_config = WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(num_cols), 1); + let num_terms_3batched_config = + WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 3); + let num_terms_5batched_config = + WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 5); + + let whir_configs = SPARKWHIRConfigs { + row: row_config.clone(), + col: col_config.clone(), + num_terms_3batched: num_terms_3batched_config.clone(), + num_terms_5batched: num_terms_5batched_config.clone(), + }; + + let mut io = IOPattern::new("💥"); + + io = io + .hint("point_row") + .hint("point_col") + .add_claimed_evaluations(); + + io = io + .commit_statement(&num_terms_5batched_config) + .commit_statement(&num_terms_3batched_config) + .commit_statement(&num_terms_3batched_config) + .commit_statement(&row_config) + .commit_statement(&col_config) + .add_sumcheck_polynomials(next_power_of_two(padded_num_entries)) + .hint("sumcheck_last_folds") + .add_whir_proof(&num_terms_5batched_config); + + io = io.add_tau_and_gamma(); + for i in 0..=next_power_of_two(num_rows) { + io = io.add_sumcheck_polynomials(i).add_line(); + } + io = io + .hint("row_final_counter_claimed_evaluation") + .add_whir_proof(&row_config); + + for i in 0..=next_power_of_two(padded_num_entries) { + io = io.add_sumcheck_polynomials(i).add_line(); + } + io = io + .hint("row_rs_address_claimed_evaluation") + .hint("row_rs_value_claimed_evaluation") + .hint("row_rs_timestamp_claimed_evaluation") + .add_whir_proof(&num_terms_3batched_config); + + io = io.add_tau_and_gamma(); + for i in 0..=next_power_of_two(num_cols) { + io = io.add_sumcheck_polynomials(i).add_line(); + } + io = io + .hint("col_final_counter_claimed_evaluation") + .add_whir_proof(&col_config); + + for i in 0..=next_power_of_two(padded_num_entries) { + io = io.add_sumcheck_polynomials(i).add_line(); + } + io = io + .hint("col_rs_address_claimed_evaluation") + .hint("col_rs_value_claimed_evaluation") + .hint("col_rs_timestamp_claimed_evaluation") + .add_whir_proof(&num_terms_3batched_config); + + Self { + whir_configs, + io_pattern: io, + matrix_dimensions: MatrixDimensions { + num_rows, + num_cols, + nonzero_terms, + }, + } + } +} + +impl SPARKProver for SPARKScheme { + fn prove(&self, r1cs: &R1CS, request: &SPARKRequest) -> Result { + let processed = MatrixPreprocessor::from_r1cs(r1cs)?; + let memory = calculate_memory(request.point_to_evaluate.clone()); + let e_values = processed.compute_e_values(&memory); + + let mut merlin = self.io_pattern.to_prover_state(); + + merlin.hint(&request.point_to_evaluate.row)?; + merlin.hint(&request.point_to_evaluate.col)?; + + merlin.add_scalars(&[ + request.claimed_values.a, + request.claimed_values.b, + request.claimed_values.c, + ])?; + let mut matrix_batching_randomness = [FieldElement::ZERO; 1]; + merlin.fill_challenge_scalars(&mut matrix_batching_randomness)?; + let 
matrix_batching_randomness = matrix_batching_randomness[0]; + let matrix_batching_randomness_sq = matrix_batching_randomness * matrix_batching_randomness; + + let spark_matrix = processed.to_spark_matrix(r1cs, matrix_batching_randomness); + + let claimed_value = request.claimed_values.a + + request.claimed_values.b * matrix_batching_randomness + + request.claimed_values.c * matrix_batching_randomness_sq; + + prove_spark_for_single_matrix( + &mut merlin, + spark_matrix, + &memory, + e_values, + claimed_value, + &self.whir_configs, + )?; + + Ok(SPARKProof { + transcript: merlin.narg_string().to_vec(), + io_pattern: String::from_utf8(self.io_pattern.as_bytes().to_vec())?, + whir_params: self.whir_configs.clone(), + matrix_dimensions: self.matrix_dimensions.clone(), + }) + } +} + +/// Core SPARK protocol: sumcheck + row/col memory checking. +fn prove_spark_for_single_matrix( + merlin: &mut spongefish::ProverState, + matrix: crate::types::SparkMatrix, + memory: &crate::types::Memory, + e_values: EValuesForMatrix, + claimed_value: FieldElement, + whir_configs: &SPARKWHIRConfigs, +) -> Result<()> { + let row_committer = CommitmentWriter::new(whir_configs.row.clone()); + let col_committer = CommitmentWriter::new(whir_configs.col.clone()); + let batched3_committer = CommitmentWriter::new(whir_configs.num_terms_3batched.clone()); + let batched5_committer = CommitmentWriter::new(whir_configs.num_terms_5batched.clone()); + + // Commit to 5 polynomials: val_a, val_b, val_c, eᵣ, eᵧ + let sumcheck_witness = batched5_committer.commit_batch(merlin, &[ + EvaluationsList::new(matrix.coo.val_a.clone()).to_coeffs(), + EvaluationsList::new(matrix.coo.val_b.clone()).to_coeffs(), + EvaluationsList::new(matrix.coo.val_c.clone()).to_coeffs(), + EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(), + EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(), + ])?; + + let rowwise_witness = batched3_committer.commit_batch(merlin, &[ + EvaluationsList::new(matrix.coo.row.clone()).to_coeffs(), + EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(), + EvaluationsList::new(matrix.timestamps.read_row.clone()).to_coeffs(), + ])?; + + let colwise_witness = batched3_committer.commit_batch(merlin, &[ + EvaluationsList::new(matrix.coo.col.clone()).to_coeffs(), + EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(), + EvaluationsList::new(matrix.timestamps.read_col.clone()).to_coeffs(), + ])?; + + let final_row_ts_witness = + commit_to_vector(&row_committer, merlin, matrix.timestamps.final_row.clone()); + let final_col_ts_witness = + commit_to_vector(&col_committer, merlin, matrix.timestamps.final_col.clone()); + + let mles = [ + matrix.coo.val.clone(), + e_values.e_rx.clone(), + e_values.e_ry.clone(), + ]; + + let (sumcheck_final_folds, folding_randomness) = + run_spark_sumcheck(merlin, mles, claimed_value)?; + + let val_a_eval = EvaluationsList::new(matrix.coo.val_a.clone()) + .evaluate(&MultilinearPoint(folding_randomness.to_vec().clone())); + let val_b_eval = EvaluationsList::new(matrix.coo.val_b.clone()) + .evaluate(&MultilinearPoint(folding_randomness.to_vec().clone())); + let val_c_eval = EvaluationsList::new(matrix.coo.val_c.clone()) + .evaluate(&MultilinearPoint(folding_randomness.to_vec().clone())); + + merlin.hint::>( + &[ + val_a_eval, + val_b_eval, + val_c_eval, + sumcheck_final_folds[1], + sumcheck_final_folds[2], + ] + .to_vec(), + )?; + + // Batching randomness powers: [1, β, β², β³, β⁴] + let mut batching_randomness = Vec::with_capacity(5); + let mut cur = FieldElement::from(1); + for _ in 0..5 { + 
batching_randomness.push(cur); + cur *= sumcheck_witness.batching_randomness; + } + + let claimed_batched_value = val_a_eval * batching_randomness[0] + + val_b_eval * batching_randomness[1] + + val_c_eval * batching_randomness[2] + + sumcheck_final_folds[1] * batching_randomness[3] + + sumcheck_final_folds[2] * batching_randomness[4]; + + produce_whir_proof( + merlin, + MultilinearPoint(folding_randomness.to_vec()), + claimed_batched_value, + whir_configs.num_terms_5batched.clone(), + sumcheck_witness, + )?; + + prove_rowwise( + merlin, + &matrix, + memory, + &e_values.e_rx, + whir_configs, + final_row_ts_witness, + rowwise_witness, + )?; + + prove_colwise( + merlin, + &matrix, + memory, + &e_values.e_ry, + whir_configs, + final_col_ts_witness, + colwise_witness, + )?; + + Ok(()) +} + +/// Commits to vector and returns WHIR witness. +fn commit_to_vector( + committer: &CommitmentWriter< + FieldElement, + SkyscraperMerkleConfig, + provekit_common::skyscraper::SkyscraperPoW, + >, + merlin: &mut spongefish::ProverState, + vector: Vec, +) -> whir::whir::committer::Witness { + assert!( + vector.len().is_power_of_two(), + "Vector length must be power of two" + ); + let evals = EvaluationsList::new(vector); + let coeffs = evals.to_coeffs(); + committer + .commit(merlin, coeffs) + .expect("WHIR commitment failed") +} + +/// Generates WHIR opening proof for polynomial evaluation. +fn produce_whir_proof( + merlin: &mut spongefish::ProverState, + evaluation_point: MultilinearPoint, + evaluated_value: FieldElement, + config: provekit_common::WhirConfig, + witness: whir::whir::committer::Witness, +) -> Result<()> { + use whir::whir::{ + prover::Prover, + statement::{Statement, Weights}, + }; + + let mut statement = Statement::::new(evaluation_point.num_variables()); + statement.add_constraint(Weights::evaluation(evaluation_point), evaluated_value); + let prover = Prover::new(config); + + prover.prove(merlin, statement, witness)?; + + Ok(()) +} diff --git a/provekit/spark/src/sumcheck.rs b/provekit/spark/src/sumcheck.rs new file mode 100644 index 00000000..ba409ecc --- /dev/null +++ b/provekit/spark/src/sumcheck.rs @@ -0,0 +1,132 @@ +use ::{ + anyhow::{ensure, Result}, + ark_std::{One, Zero}, + provekit_common::{ + skyscraper::SkyscraperSponge, + utils::{ + sumcheck::{eval_cubic_poly, sumcheck_fold_map_reduce}, + HALF, + }, + FieldElement, + }, + spongefish::{ + codecs::arkworks_algebra::{FieldToUnitDeserialize, FieldToUnitSerialize, UnitToField}, + ProverState, VerifierState, + }, +}; + +/// Runs sumcheck protocol for SPARK matrix evaluation. +/// +/// Proves that `∑ m₀(x) · m₁(x) · m₂(x) = claimed_value` over the boolean +/// hypercube without revealing individual polynomial values. 
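+///
+/// In SPARK the three multilinears are the combined matrix values `val`, the
+/// row vector `e_rx`, and the column vector `e_ry`, so the hypercube sum is
+/// the claimed evaluation of the batched matrix at the requested point.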
+/// +/// # Returns +/// +/// Tuple of `(final_folded_values, accumulated_randomness)` +pub fn run_spark_sumcheck( + merlin: &mut ProverState, + mles: [Vec; 3], + mut claimed_value: FieldElement, +) -> Result<([FieldElement; 3], Vec)> { + let mut sumcheck_randomness = [FieldElement::from(0)]; + let mut sumcheck_randomness_accumulator = Vec::::new(); + let mut fold = None; + + let mut m0 = mles[0].clone(); + let mut m1 = mles[1].clone(); + let mut m2 = mles[2].clone(); + + loop { + // Evaluate cubic at special points: 0, -1, ∞ + let [hhat_i_at_0, hhat_i_at_em1, hhat_i_at_inf_over_x_cube] = + sumcheck_fold_map_reduce([&mut m0, &mut m1, &mut m2], fold, |[m0, m1, m2]| { + [ + m0.0 * m1.0 * m2.0, + (m0.0 + m0.0 - m0.1) * (m1.0 + m1.0 - m1.1) * (m2.0 + m2.0 - m2.1), + (m0.1 - m0.0) * (m1.1 - m1.0) * (m2.1 - m2.0), + ] + }); + + if fold.is_some() { + m0.truncate(m0.len() / 2); + m1.truncate(m1.len() / 2); + m2.truncate(m2.len() / 2); + } + + let mut hhat_i_coeffs = [FieldElement::from(0); 4]; + + hhat_i_coeffs[0] = hhat_i_at_0; + hhat_i_coeffs[2] = + HALF * (claimed_value + hhat_i_at_em1 - hhat_i_at_0 - hhat_i_at_0 - hhat_i_at_0); + hhat_i_coeffs[3] = hhat_i_at_inf_over_x_cube; + hhat_i_coeffs[1] = claimed_value + - hhat_i_coeffs[0] + - hhat_i_coeffs[0] + - hhat_i_coeffs[3] + - hhat_i_coeffs[2]; + + assert_eq!( + claimed_value, + hhat_i_coeffs[0] + + hhat_i_coeffs[0] + + hhat_i_coeffs[1] + + hhat_i_coeffs[2] + + hhat_i_coeffs[3], + "Sumcheck binding check failed" + ); + + merlin.add_scalars(&hhat_i_coeffs[..])?; + merlin.fill_challenge_scalars(&mut sumcheck_randomness)?; + fold = Some(sumcheck_randomness[0]); + claimed_value = eval_cubic_poly(&hhat_i_coeffs, &sumcheck_randomness[0]); + sumcheck_randomness_accumulator.push(sumcheck_randomness[0]); + if m0.len() <= 2 { + break; + } + } + + let folded_v0 = m0[0] + (m0[1] - m0[0]) * sumcheck_randomness[0]; + let folded_v1 = m1[0] + (m1[1] - m1[0]) * sumcheck_randomness[0]; + let folded_v2 = m2[0] + (m2[1] - m2[0]) * sumcheck_randomness[0]; + + Ok(( + [folded_v0, folded_v1, folded_v2], + sumcheck_randomness_accumulator, + )) +} + +/// Verifies a SPARK sumcheck proof from the transcript. +/// +/// Checks that the prover's claimed sum is correct by verifying polynomial +/// evaluations at each round without recomputing the full sum. 
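+///
+/// Round `i` reads a cubic `h_i`, checks `h_i(0) + h_i(1)` against the
+/// running claim, samples `α_i`, and updates the claim to `h_i(α_i)`.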
+/// +/// # Returns +/// +/// Tuple of `(accumulated_randomness, final_evaluation)` +pub fn run_sumcheck_verifier_spark( + arthur: &mut VerifierState, + variable_count: usize, + initial_sumcheck_val: FieldElement, +) -> Result<(Vec, FieldElement)> { + let mut saved_val_for_sumcheck_equality_assertion = initial_sumcheck_val; + + let mut alpha = vec![FieldElement::zero(); variable_count]; + + for i in 0..variable_count { + let mut hhat_i = [FieldElement::zero(); 4]; + let mut alpha_i = [FieldElement::zero(); 1]; + arthur.fill_next_scalars(&mut hhat_i)?; + arthur.fill_challenge_scalars(&mut alpha_i)?; + alpha[i] = alpha_i[0]; + + let hhat_i_at_zero = eval_cubic_poly(&hhat_i, &FieldElement::zero()); + let hhat_i_at_one = eval_cubic_poly(&hhat_i, &FieldElement::one()); + ensure!( + saved_val_for_sumcheck_equality_assertion == hhat_i_at_zero + hhat_i_at_one, + "Sumcheck equality check failed" + ); + saved_val_for_sumcheck_equality_assertion = eval_cubic_poly(&hhat_i, &alpha_i[0]); + } + + Ok((alpha, saved_val_for_sumcheck_equality_assertion)) +} diff --git a/provekit/spark/src/types.rs b/provekit/spark/src/types.rs new file mode 100644 index 00000000..034122c7 --- /dev/null +++ b/provekit/spark/src/types.rs @@ -0,0 +1,102 @@ +use ::{ + provekit_common::{FieldElement, WhirConfig}, + serde::{Deserialize, Serialize}, +}; + +/// Complete SPARK proof including transcript and configuration. +#[derive(Serialize, Deserialize)] +pub struct SPARKProof { + pub transcript: Vec, + pub io_pattern: String, + pub whir_params: SPARKWHIRConfigs, + pub matrix_dimensions: MatrixDimensions, +} + +/// Dimensions of the R1CS matrices used in the proof. +#[derive(Serialize, Deserialize, Clone)] +pub struct MatrixDimensions { + pub num_rows: usize, + pub num_cols: usize, + pub nonzero_terms: usize, +} + +/// WHIR commitment scheme configurations for different vector sizes. +#[derive(Serialize, Deserialize, Clone)] +pub struct SPARKWHIRConfigs { + pub row: WhirConfig, + pub col: WhirConfig, + pub num_terms_3batched: WhirConfig, + pub num_terms_5batched: WhirConfig, +} + +pub use provekit_common::spark::{ClaimedValues, Point, SPARKRequest}; + +/// SPARK matrix in COO format with memory access timestamps. +#[derive(Debug, Clone)] +pub struct SparkMatrix { + pub coo: COOMatrix, + pub timestamps: TimeStamps, +} + +/// Coordinate (COO) sparse matrix format storing row/col indices and values. +#[derive(Debug, Clone)] +pub struct COOMatrix { + pub row: Vec, + pub col: Vec, + pub val: Vec, + pub val_a: Vec, + pub val_b: Vec, + pub val_c: Vec, +} + +/// Memory access timestamps for GPA protocol. +#[derive(Debug, Clone)] +pub struct TimeStamps { + pub read_row: Vec, + pub read_col: Vec, + pub final_row: Vec, + pub final_col: Vec, +} + +/// Precomputed equality check evaluations for memory arguments. +#[derive(Debug, Clone)] +pub struct Memory { + pub eq_rx: Vec, + pub eq_ry: Vec, +} + +/// Row and column evaluation vectors at the challenge point. +#[derive(Debug, Clone)] +pub struct EValuesForMatrix { + pub e_rx: Vec, + pub e_ry: Vec, +} + +use provekit_common::gnark::WHIRConfigGnark; + +/// SPARK proof formatted for Gnark recursive verifier. +#[derive(Serialize, Deserialize)] +pub struct SPARKProofGnark { + pub transcript: Vec, + pub io_pattern: String, + pub whir_row: WHIRConfigGnark, + pub whir_col: WHIRConfigGnark, + pub whir_3batched: WHIRConfigGnark, + pub whir_5batched: WHIRConfigGnark, + pub log_num_terms: usize, +} + +impl SPARKProofGnark { + /// Converts SPARK proof to Gnark-compatible format. 
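+    ///
+    /// `log_num_terms` is typically `next_power_of_two(nonzero_terms)`, as
+    /// computed by the `spark-cli` prove command.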
+ pub fn from_proof(proof: &SPARKProof, log_num_terms: usize) -> Self { + Self { + transcript: proof.transcript.clone(), + io_pattern: proof.io_pattern.clone(), + whir_row: WHIRConfigGnark::new(&proof.whir_params.row), + whir_col: WHIRConfigGnark::new(&proof.whir_params.col), + whir_3batched: WHIRConfigGnark::new(&proof.whir_params.num_terms_3batched), + whir_5batched: WHIRConfigGnark::new(&proof.whir_params.num_terms_5batched), + log_num_terms, + } + } +} diff --git a/provekit/spark/src/utils.rs b/provekit/spark/src/utils.rs new file mode 100644 index 00000000..94d8773a --- /dev/null +++ b/provekit/spark/src/utils.rs @@ -0,0 +1,64 @@ +pub use crate::types::{Memory, Point, SPARKRequest}; +use ::{ + anyhow::{Context, Result}, + provekit_common::{ + utils::{next_power_of_two, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq}, + FieldElement, R1CS, + }, + spongefish::codecs::arkworks_algebra::FieldDomainSeparator, + std::{fs, path::Path}, +}; + +/// Deserializes R1CS from JSON and pads matrices to power-of-2 dimensions. +pub fn deserialize_r1cs(path: impl AsRef) -> Result { + let json_str = fs::read_to_string(path).context("Failed to read R1CS file")?; + let mut r1cs: R1CS = serde_json::from_str(&json_str).context("Failed to deserialize R1CS")?; + r1cs.grow_matrices( + 1 << next_power_of_two(r1cs.num_constraints()), + 1 << next_power_of_two(r1cs.num_witnesses()), + ); + Ok(r1cs) +} + +/// Deserializes SPARK request from JSON. +pub fn deserialize_request(path: impl AsRef) -> Result { + let json_str = fs::read_to_string(path).context("Failed to read request file")?; + serde_json::from_str(&json_str).context("Failed to deserialize request") +} + +/// Computes equality check evaluations for row and column points. +pub fn calculate_memory(point_to_evaluate: Point) -> Memory { + Memory { + eq_rx: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.row), + eq_ry: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.col[1..]) + .iter() + .map(|x| *x * (FieldElement::from(1) - point_to_evaluate.col[0])) + .collect(), + } +} + +/// Trait extending IO patterns with SPARK-specific domain separators. 
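+///
+/// Each helper mirrors one transcript interaction: `add_tau_and_gamma`
+/// samples the two memory-checking challenges, `add_line` covers one GPA
+/// line polynomial plus its challenge, and `add_claimed_evaluations` covers
+/// the A/B/C claims plus the matrix combination challenge.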
+pub trait SPARKDomainSeparator { + fn add_tau_and_gamma(self) -> Self; + fn add_line(self) -> Self; + fn add_claimed_evaluations(self) -> Self; +} + +impl SPARKDomainSeparator for IOPattern +where + IOPattern: FieldDomainSeparator, +{ + fn add_tau_and_gamma(self) -> Self { + self.challenge_scalars(2, "tau and gamma") + } + + fn add_line(self) -> Self { + self.add_scalars(2, "gpa line") + .challenge_scalars(1, "gpa line random") + } + + fn add_claimed_evaluations(self) -> Self { + self.add_scalars(3, "claimed evaluations") + .challenge_scalars(1, "matrix combination randomness") + } +} diff --git a/provekit/spark/src/verifier.rs b/provekit/spark/src/verifier.rs new file mode 100644 index 00000000..9683fdde --- /dev/null +++ b/provekit/spark/src/verifier.rs @@ -0,0 +1,159 @@ +use { + crate::{ + memory::{verify_colwise, verify_rowwise}, + sumcheck::run_sumcheck_verifier_spark, + types::{MatrixDimensions, SPARKProof, SPARKRequest, SPARKWHIRConfigs}, + }, + anyhow::{ensure, Context, Result}, + provekit_common::{ + skyscraper::SkyscraperSponge, utils::next_power_of_two, FieldElement, IOPattern, + }, + spongefish::codecs::arkworks_algebra::{FieldToUnitDeserialize, UnitToField}, + whir::{ + poly_utils::multilinear::MultilinearPoint, + whir::{ + committer::CommitmentReader, + statement::{Statement, Weights}, + utils::HintDeserialize, + verifier::Verifier, + }, + }, +}; + +/// SPARK verification interface. +pub trait SPARKVerifier { + /// Verifies a SPARK proof against the given request. + fn verify(&self, proof: &SPARKProof, request: &SPARKRequest) -> Result<()>; +} + +/// SPARK verification scheme with configuration extracted from proof. +pub struct SPARKScheme { + pub whir_configs: SPARKWHIRConfigs, + pub io_pattern: IOPattern, + pub matrix_dimensions: MatrixDimensions, +} + +impl SPARKScheme { + /// Constructs verifier scheme from proof metadata. + pub fn from_proof(proof: &SPARKProof) -> Self { + Self { + whir_configs: proof.whir_params.clone(), + io_pattern: IOPattern::from_string(proof.io_pattern.clone()), + matrix_dimensions: proof.matrix_dimensions.clone(), + } + } +} + +impl SPARKVerifier for SPARKScheme { + fn verify(&self, proof: &SPARKProof, request: &SPARKRequest) -> Result<()> { + let io = IOPattern::from_string(proof.io_pattern.clone()); + let mut arthur = io.to_verifier_state(&proof.transcript); + + let _point_row: Vec = arthur.hint()?; + let _point_col: Vec = arthur.hint()?; + + let mut claimed_values = [FieldElement::from(0); 3]; + arthur.fill_next_scalars(&mut claimed_values)?; + + let mut matrix_batching_randomness = [FieldElement::from(0); 1]; + arthur.fill_challenge_scalars(&mut matrix_batching_randomness)?; + let matrix_batching_randomness = matrix_batching_randomness[0]; + + let claimed_value = claimed_values[0] + + claimed_values[1] * matrix_batching_randomness + + claimed_values[2] * matrix_batching_randomness * matrix_batching_randomness; + + verify_spark_single_matrix( + &matrix_batching_randomness, + &proof.whir_params, + proof.matrix_dimensions.clone(), + &mut arthur, + request, + &claimed_value, + ) + } +} + +/// Core SPARK verification: sumcheck + row/col memory checks. 
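+///
+/// Mirrors `prove_spark_for_single_matrix`: parse the five commitments in
+/// the same order, replay the sumcheck, check the batched WHIR opening, then
+/// re-run the row-wise and column-wise memory checks.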
+fn verify_spark_single_matrix( + matrix_batching_randomness: &FieldElement, + whir_params: &SPARKWHIRConfigs, + matrix_dimensions: MatrixDimensions, + arthur: &mut spongefish::VerifierState, + request: &SPARKRequest, + claimed_value: &FieldElement, +) -> Result<()> { + let commitment_reader_row = CommitmentReader::new(&whir_params.row); + let commitment_reader_col = CommitmentReader::new(&whir_params.col); + + let a_3batched_commitment_reader = CommitmentReader::new(&whir_params.num_terms_3batched); + let a_5batched_commitment_reader = CommitmentReader::new(&whir_params.num_terms_5batched); + + let a_sumcheck_commitment = a_5batched_commitment_reader.parse_commitment(arthur)?; + let _a_rowwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; + let _a_colwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; + + let _a_row_finalts_commitment = commitment_reader_row.parse_commitment(arthur)?; + let _a_col_finalts_commitment = commitment_reader_col.parse_commitment(arthur)?; + + let (randomness, a_last_sumcheck_value) = run_sumcheck_verifier_spark( + arthur, + next_power_of_two(matrix_dimensions.nonzero_terms), + *claimed_value, + ) + .context("While verifying SPARK sumcheck")?; + + let final_folds: Vec = arthur.hint()?; + + let claimed_val = final_folds[0] + + final_folds[1] * matrix_batching_randomness + + final_folds[2] * matrix_batching_randomness * matrix_batching_randomness; + ensure!(a_last_sumcheck_value == claimed_val * final_folds[3] * final_folds[4]); + + let mut a_spark_sumcheck_statement_verifier = + Statement::::new(next_power_of_two(matrix_dimensions.nonzero_terms)); + + // Batching randomness powers: [1, β, β², β³, β⁴] + let mut batching_randomness = Vec::with_capacity(5); + let mut cur = FieldElement::from(1); + for _ in 0..5 { + batching_randomness.push(cur); + cur *= a_sumcheck_commitment.batching_randomness; + } + + a_spark_sumcheck_statement_verifier.add_constraint( + Weights::evaluation(MultilinearPoint(randomness.clone())), + final_folds[0] * batching_randomness[0] + + final_folds[1] * batching_randomness[1] + + final_folds[2] * batching_randomness[2] + + final_folds[3] * batching_randomness[3] + + final_folds[4] * batching_randomness[4], + ); + + let a_spark_sumcheck_verifier = Verifier::new(&whir_params.num_terms_5batched); + a_spark_sumcheck_verifier.verify( + arthur, + &a_sumcheck_commitment, + &a_spark_sumcheck_statement_verifier, + )?; + + verify_rowwise( + arthur, + matrix_dimensions.num_rows, + matrix_dimensions.nonzero_terms, + whir_params, + request, + matrix_batching_randomness, + )?; + + verify_colwise( + arthur, + matrix_dimensions.num_cols, + matrix_dimensions.nonzero_terms, + whir_params, + request, + matrix_batching_randomness, + )?; + + Ok(()) +} diff --git a/tooling/spark-cli/Cargo.toml b/tooling/spark-cli/Cargo.toml new file mode 100644 index 00000000..5880191c --- /dev/null +++ b/tooling/spark-cli/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "spark-cli" +version = "0.1.0" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true + +[dependencies] +provekit-common.workspace = true +provekit-spark = { path = "../../provekit/spark" } +anyhow.workspace = true +argh = "0.1.12" +serde_json.workspace = true + +[lints] +workspace = true + diff --git a/tooling/spark-cli/src/cmd/mod.rs b/tooling/spark-cli/src/cmd/mod.rs new file mode 100644 index 00000000..ec5e380f --- /dev/null +++ 
b/tooling/spark-cli/src/cmd/mod.rs @@ -0,0 +1,2 @@ +pub mod prove; +pub mod verify; diff --git a/tooling/spark-cli/src/cmd/prove.rs b/tooling/spark-cli/src/cmd/prove.rs new file mode 100644 index 00000000..a93cb4ea --- /dev/null +++ b/tooling/spark-cli/src/cmd/prove.rs @@ -0,0 +1,65 @@ +use ::{ + anyhow::{Context, Result}, + argh::FromArgs, + provekit_spark::{ + deserialize_r1cs, deserialize_request, SPARKProofGnark, SPARKProver, SPARKProverScheme, + }, + std::{fs::File, io::Write, path::PathBuf}, +}; + +#[derive(FromArgs)] +#[argh(subcommand, name = "prove")] +#[argh(description = "Generate a SPARK proof")] +pub struct ProveArgs { + /// path to R1CS file + #[argh(option)] + r1cs: PathBuf, + + /// path to request file + #[argh(option)] + request: PathBuf, + + /// output path for proof (default: spark_proof.json) + #[argh(option, short = 'o', default = "PathBuf::from(\"spark_proof.json\")")] + output: PathBuf, + + /// output path for gnark proof (default: gnark_spark_proof.json) + #[argh(option, default = "PathBuf::from(\"gnark_spark_proof.json\")")] + gnark_output: PathBuf, +} + +pub fn execute(args: ProveArgs) -> Result<()> { + println!("Loading R1CS from {:?}...", args.r1cs); + let r1cs = deserialize_r1cs(&args.r1cs).context("Failed to load R1CS")?; + + println!("Loading request from {:?}...", args.request); + let request = deserialize_request(&args.request).context("Failed to load request")?; + + println!("Creating SPARK scheme..."); + let scheme = SPARKProverScheme::new_for_r1cs(&r1cs); + + println!("Generating proof..."); + let proof = scheme + .prove(&r1cs, &request) + .context("Failed to generate proof")?; + + // Write proof + println!("Writing proof to {:?}...", args.output); + let mut file = File::create(&args.output).context("Failed to create output file")?; + file.write_all(serde_json::to_string(&proof)?.as_bytes()) + .context("Failed to write proof")?; + + // Write gnark proof + println!("Writing gnark proof to {:?}...", args.gnark_output); + let log_num_terms = + provekit_common::utils::next_power_of_two(proof.matrix_dimensions.nonzero_terms); + let gnark_proof = SPARKProofGnark::from_proof(&proof, log_num_terms); + let mut gnark_file = + File::create(&args.gnark_output).context("Failed to create gnark output file")?; + gnark_file + .write_all(serde_json::to_string(&gnark_proof)?.as_bytes()) + .context("Failed to write gnark proof")?; + + println!("✓ Proof generated successfully"); + Ok(()) +} diff --git a/tooling/spark-cli/src/cmd/verify.rs b/tooling/spark-cli/src/cmd/verify.rs new file mode 100644 index 00000000..566de636 --- /dev/null +++ b/tooling/spark-cli/src/cmd/verify.rs @@ -0,0 +1,40 @@ +use ::{ + anyhow::{Context, Result}, + argh::FromArgs, + provekit_spark::{deserialize_request, SPARKProof, SPARKVerifier, SPARKVerifierScheme}, + std::{fs, path::PathBuf}, +}; + +#[derive(FromArgs)] +#[argh(subcommand, name = "verify")] +#[argh(description = "Verify a SPARK proof")] +pub struct VerifyArgs { + /// path to proof file + #[argh(option)] + proof: PathBuf, + + /// path to request file + #[argh(option)] + request: PathBuf, +} + +pub fn execute(args: VerifyArgs) -> Result<()> { + println!("Loading proof from {:?}...", args.proof); + let proof_str = fs::read_to_string(&args.proof).context("Failed to read proof file")?; + let proof: SPARKProof = + serde_json::from_str(&proof_str).context("Failed to deserialize proof")?; + + println!("Loading request from {:?}...", args.request); + let request = deserialize_request(&args.request).context("Failed to load request")?; + + 
println!("Creating verification scheme..."); + let scheme = SPARKVerifierScheme::from_proof(&proof); + + println!("Verifying proof..."); + scheme + .verify(&proof, &request) + .context("Verification failed")?; + + println!("✓ Proof verified successfully"); + Ok(()) +} diff --git a/tooling/spark-cli/src/main.rs b/tooling/spark-cli/src/main.rs new file mode 100644 index 00000000..8a20bc86 --- /dev/null +++ b/tooling/spark-cli/src/main.rs @@ -0,0 +1,26 @@ +mod cmd; + +use ::{anyhow::Result, argh::FromArgs}; + +#[derive(FromArgs)] +#[argh(description = "SPARK Prover CLI")] +struct Args { + #[argh(subcommand)] + command: Command, +} + +#[derive(FromArgs)] +#[argh(subcommand)] +enum Command { + Prove(cmd::prove::ProveArgs), + Verify(cmd::verify::VerifyArgs), +} + +fn main() -> Result<()> { + let args: Args = argh::from_env(); + + match args.command { + Command::Prove(args) => cmd::prove::execute(args), + Command::Verify(args) => cmd::verify::execute(args), + } +} From b0960cf4dc6f68abbed9c5fba56c93d862e5c2d5 Mon Sep 17 00:00:00 2001 From: shreyas-londhe Date: Fri, 10 Oct 2025 09:51:36 +0530 Subject: [PATCH 30/34] fix: spark proving --- Cargo.toml | 1 + provekit/common/src/noir_proof_scheme.rs | 2 ++ provekit/common/src/spark.rs | 15 ++++++++--- provekit/prover/src/noir_proof_scheme.rs | 7 ++++-- provekit/prover/src/whir_r1cs.rs | 25 +++++++++++-------- provekit/spark/src/bin/generate_test_r1cs.rs | 2 +- .../spark/src/bin/generate_test_request.rs | 6 ++--- provekit/spark/src/gpa.rs | 2 +- provekit/spark/src/lib.rs | 5 +--- provekit/spark/src/memory.rs | 9 +++---- provekit/spark/src/prover.rs | 7 +++--- provekit/spark/src/sumcheck.rs | 2 +- provekit/spark/src/types.rs | 4 +-- provekit/spark/src/utils.rs | 7 +++--- provekit/spark/src/verifier.rs | 11 ++++---- provekit/verifier/src/whir_r1cs.rs | 3 +-- spark-prover/src/bin/generate_test_request.rs | 4 +-- spark-prover/src/bin/spark-verifier.rs | 8 +++--- spark-prover/src/utilities/mod.rs | 4 +-- tooling/spark-cli/Cargo.toml | 4 +-- tooling/spark-cli/src/cmd/prove.rs | 25 +++++++++++-------- tooling/spark-cli/src/cmd/verify.rs | 12 ++++----- tooling/spark-cli/src/main.rs | 2 +- 23 files changed, 91 insertions(+), 76 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index f6760f26..269033f9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -88,6 +88,7 @@ provekit-prover = { path = "provekit/prover" } provekit-r1cs-compiler = { path = "provekit/r1cs-compiler" } provekit-verifier = { path = "provekit/verifier" } provekit-verifier-server = { path = "tooling/verifier-server" } +provekit-spark = { path = "provekit/spark" } # 3rd party anyhow = "1.0.93" diff --git a/provekit/common/src/noir_proof_scheme.rs b/provekit/common/src/noir_proof_scheme.rs index b5a9cb3a..e90e3943 100644 --- a/provekit/common/src/noir_proof_scheme.rs +++ b/provekit/common/src/noir_proof_scheme.rs @@ -1,5 +1,6 @@ use { crate::{ + spark::SparkStatement, whir_r1cs::{WhirR1CSProof, WhirR1CSScheme}, witness::{NoirWitnessGenerator, WitnessBuilder}, NoirElement, R1CS, @@ -25,6 +26,7 @@ pub struct NoirProofScheme { #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct NoirProof { pub whir_r1cs_proof: WhirR1CSProof, + pub spark_statement: SparkStatement, } impl NoirProofScheme { diff --git a/provekit/common/src/spark.rs b/provekit/common/src/spark.rs index f89ec611..5c314dbc 100644 --- a/provekit/common/src/spark.rs +++ b/provekit/common/src/spark.rs @@ -1,9 +1,12 @@ use { crate::{utils::serde_ark, FieldElement}, + ark_serialize::{CanonicalDeserialize, CanonicalSerialize}, 
serde::{Deserialize, Serialize}, }; -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive( + Debug, Clone, PartialEq, Eq, CanonicalSerialize, Serialize, CanonicalDeserialize, Deserialize, +)] pub struct Point { #[serde(with = "serde_ark")] pub row: Vec, @@ -11,7 +14,9 @@ pub struct Point { pub col: Vec, } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive( + Debug, Clone, PartialEq, Eq, CanonicalSerialize, Serialize, CanonicalDeserialize, Deserialize, +)] pub struct ClaimedValues { #[serde(with = "serde_ark")] pub a: FieldElement, @@ -21,8 +26,10 @@ pub struct ClaimedValues { pub c: FieldElement, } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct SPARKRequest { +#[derive( + Debug, Clone, PartialEq, Eq, CanonicalSerialize, Serialize, CanonicalDeserialize, Deserialize, +)] +pub struct SparkStatement { pub point_to_evaluate: Point, pub claimed_values: ClaimedValues, } diff --git a/provekit/prover/src/noir_proof_scheme.rs b/provekit/prover/src/noir_proof_scheme.rs index 37ec5f46..e17a4dbe 100644 --- a/provekit/prover/src/noir_proof_scheme.rs +++ b/provekit/prover/src/noir_proof_scheme.rs @@ -83,12 +83,15 @@ impl NoirProofSchemeProver for NoirProofScheme { .context("While verifying R1CS instance")?; // Prove R1CS instance - let whir_r1cs_proof = self + let (whir_r1cs_proof, spark_statement) = self .whir_for_witness .prove(&self.r1cs, witness) .context("While proving R1CS instance")?; - Ok(NoirProof { whir_r1cs_proof }) + Ok(NoirProof { + whir_r1cs_proof, + spark_statement, + }) } fn create_witness_io_pattern(&self) -> IOPattern { diff --git a/provekit/prover/src/whir_r1cs.rs b/provekit/prover/src/whir_r1cs.rs index 65458535..e31a49b2 100644 --- a/provekit/prover/src/whir_r1cs.rs +++ b/provekit/prover/src/whir_r1cs.rs @@ -5,7 +5,7 @@ use { provekit_common::{ file::write, skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge}, - spark::{self, ClaimedValues, Point, SPARKRequest}, + spark::{ClaimedValues, Point, SparkStatement}, utils::{ pad_to_power_of_two, sumcheck::{ @@ -37,12 +37,20 @@ use { }; pub trait WhirR1CSProver { - fn prove(&self, r1cs: &R1CS, witness: Vec) -> Result; + fn prove( + &self, + r1cs: &R1CS, + witness: Vec, + ) -> Result<(WhirR1CSProof, SparkStatement)>; } impl WhirR1CSProver for WhirR1CSScheme { #[instrument(skip_all)] - fn prove(&self, r1cs: &R1CS, witness: Vec) -> Result { + fn prove( + &self, + r1cs: &R1CS, + witness: Vec, + ) -> Result<(WhirR1CSProof, SparkStatement)> { ensure!( witness.len() == r1cs.num_witnesses(), "Unexpected witness length for R1CS instance" @@ -97,9 +105,7 @@ impl WhirR1CSProver for WhirR1CSScheme { let (merlin, whir_randomness, deferred_evaluations) = run_zk_whir_pcs_prover(commitment_to_witness, statement, &self.whir_witness, merlin); - let transcript = merlin.narg_string().to_vec(); - - let spark_request: SPARKRequest = SPARKRequest { + let spark_statement: SparkStatement = SparkStatement { point_to_evaluate: Point { row: alpha, col: whir_randomness.0, @@ -111,10 +117,9 @@ impl WhirR1CSProver for WhirR1CSScheme { }, }; - let mut spark_request_file = File::create("spark_request.json")?; // Creates or truncates the spark_request_file - spark_request_file.write_all(serde_json::to_string(&spark_request).unwrap().as_bytes())?; // Writes bytes to the file + let transcript = merlin.narg_string().to_vec(); - Ok(WhirR1CSProof { transcript }) + Ok((WhirR1CSProof { transcript }, spark_statement)) } } @@ -148,7 +153,7 @@ pub fn compute_blinding_coefficients_for_round( let two = 
FieldElement::one() + FieldElement::one(); let mut prefix_multiplier = FieldElement::one(); - for _ in 0..(n - 1 - compute_for) { + for _ in 0..n - 1 - compute_for { prefix_multiplier = prefix_multiplier + prefix_multiplier; } let suffix_multiplier: ark_ff::Fp< diff --git a/provekit/spark/src/bin/generate_test_r1cs.rs b/provekit/spark/src/bin/generate_test_r1cs.rs index ebf2f034..aec6764e 100644 --- a/provekit/spark/src/bin/generate_test_r1cs.rs +++ b/provekit/spark/src/bin/generate_test_r1cs.rs @@ -1,4 +1,4 @@ -use ::{ +use { provekit_common::{FieldElement, R1CS}, std::{fs::File, io::Write}, }; diff --git a/provekit/spark/src/bin/generate_test_request.rs b/provekit/spark/src/bin/generate_test_request.rs index 0a6bffe1..5d9af972 100644 --- a/provekit/spark/src/bin/generate_test_request.rs +++ b/provekit/spark/src/bin/generate_test_request.rs @@ -1,6 +1,6 @@ -use ::{ +use { provekit_common::{ - spark::{ClaimedValues, Point, SPARKRequest}, + spark::{ClaimedValues, Point, SparkStatement}, FieldElement, }, std::{fs::File, io::Write}, @@ -12,7 +12,7 @@ fn main() { row[7] = FieldElement::from(1); - let spark_request = SPARKRequest { + let spark_request = SparkStatement { point_to_evaluate: Point { row, col }, claimed_values: ClaimedValues { a: FieldElement::from(1), diff --git a/provekit/spark/src/gpa.rs b/provekit/spark/src/gpa.rs index 3ce61813..f9e22809 100644 --- a/provekit/spark/src/gpa.rs +++ b/provekit/spark/src/gpa.rs @@ -1,4 +1,4 @@ -use ::{ +use { provekit_common::{ skyscraper::SkyscraperSponge, utils::{ diff --git a/provekit/spark/src/lib.rs b/provekit/spark/src/lib.rs index 98041ac2..1312dee4 100644 --- a/provekit/spark/src/lib.rs +++ b/provekit/spark/src/lib.rs @@ -9,10 +9,7 @@ pub mod verifier; pub use { prover::{SPARKProver, SPARKScheme as SPARKProverScheme}, - types::{ - ClaimedValues, MatrixDimensions, Point, SPARKProof, SPARKProofGnark, SPARKRequest, - SPARKWHIRConfigs, - }, + types::{MatrixDimensions, SPARKProof, SPARKProofGnark, SPARKWHIRConfigs}, utils::{calculate_memory, deserialize_r1cs, deserialize_request}, verifier::{SPARKScheme as SPARKVerifierScheme, SPARKVerifier}, }; diff --git a/provekit/spark/src/memory.rs b/provekit/spark/src/memory.rs index b0862130..48375671 100644 --- a/provekit/spark/src/memory.rs +++ b/provekit/spark/src/memory.rs @@ -7,6 +7,7 @@ use { ark_std::One, provekit_common::{ skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge}, + spark::SparkStatement, utils::sumcheck::calculate_eq, FieldElement, WhirConfig, }, @@ -315,7 +316,7 @@ pub fn verify_rowwise( num_rows: usize, num_nonzero_terms: usize, whir_params: &SPARKWHIRConfigs, - request: &crate::types::SPARKRequest, + request: &SparkStatement, _matrix_batching_randomness: &FieldElement, ) -> Result<()> { verify_axis( @@ -333,7 +334,7 @@ pub fn verify_colwise( num_cols: usize, num_nonzero_terms: usize, whir_params: &SPARKWHIRConfigs, - request: &crate::types::SPARKRequest, + request: &SparkStatement, _matrix_batching_randomness: &FieldElement, ) -> Result<()> { verify_axis( @@ -349,10 +350,6 @@ pub fn verify_colwise( ) } -// ============================================================================ -// Helper -// ============================================================================ - /// Helper to generate and verify a WHIR proof at a specific evaluation point. 
/// /// # Note diff --git a/provekit/spark/src/prover.rs b/provekit/spark/src/prover.rs index ad067e68..3f04e572 100644 --- a/provekit/spark/src/prover.rs +++ b/provekit/spark/src/prover.rs @@ -3,13 +3,14 @@ use { memory::{prove_colwise, prove_rowwise}, preprocessing::MatrixPreprocessor, sumcheck::run_spark_sumcheck, - types::{EValuesForMatrix, MatrixDimensions, SPARKProof, SPARKRequest, SPARKWHIRConfigs}, + types::{EValuesForMatrix, MatrixDimensions, SPARKProof, SPARKWHIRConfigs}, utils::{calculate_memory, SPARKDomainSeparator}, }, anyhow::Result, ark_ff::AdditiveGroup, provekit_common::{ skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge}, + spark::SparkStatement, utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, FieldElement, IOPattern, WhirR1CSScheme, R1CS, }, @@ -24,7 +25,7 @@ use { /// SPARK proving interface for R1CS constraint systems. pub trait SPARKProver { /// Generates a SPARK proof from R1CS and evaluation request. - fn prove(&self, r1cs: &R1CS, request: &SPARKRequest) -> Result; + fn prove(&self, r1cs: &R1CS, request: &SparkStatement) -> Result; } /// SPARK scheme with pre-configured WHIR parameters and IO pattern. @@ -131,7 +132,7 @@ impl SPARKScheme { } impl SPARKProver for SPARKScheme { - fn prove(&self, r1cs: &R1CS, request: &SPARKRequest) -> Result { + fn prove(&self, r1cs: &R1CS, request: &SparkStatement) -> Result { let processed = MatrixPreprocessor::from_r1cs(r1cs)?; let memory = calculate_memory(request.point_to_evaluate.clone()); let e_values = processed.compute_e_values(&memory); diff --git a/provekit/spark/src/sumcheck.rs b/provekit/spark/src/sumcheck.rs index ba409ecc..a02bff1e 100644 --- a/provekit/spark/src/sumcheck.rs +++ b/provekit/spark/src/sumcheck.rs @@ -1,4 +1,4 @@ -use ::{ +use { anyhow::{ensure, Result}, ark_std::{One, Zero}, provekit_common::{ diff --git a/provekit/spark/src/types.rs b/provekit/spark/src/types.rs index 034122c7..48732ff5 100644 --- a/provekit/spark/src/types.rs +++ b/provekit/spark/src/types.rs @@ -1,4 +1,4 @@ -use ::{ +use { provekit_common::{FieldElement, WhirConfig}, serde::{Deserialize, Serialize}, }; @@ -29,8 +29,6 @@ pub struct SPARKWHIRConfigs { pub num_terms_5batched: WhirConfig, } -pub use provekit_common::spark::{ClaimedValues, Point, SPARKRequest}; - /// SPARK matrix in COO format with memory access timestamps. #[derive(Debug, Clone)] pub struct SparkMatrix { diff --git a/provekit/spark/src/utils.rs b/provekit/spark/src/utils.rs index 94d8773a..0753e6b1 100644 --- a/provekit/spark/src/utils.rs +++ b/provekit/spark/src/utils.rs @@ -1,7 +1,8 @@ -pub use crate::types::{Memory, Point, SPARKRequest}; -use ::{ +pub use crate::types::Memory; +use { anyhow::{Context, Result}, provekit_common::{ + spark::{Point, SparkStatement}, utils::{next_power_of_two, sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq}, FieldElement, R1CS, }, @@ -21,7 +22,7 @@ pub fn deserialize_r1cs(path: impl AsRef) -> Result { } /// Deserializes SPARK request from JSON. 
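One note on the padding `deserialize_r1cs` performs above: matrices are grown to `1 << next_power_of_two(n)`, which suggests `next_power_of_two` returns ⌈log₂ n⌉ rather than the power itself. A short sketch under that assumption (the helper name below is hypothetical):

```rust
/// Assumed behavior of `next_power_of_two`: the ceiling of log2(n).
fn next_power_of_two_log(n: usize) -> usize {
    let mut k = 0;
    while (1usize << k) < n {
        k += 1;
    }
    k
}

fn main() {
    let (rows, cols) = (300usize, 520usize);
    let padded_rows = 1usize << next_power_of_two_log(rows);
    let padded_cols = 1usize << next_power_of_two_log(cols);
    assert_eq!((padded_rows, padded_cols), (512, 1024));
    // The extra rows/columns carry no new entries, so the constraint system is
    // unchanged while every multilinear argument sees a power-of-two hypercube.
    println!("padded to {padded_rows} x {padded_cols}");
}
```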
-pub fn deserialize_request(path: impl AsRef) -> Result { +pub fn deserialize_request(path: impl AsRef) -> Result { let json_str = fs::read_to_string(path).context("Failed to read request file")?; serde_json::from_str(&json_str).context("Failed to deserialize request") } diff --git a/provekit/spark/src/verifier.rs b/provekit/spark/src/verifier.rs index 9683fdde..f2ff139e 100644 --- a/provekit/spark/src/verifier.rs +++ b/provekit/spark/src/verifier.rs @@ -2,11 +2,12 @@ use { crate::{ memory::{verify_colwise, verify_rowwise}, sumcheck::run_sumcheck_verifier_spark, - types::{MatrixDimensions, SPARKProof, SPARKRequest, SPARKWHIRConfigs}, + types::{MatrixDimensions, SPARKProof, SPARKWHIRConfigs}, }, anyhow::{ensure, Context, Result}, provekit_common::{ - skyscraper::SkyscraperSponge, utils::next_power_of_two, FieldElement, IOPattern, + skyscraper::SkyscraperSponge, spark::SparkStatement, utils::next_power_of_two, + FieldElement, IOPattern, }, spongefish::codecs::arkworks_algebra::{FieldToUnitDeserialize, UnitToField}, whir::{ @@ -23,7 +24,7 @@ use { /// SPARK verification interface. pub trait SPARKVerifier { /// Verifies a SPARK proof against the given request. - fn verify(&self, proof: &SPARKProof, request: &SPARKRequest) -> Result<()>; + fn verify(&self, proof: &SPARKProof, request: &SparkStatement) -> Result<()>; } /// SPARK verification scheme with configuration extracted from proof. @@ -45,7 +46,7 @@ impl SPARKScheme { } impl SPARKVerifier for SPARKScheme { - fn verify(&self, proof: &SPARKProof, request: &SPARKRequest) -> Result<()> { + fn verify(&self, proof: &SPARKProof, request: &SparkStatement) -> Result<()> { let io = IOPattern::from_string(proof.io_pattern.clone()); let mut arthur = io.to_verifier_state(&proof.transcript); @@ -80,7 +81,7 @@ fn verify_spark_single_matrix( whir_params: &SPARKWHIRConfigs, matrix_dimensions: MatrixDimensions, arthur: &mut spongefish::VerifierState, - request: &SPARKRequest, + request: &SparkStatement, claimed_value: &FieldElement, ) -> Result<()> { let commitment_reader_row = CommitmentReader::new(&whir_params.row); diff --git a/provekit/verifier/src/whir_r1cs.rs b/provekit/verifier/src/whir_r1cs.rs index e56bbb6b..bb862a8a 100644 --- a/provekit/verifier/src/whir_r1cs.rs +++ b/provekit/verifier/src/whir_r1cs.rs @@ -46,8 +46,7 @@ impl WhirR1CSVerifier for WhirR1CSScheme { let data_from_sumcheck_verifier = run_sumcheck_verifier( &mut arthur, self.m_0, - &self.whir_for_hiding_spartan, - // proof.whir_spartan_blinding_values, + &self.whir_for_hiding_spartan, // proof.whir_spartan_blinding_values, ) .context("while verifying sumcheck")?; diff --git a/spark-prover/src/bin/generate_test_request.rs b/spark-prover/src/bin/generate_test_request.rs index fdb562de..682ae62d 100644 --- a/spark-prover/src/bin/generate_test_request.rs +++ b/spark-prover/src/bin/generate_test_request.rs @@ -1,6 +1,6 @@ use { provekit_common::{ - spark::{ClaimedValues, Point, SPARKRequest}, + spark::{ClaimedValues, Point, SparkStatement}, FieldElement, }, std::{fs::File, io::Write}, @@ -12,7 +12,7 @@ fn main() { row[7] = FieldElement::from(1); - let spark_request = SPARKRequest { + let spark_request = SparkStatement { point_to_evaluate: Point { row, col }, claimed_values: ClaimedValues { a: FieldElement::from(1), diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs index da04734e..34395742 100644 --- a/spark-prover/src/bin/spark-verifier.rs +++ b/spark-prover/src/bin/spark-verifier.rs @@ -4,7 +4,7 @@ use { ark_std::{One, Zero}, provekit_common::{ 
skyscraper::SkyscraperSponge, - spark::SPARKRequest, + spark::SparkStatement, utils::{ next_power_of_two, sumcheck::{calculate_eq, eval_cubic_poly}, @@ -53,7 +53,7 @@ fn main() -> Result<()> { let request_json_str = fs::read_to_string(args.request).context("Error: Failed to open the request file")?; - let request: SPARKRequest = + let request: SparkStatement = serde_json::from_str(&request_json_str).context("Error: Failed to deserialize request")?; let io = IOPattern::from_string(spark_proof.io_pattern.clone()); @@ -94,7 +94,7 @@ pub fn verify_spark_single_matrix( num_cols: usize, num_nonzero_terms: usize, arthur: &mut VerifierState, - request: &SPARKRequest, + request: &SparkStatement, claimed_value: &FieldElement, ) -> Result<()> { let commitment_reader_row = CommitmentReader::new(&whir_params.row); @@ -388,7 +388,7 @@ pub fn gpa_sumcheck_verifier( prev_rand = rand; rand = Vec::::new(); - for i in 1..(height_of_binary_tree - 1) { + for i in 1..height_of_binary_tree - 1 { for _ in 0..i { arthur .fill_next_scalars(&mut h) diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs index d010dec6..fd8fd888 100644 --- a/spark-prover/src/utilities/mod.rs +++ b/spark-prover/src/utilities/mod.rs @@ -9,7 +9,7 @@ use { crate::utilities::whir::SPARKWHIRConfigsNew, anyhow::{Context, Result}, provekit_common::{ - gnark::WHIRConfigGnark, spark::SPARKRequest, utils::next_power_of_two, R1CS, + gnark::WHIRConfigGnark, spark::SparkStatement, utils::next_power_of_two, R1CS, }, serde::{Deserialize, Serialize}, std::{fs, path::PathBuf}, @@ -26,7 +26,7 @@ pub fn deserialize_r1cs(path: &PathBuf) -> Result { Ok(r1cs) } -pub fn deserialize_request(path: &PathBuf) -> Result { +pub fn deserialize_request(path: &PathBuf) -> Result { let json_str = fs::read_to_string(path).context("Error: Failed to open the request.json file")?; serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS") diff --git a/tooling/spark-cli/Cargo.toml b/tooling/spark-cli/Cargo.toml index 5880191c..2d442869 100644 --- a/tooling/spark-cli/Cargo.toml +++ b/tooling/spark-cli/Cargo.toml @@ -10,9 +10,9 @@ repository.workspace = true [dependencies] provekit-common.workspace = true -provekit-spark = { path = "../../provekit/spark" } +provekit-spark.workspace = true anyhow.workspace = true -argh = "0.1.12" +argh.workspace = true serde_json.workspace = true [lints] diff --git a/tooling/spark-cli/src/cmd/prove.rs b/tooling/spark-cli/src/cmd/prove.rs index a93cb4ea..78491f83 100644 --- a/tooling/spark-cli/src/cmd/prove.rs +++ b/tooling/spark-cli/src/cmd/prove.rs @@ -1,9 +1,8 @@ -use ::{ +use { anyhow::{Context, Result}, argh::FromArgs, - provekit_spark::{ - deserialize_r1cs, deserialize_request, SPARKProofGnark, SPARKProver, SPARKProverScheme, - }, + provekit_common::{file::read, NoirProof}, + provekit_spark::{deserialize_r1cs, SPARKProofGnark, SPARKProver, SPARKProverScheme}, std::{fs::File, io::Write, path::PathBuf}, }; @@ -15,9 +14,9 @@ pub struct ProveArgs { #[argh(option)] r1cs: PathBuf, - /// path to request file + /// path to NoirProof file (.np or .json) containing the SPARK statement #[argh(option)] - request: PathBuf, + noir_proof: PathBuf, /// output path for proof (default: spark_proof.json) #[argh(option, short = 'o', default = "PathBuf::from(\"spark_proof.json\")")] @@ -32,15 +31,19 @@ pub fn execute(args: ProveArgs) -> Result<()> { println!("Loading R1CS from {:?}...", args.r1cs); let r1cs = deserialize_r1cs(&args.r1cs).context("Failed to load R1CS")?; - println!("Loading request from 
{:?}...", args.request); - let request = deserialize_request(&args.request).context("Failed to load request")?; + println!("Loading NoirProof from {:?}...", args.noir_proof); + let noir_proof: NoirProof = read(&args.noir_proof).context("Failed to read NoirProof file")?; + + // Extract SPARK statement from the proof + let spark_statement = noir_proof.spark_statement; + println!("✓ Extracted SPARK statement from NoirProof"); println!("Creating SPARK scheme..."); let scheme = SPARKProverScheme::new_for_r1cs(&r1cs); - println!("Generating proof..."); + println!("Generating SPARK proof..."); let proof = scheme - .prove(&r1cs, &request) + .prove(&r1cs, &spark_statement) .context("Failed to generate proof")?; // Write proof @@ -60,6 +63,6 @@ pub fn execute(args: ProveArgs) -> Result<()> { .write_all(serde_json::to_string(&gnark_proof)?.as_bytes()) .context("Failed to write gnark proof")?; - println!("✓ Proof generated successfully"); + println!("✓ SPARK proof generated successfully"); Ok(()) } diff --git a/tooling/spark-cli/src/cmd/verify.rs b/tooling/spark-cli/src/cmd/verify.rs index 566de636..5a4357c2 100644 --- a/tooling/spark-cli/src/cmd/verify.rs +++ b/tooling/spark-cli/src/cmd/verify.rs @@ -1,4 +1,4 @@ -use ::{ +use { anyhow::{Context, Result}, argh::FromArgs, provekit_spark::{deserialize_request, SPARKProof, SPARKVerifier, SPARKVerifierScheme}, @@ -13,9 +13,9 @@ pub struct VerifyArgs { #[argh(option)] proof: PathBuf, - /// path to request file + /// path to statement file #[argh(option)] - request: PathBuf, + statement: PathBuf, } pub fn execute(args: VerifyArgs) -> Result<()> { @@ -24,15 +24,15 @@ pub fn execute(args: VerifyArgs) -> Result<()> { let proof: SPARKProof = serde_json::from_str(&proof_str).context("Failed to deserialize proof")?; - println!("Loading request from {:?}...", args.request); - let request = deserialize_request(&args.request).context("Failed to load request")?; + println!("Loading statement from {:?}...", args.statement); + let statement = deserialize_request(&args.statement).context("Failed to load statement")?; println!("Creating verification scheme..."); let scheme = SPARKVerifierScheme::from_proof(&proof); println!("Verifying proof..."); scheme - .verify(&proof, &request) + .verify(&proof, &statement) .context("Verification failed")?; println!("✓ Proof verified successfully"); diff --git a/tooling/spark-cli/src/main.rs b/tooling/spark-cli/src/main.rs index 8a20bc86..f14c8370 100644 --- a/tooling/spark-cli/src/main.rs +++ b/tooling/spark-cli/src/main.rs @@ -1,6 +1,6 @@ mod cmd; -use ::{anyhow::Result, argh::FromArgs}; +use {anyhow::Result, argh::FromArgs}; #[derive(FromArgs)] #[argh(description = "SPARK Prover CLI")] From 12bd1e19dd672f77087164c3a46930240b5e4611 Mon Sep 17 00:00:00 2001 From: shreyas-londhe Date: Sat, 11 Oct 2025 17:49:09 +0530 Subject: [PATCH 31/34] feat: e2e working --- provekit/spark/src/memory.rs | 108 ++++++++++++++++------------ provekit/spark/src/preprocessing.rs | 56 +++++++-------- provekit/spark/src/prover.rs | 37 ++++++---- provekit/spark/src/verifier.rs | 14 ++-- tooling/spark-cli/src/cmd/prove.rs | 19 +++-- tooling/spark-cli/src/cmd/verify.rs | 25 ++++--- 6 files changed, 147 insertions(+), 112 deletions(-) diff --git a/provekit/spark/src/memory.rs b/provekit/spark/src/memory.rs index 48375671..e116a633 100644 --- a/provekit/spark/src/memory.rs +++ b/provekit/spark/src/memory.rs @@ -4,18 +4,21 @@ use { types::{Memory, SPARKWHIRConfigs, SparkMatrix}, }, anyhow::{ensure, Result}, + ark_ff::{Fp, MontBackend}, ark_std::One, + 
itertools::izip, provekit_common::{ skyscraper::{SkyscraperMerkleConfig, SkyscraperSponge}, spark::SparkStatement, - utils::sumcheck::calculate_eq, + utils::{next_power_of_two, sumcheck::calculate_eq}, FieldElement, WhirConfig, }, spongefish::{codecs::arkworks_algebra::UnitToField, ProverState, VerifierState}, whir::{ + crypto::fields::BN254Config, poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, whir::{ - committer::{CommitmentReader, Witness}, + committer::{reader::ParsedCommitment, Witness}, prover::Prover, statement::{Statement, Weights}, utils::{HintDeserialize, HintSerialize}, @@ -61,20 +64,16 @@ fn prove_axis( // Construct opening vectors for init/final GPA using Fiat-Shamir challenges. // Each opening encodes (address, value, timestamp) as: a*γ² + v*γ + t - τ - let init_vec: Vec<_> = (0..config.eq_memory.len()) - .map(|i| { - let a = FieldElement::from(i as u64); - let v = config.eq_memory[i]; - // Initial timestamp is always 0 (pre-access state) + let init_vec: Vec<_> = izip!(0.., config.eq_memory.iter(), config.final_timestamp.iter()) + .map(|(i, &v, _)| { + let a = FieldElement::from(i); a * gamma * gamma + v * gamma - tau }) .collect(); - let final_vec: Vec<_> = (0..config.eq_memory.len()) - .map(|i| { - let a = FieldElement::from(i as u64); - let v = config.eq_memory[i]; - let t = config.final_timestamp[i]; + let final_vec: Vec<_> = izip!(0.., config.eq_memory.iter(), config.final_timestamp.iter()) + .map(|(i, &v, &t)| { + let a = FieldElement::from(i); a * gamma * gamma + v * gamma + t - tau }) .collect(); @@ -95,23 +94,21 @@ fn prove_axis( )?; // RS WS GPA - let rs_vec: Vec<_> = (0..config.address.len()) - .map(|i| { - let a = config.address[i]; - let v = e_values[i]; - let t = config.read_timestamp[i]; - a * gamma * gamma + v * gamma + t - tau - }) - .collect(); + let rs_vec: Vec<_> = izip!( + config.address.iter(), + e_values.iter(), + config.read_timestamp.iter() + ) + .map(|(&a, &v, &t)| a * gamma * gamma + v * gamma + t - tau) + .collect(); - let ws_vec: Vec<_> = (0..config.address.len()) - .map(|i| { - let a = config.address[i]; - let v = e_values[i]; - let t = config.read_timestamp[i] + FieldElement::from(1); - a * gamma * gamma + v * gamma + t - tau - }) - .collect(); + let ws_vec: Vec<_> = izip!( + config.address.iter(), + e_values.iter(), + config.read_timestamp.iter() + ) + .map(|(&a, &v, &t)| a * gamma * gamma + v * gamma + (t + FieldElement::from(1)) - tau) + .collect(); let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); @@ -216,16 +213,18 @@ fn verify_axis( arthur: &mut VerifierState, num_axis_items: usize, num_nonzero_terms: usize, - whir_params: &SPARKWHIRConfigs, whir_config: &WhirConfig, + num_terms_3batched_config: &WhirConfig, + axis_commitment: ParsedCommitment< + Fp, 4>, + Fp, 4>, + >, + finalts_commitment: ParsedCommitment< + Fp, 4>, + Fp, 4>, + >, init_mem_fn: impl Fn(&[FieldElement]) -> FieldElement, ) -> Result<()> { - let commitment_reader = CommitmentReader::new(whir_config); - let a_3batched_reader = CommitmentReader::new(&whir_params.num_terms_3batched); - - let a_axis_commitment = a_3batched_reader.parse_commitment(arthur)?; - let a_finalts_commitment = commitment_reader.parse_commitment(arthur)?; - let mut tau_and_gamma = [FieldElement::from(0); 2]; arthur.fill_challenge_scalars(&mut tau_and_gamma)?; let tau = tau_and_gamma[0]; @@ -255,7 +254,7 @@ fn verify_axis( ); let final_cntr_verifier = Verifier::new(whir_config); - 
final_cntr_verifier.verify(arthur, &a_finalts_commitment, &final_cntr_statement)?; + final_cntr_verifier.verify(arthur, &finalts_commitment, &final_cntr_statement)?; let final_adr = calculate_adr(&evaluation_randomness.to_vec()); let final_mem = init_mem_fn(&evaluation_randomness.to_vec()); @@ -267,10 +266,7 @@ fn verify_axis( ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); // RS WS GPA - let gpa_result = gpa_sumcheck_verifier( - arthur, - provekit_common::utils::next_power_of_two(num_nonzero_terms) + 2, - )?; + let gpa_result = gpa_sumcheck_verifier(arthur, next_power_of_two(num_nonzero_terms) + 2)?; let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); let claimed_rs = gpa_result.claimed_values[0]; @@ -293,14 +289,14 @@ fn verify_axis( num_nonzero_terms, )); - let br = a_axis_commitment.batching_randomness; + let br = axis_commitment.batching_randomness; statement.add_constraint( Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec())), rs_adr + rs_mem * br + rs_timestamp * br * br, ); - let verifier = Verifier::new(&whir_params.num_terms_3batched); - verifier.verify(arthur, &a_axis_commitment, &statement)?; + let verifier = Verifier::new(num_terms_3batched_config); + verifier.verify(arthur, &axis_commitment, &statement)?; ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); @@ -317,14 +313,23 @@ pub fn verify_rowwise( num_nonzero_terms: usize, whir_params: &SPARKWHIRConfigs, request: &SparkStatement, - _matrix_batching_randomness: &FieldElement, + rowwise_commitment: ParsedCommitment< + Fp, 4>, + Fp, 4>, + >, + row_finalts_commitment: ParsedCommitment< + Fp, 4>, + Fp, 4>, + >, ) -> Result<()> { verify_axis( arthur, num_rows, num_nonzero_terms, - whir_params, &whir_params.row, + &whir_params.num_terms_3batched, + rowwise_commitment, + row_finalts_commitment, |eval_rand| calculate_eq(&request.point_to_evaluate.row, eval_rand), ) } @@ -335,14 +340,23 @@ pub fn verify_colwise( num_nonzero_terms: usize, whir_params: &SPARKWHIRConfigs, request: &SparkStatement, - _matrix_batching_randomness: &FieldElement, + colwise_commitment: ParsedCommitment< + Fp, 4>, + Fp, 4>, + >, + col_finalts_commitment: ParsedCommitment< + Fp, 4>, + Fp, 4>, + >, ) -> Result<()> { verify_axis( arthur, num_cols, num_nonzero_terms, - whir_params, &whir_params.col, + &whir_params.num_terms_3batched, + colwise_commitment, + col_finalts_commitment, |eval_rand| { calculate_eq(&request.point_to_evaluate.col[1..], eval_rand) * (FieldElement::from(1) - request.point_to_evaluate.col[0]) diff --git a/provekit/spark/src/preprocessing.rs b/provekit/spark/src/preprocessing.rs index 5c3a9cf0..b1ad190f 100644 --- a/provekit/spark/src/preprocessing.rs +++ b/provekit/spark/src/preprocessing.rs @@ -3,7 +3,7 @@ use { anyhow::Result, ark_ff::AdditiveGroup, provekit_common::{utils::next_power_of_two, FieldElement, R1CS}, - std::collections::BTreeMap, + std::{collections::BTreeMap, iter::repeat}, }; /// Preprocesses R1CS matrices into SPARK's memory-checkable COO format. @@ -158,35 +158,35 @@ impl MatrixPreprocessor { /// Combines A + α·B + α²·C into single SPARK matrix using batching /// randomness. 
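A toy-field sketch of the entrywise fold the comment above describes: the three matrices collapse into M = A + α·B + α²·C, and a single SPARK instance then proves M(r_row, r_col) against the α-combined claim. Dense `Vec<Vec<u64>>` stands in for the sparse COO map the real `into_spark_matrix` mutates, and `fold` is an illustrative name.

```rust
// Sketch only: toy prime field, dense matrices in place of the COO map.
const P: u64 = 1_000_000_007;
fn mul(a: u64, b: u64) -> u64 { ((a as u128 * b as u128) % P as u128) as u64 }
fn add(a: u64, b: u64) -> u64 { (a + b) % P }

/// Entrywise M = A + α·B + α²·C.
fn fold(a: &[Vec<u64>], b: &[Vec<u64>], c: &[Vec<u64>], alpha: u64) -> Vec<Vec<u64>> {
    let alpha_sq = mul(alpha, alpha);
    a.iter()
        .zip(b)
        .zip(c)
        .map(|((ra, rb), rc)| {
            ra.iter()
                .zip(rb)
                .zip(rc)
                .map(|((&x, &y), &z)| add(x, add(mul(alpha, y), mul(alpha_sq, z))))
                .collect()
        })
        .collect()
}

fn main() {
    let a: Vec<Vec<u64>> = vec![vec![1, 0], vec![0, 1]];
    let b: Vec<Vec<u64>> = vec![vec![0, 2], vec![0, 0]];
    let c: Vec<Vec<u64>> = vec![vec![0, 0], vec![3, 0]];
    let alpha = 5;
    let m = fold(&a, &b, &c, alpha);
    assert_eq!(m[0][1], mul(alpha, 2)); // α·B contribution
    assert_eq!(m[1][0], mul(mul(alpha, alpha), 3)); // α²·C contribution
    println!("{m:?}");
}
```

This mirrors how the verifier recombines the first three `final_folds` with the same α powers before comparing against the last sumcheck value.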
- pub fn to_spark_matrix( - &self, + pub fn into_spark_matrix( + mut self, r1cs: &R1CS, matrix_batching_randomness: FieldElement, ) -> SparkMatrix { let matrix_batching_randomness_sq = matrix_batching_randomness * matrix_batching_randomness; - let mut combined_matrix_map = self.combined_matrix_map.clone(); - for (coordinate, value) in r1cs.a().iter() { - combined_matrix_map.entry(coordinate).and_modify(|cur| { - *cur += value; - }); + self.combined_matrix_map + .entry(coordinate) + .and_modify(|cur| *cur += value); } - for (coordinate, value) in r1cs.b().iter() { - combined_matrix_map.entry(coordinate).and_modify(|cur| { - *cur += value * matrix_batching_randomness; - }); + self.combined_matrix_map + .entry(coordinate) + .and_modify(|cur| { + *cur += value * matrix_batching_randomness; + }); } - for (coordinate, value) in r1cs.c().iter() { - combined_matrix_map.entry(coordinate).and_modify(|cur| { - *cur += value * matrix_batching_randomness_sq; - }); + self.combined_matrix_map + .entry(coordinate) + .and_modify(|cur| { + *cur += value * matrix_batching_randomness_sq; + }); } let mut val = Vec::with_capacity(self.padded_num_entries); - for value in combined_matrix_map.values() { + for value in self.combined_matrix_map.values() { val.push(*value); } let to_fill = self.padded_num_entries - self.original_num_entries; @@ -194,18 +194,18 @@ impl MatrixPreprocessor { SparkMatrix { coo: COOMatrix { - row: self.row.clone(), - col: self.col.clone(), + row: self.row, + col: self.col, val, - val_a: self.val_a.clone(), - val_b: self.val_b.clone(), - val_c: self.val_c.clone(), + val_a: self.val_a, + val_b: self.val_b, + val_c: self.val_c, }, timestamps: TimeStamps { - read_row: self.read_row.clone(), - read_col: self.read_col.clone(), - final_row: self.final_row.clone(), - final_col: self.final_col.clone(), + read_row: self.read_row, + read_col: self.read_col, + final_row: self.final_row, + final_col: self.final_col, }, } } @@ -223,8 +223,8 @@ impl MatrixPreprocessor { } let to_fill = self.padded_num_entries - self.original_num_entries; - e_rx.extend(std::iter::repeat(memory.eq_rx[0]).take(to_fill)); - e_ry.extend(std::iter::repeat(memory.eq_ry[0]).take(to_fill)); + e_rx.extend(repeat(memory.eq_rx[0]).take(to_fill)); + e_ry.extend(repeat(memory.eq_ry[0]).take(to_fill)); EValuesForMatrix { e_rx, e_ry } } diff --git a/provekit/spark/src/prover.rs b/provekit/spark/src/prover.rs index 3f04e572..48078011 100644 --- a/provekit/spark/src/prover.rs +++ b/provekit/spark/src/prover.rs @@ -3,7 +3,9 @@ use { memory::{prove_colwise, prove_rowwise}, preprocessing::MatrixPreprocessor, sumcheck::run_spark_sumcheck, - types::{EValuesForMatrix, MatrixDimensions, SPARKProof, SPARKWHIRConfigs}, + types::{ + EValuesForMatrix, MatrixDimensions, Memory, SPARKProof, SPARKWHIRConfigs, SparkMatrix, + }, utils::{calculate_memory, SPARKDomainSeparator}, }, anyhow::Result, @@ -15,10 +17,20 @@ use { FieldElement, IOPattern, WhirR1CSScheme, R1CS, }, provekit_r1cs_compiler::WhirR1CSSchemeBuilder, - spongefish::codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, + spongefish::{ + codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, + ProverState, + }, + std::collections::BTreeSet, whir::{ poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, - whir::{committer::CommitmentWriter, domainsep::WhirDomainSeparator, utils::HintSerialize}, + whir::{ + committer::{CommitmentWriter, Witness}, + domainsep::WhirDomainSeparator, + prover::Prover, + statement::{Statement, Weights}, + utils::HintSerialize, + }, 
}, }; @@ -41,7 +53,7 @@ impl SPARKScheme { let num_rows = r1cs.num_constraints(); let num_cols = r1cs.num_witnesses(); - let mut coordinates = std::collections::BTreeSet::new(); + let mut coordinates = BTreeSet::new(); for ((row, col), _) in r1cs.a().iter() { coordinates.insert((row, col)); } @@ -152,7 +164,7 @@ impl SPARKProver for SPARKScheme { let matrix_batching_randomness = matrix_batching_randomness[0]; let matrix_batching_randomness_sq = matrix_batching_randomness * matrix_batching_randomness; - let spark_matrix = processed.to_spark_matrix(r1cs, matrix_batching_randomness); + let spark_matrix = processed.into_spark_matrix(r1cs, matrix_batching_randomness); let claimed_value = request.claimed_values.a + request.claimed_values.b * matrix_batching_randomness @@ -178,9 +190,9 @@ impl SPARKProver for SPARKScheme { /// Core SPARK protocol: sumcheck + row/col memory checking. fn prove_spark_for_single_matrix( - merlin: &mut spongefish::ProverState, - matrix: crate::types::SparkMatrix, - memory: &crate::types::Memory, + merlin: &mut ProverState, + matrix: SparkMatrix, + memory: &Memory, e_values: EValuesForMatrix, claimed_value: FieldElement, whir_configs: &SPARKWHIRConfigs, @@ -297,7 +309,7 @@ fn commit_to_vector( >, merlin: &mut spongefish::ProverState, vector: Vec, -) -> whir::whir::committer::Witness { +) -> Witness { assert!( vector.len().is_power_of_two(), "Vector length must be power of two" @@ -315,13 +327,8 @@ fn produce_whir_proof( evaluation_point: MultilinearPoint, evaluated_value: FieldElement, config: provekit_common::WhirConfig, - witness: whir::whir::committer::Witness, + witness: Witness, ) -> Result<()> { - use whir::whir::{ - prover::Prover, - statement::{Statement, Weights}, - }; - let mut statement = Statement::::new(evaluation_point.num_variables()); statement.add_constraint(Weights::evaluation(evaluation_point), evaluated_value); let prover = Prover::new(config); diff --git a/provekit/spark/src/verifier.rs b/provekit/spark/src/verifier.rs index f2ff139e..8e0e983a 100644 --- a/provekit/spark/src/verifier.rs +++ b/provekit/spark/src/verifier.rs @@ -91,11 +91,11 @@ fn verify_spark_single_matrix( let a_5batched_commitment_reader = CommitmentReader::new(&whir_params.num_terms_5batched); let a_sumcheck_commitment = a_5batched_commitment_reader.parse_commitment(arthur)?; - let _a_rowwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; - let _a_colwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; + let a_rowwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; + let a_colwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; - let _a_row_finalts_commitment = commitment_reader_row.parse_commitment(arthur)?; - let _a_col_finalts_commitment = commitment_reader_col.parse_commitment(arthur)?; + let a_row_finalts_commitment = commitment_reader_row.parse_commitment(arthur)?; + let a_col_finalts_commitment = commitment_reader_col.parse_commitment(arthur)?; let (randomness, a_last_sumcheck_value) = run_sumcheck_verifier_spark( arthur, @@ -144,7 +144,8 @@ fn verify_spark_single_matrix( matrix_dimensions.nonzero_terms, whir_params, request, - matrix_batching_randomness, + a_rowwise_commitment, + a_row_finalts_commitment, )?; verify_colwise( @@ -153,7 +154,8 @@ fn verify_spark_single_matrix( matrix_dimensions.nonzero_terms, whir_params, request, - matrix_batching_randomness, + a_colwise_commitment, + a_col_finalts_commitment, )?; Ok(()) diff --git a/tooling/spark-cli/src/cmd/prove.rs 
b/tooling/spark-cli/src/cmd/prove.rs index 78491f83..9c9dd271 100644 --- a/tooling/spark-cli/src/cmd/prove.rs +++ b/tooling/spark-cli/src/cmd/prove.rs @@ -1,8 +1,8 @@ use { anyhow::{Context, Result}, argh::FromArgs, - provekit_common::{file::read, NoirProof}, - provekit_spark::{deserialize_r1cs, SPARKProofGnark, SPARKProver, SPARKProverScheme}, + provekit_common::{file::read, utils::next_power_of_two, NoirProof, NoirProofScheme}, + provekit_spark::{SPARKProofGnark, SPARKProver, SPARKProverScheme}, std::{fs::File, io::Write, path::PathBuf}, }; @@ -10,9 +10,9 @@ use { #[argh(subcommand, name = "prove")] #[argh(description = "Generate a SPARK proof")] pub struct ProveArgs { - /// path to R1CS file + /// path to NPS file #[argh(option)] - r1cs: PathBuf, + noir_proof_scheme: PathBuf, /// path to NoirProof file (.np or .json) containing the SPARK statement #[argh(option)] @@ -28,8 +28,15 @@ pub struct ProveArgs { } pub fn execute(args: ProveArgs) -> Result<()> { - println!("Loading R1CS from {:?}...", args.r1cs); - let r1cs = deserialize_r1cs(&args.r1cs).context("Failed to load R1CS")?; + println!("Loading R1CS from {:?}...", args.noir_proof_scheme); + let scheme: NoirProofScheme = + read(&args.noir_proof_scheme).context("while reading Noir proof scheme")?; + let mut r1cs = scheme.r1cs.clone(); + r1cs.grow_matrices( + 1 << next_power_of_two(r1cs.num_constraints()), + 1 << next_power_of_two(r1cs.num_witnesses()), + ); + drop(scheme); println!("Loading NoirProof from {:?}...", args.noir_proof); let noir_proof: NoirProof = read(&args.noir_proof).context("Failed to read NoirProof file")?; diff --git a/tooling/spark-cli/src/cmd/verify.rs b/tooling/spark-cli/src/cmd/verify.rs index 5a4357c2..0ca79687 100644 --- a/tooling/spark-cli/src/cmd/verify.rs +++ b/tooling/spark-cli/src/cmd/verify.rs @@ -1,7 +1,8 @@ use { anyhow::{Context, Result}, argh::FromArgs, - provekit_spark::{deserialize_request, SPARKProof, SPARKVerifier, SPARKVerifierScheme}, + provekit_common::{file::read, NoirProof}, + provekit_spark::{SPARKProof, SPARKVerifier, SPARKVerifierScheme}, std::{fs, path::PathBuf}, }; @@ -11,28 +12,32 @@ use { pub struct VerifyArgs { /// path to proof file #[argh(option)] - proof: PathBuf, + spark_proof: PathBuf, - /// path to statement file + /// path to NoirProof file (.np or .json) containing the SPARK statement #[argh(option)] - statement: PathBuf, + noir_proof: PathBuf, } pub fn execute(args: VerifyArgs) -> Result<()> { - println!("Loading proof from {:?}...", args.proof); - let proof_str = fs::read_to_string(&args.proof).context("Failed to read proof file")?; + println!("Loading spark-proof from {:?}...", args.spark_proof); + let proof_str = fs::read_to_string(&args.spark_proof).context("Failed to read proof file")?; let proof: SPARKProof = - serde_json::from_str(&proof_str).context("Failed to deserialize proof")?; + serde_json::from_str(&proof_str).context("Failed to deserialize spark-proof")?; - println!("Loading statement from {:?}...", args.statement); - let statement = deserialize_request(&args.statement).context("Failed to load statement")?; + println!("Loading NoirProof from {:?}...", args.noir_proof); + let noir_proof: NoirProof = read(&args.noir_proof).context("Failed to read NoirProof file")?; + + println!("✓ Extracted SPARK statement from NoirProof"); + let spark_statement = noir_proof.spark_statement.clone(); + drop(noir_proof); println!("Creating verification scheme..."); let scheme = SPARKVerifierScheme::from_proof(&proof); println!("Verifying proof..."); scheme - .verify(&proof, 
&statement) + .verify(&proof, &spark_statement) .context("Verification failed")?; println!("✓ Proof verified successfully"); From 775b2ef609fe577a67f62fe9b051450db6abfe82 Mon Sep 17 00:00:00 2001 From: shreyas-londhe Date: Sat, 11 Oct 2025 18:04:06 +0530 Subject: [PATCH 32/34] chore: remove spark-prover --- spark-prover/Cargo.toml | 26 - spark-prover/README.md | 37 -- spark-prover/src/bin/generate_test_r1cs.rs | 30 -- spark-prover/src/bin/generate_test_request.rs | 31 -- spark-prover/src/bin/spark-verifier.rs | 448 ------------------ spark-prover/src/lib.rs | 1 - spark-prover/src/main.rs | 367 -------------- spark-prover/src/utilities/gpa.rs | 186 -------- spark-prover/src/utilities/iopattern.rs | 26 - spark-prover/src/utilities/matrix.rs | 23 - spark-prover/src/utilities/memory.rs | 33 -- spark-prover/src/utilities/mod.rs | 92 ---- spark-prover/src/utilities/spark.rs | 404 ---------------- spark-prover/src/utilities/whir.rs | 71 --- 14 files changed, 1775 deletions(-) delete mode 100644 spark-prover/Cargo.toml delete mode 100644 spark-prover/README.md delete mode 100644 spark-prover/src/bin/generate_test_r1cs.rs delete mode 100644 spark-prover/src/bin/generate_test_request.rs delete mode 100644 spark-prover/src/bin/spark-verifier.rs delete mode 100644 spark-prover/src/lib.rs delete mode 100644 spark-prover/src/main.rs delete mode 100644 spark-prover/src/utilities/gpa.rs delete mode 100644 spark-prover/src/utilities/iopattern.rs delete mode 100644 spark-prover/src/utilities/matrix.rs delete mode 100644 spark-prover/src/utilities/memory.rs delete mode 100644 spark-prover/src/utilities/mod.rs delete mode 100644 spark-prover/src/utilities/spark.rs delete mode 100644 spark-prover/src/utilities/whir.rs diff --git a/spark-prover/Cargo.toml b/spark-prover/Cargo.toml deleted file mode 100644 index 06657d15..00000000 --- a/spark-prover/Cargo.toml +++ /dev/null @@ -1,26 +0,0 @@ -[package] -name = "spark-prover" -version = "0.1.0" -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true - -[dependencies] -argh = "0.1.12" -provekit-common.workspace = true -provekit-r1cs-compiler.workspace = true -serde_json.workspace = true -serde.workspace = true -anyhow.workspace = true -spongefish.workspace = true -whir.workspace = true -ark-std.workspace = true -ark-ff.workspace = true -itertools = "0.14.0" - -[lints] -workspace = true - diff --git a/spark-prover/README.md b/spark-prover/README.md deleted file mode 100644 index 94afe423..00000000 --- a/spark-prover/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# SPARK -Experimental Rust prover and gnark recursive prover circuit will be implemented and optimized here. - -## Running -``` -noirup --version v1.0.0-beta.11 -cd noir-examples/noir-passport-examples/complete_age_check -nargo compile -cargo run --release --bin provekit-cli prepare ./target/complete_age_check.json -o ./noir-proof-scheme.nps -cargo run --release --bin provekit-cli prove ./noir-proof-scheme.nps ./Prover.toml -o ./noir-proof.np -cargo run --release --bin provekit-cli generate-gnark-inputs ./noir-proof-scheme.nps ./noir-proof.np -cd ../../.. 
-cargo run --bin spark-prover -- --r1cs "noir-examples/noir-passport-examples/complete_age_check/r1cs.json" --request "noir-examples/noir-passport-examples/complete_age_check/spark_request.json" -cargo run -p spark-prover --bin spark-verifier -- --proof "spark-prover/spark_proof.json" --request "noir-examples/noir-passport-examples/complete_age_check/spark_request.json" -cd recursive-verifier/cmd/cli -go run . --config "../../../noir-examples/noir-passport-examples/complete_age_check/params_for_recursive_verifier" --r1cs "../../../noir-examples/noir-passport-examples/complete_age_check/r1cs.json" --evaluation spark --spark_config "../../../spark-prover/gnark_spark_proof.json" -``` - -## Running SPARK (under development) -```cargo run --bin spark-prover``` - -## Test R1CS generation (for development) -A development utility is provided to generate test matrices. -To generate a test R1CS, run the following command: - -```cargo run -p spark-prover --bin generate_test_r1cs``` - -## Test request generation (for development) -A development utility is provided to generate test requests. -To generate a test request, run the following command: - -```cargo run -p spark-prover --bin generate_test_request``` - -## Reference SPARK verifier (for development) -A reference SPARK verifier is implemented to test the correctness of the SPARK proof while being a reference implementation for the gnark verifier circuit. - -```cargo run -p spark-prover --bin spark-verifier``` \ No newline at end of file diff --git a/spark-prover/src/bin/generate_test_r1cs.rs b/spark-prover/src/bin/generate_test_r1cs.rs deleted file mode 100644 index 53bbe81d..00000000 --- a/spark-prover/src/bin/generate_test_r1cs.rs +++ /dev/null @@ -1,30 +0,0 @@ -use { - provekit_common::{FieldElement, R1CS}, - std::{fs::File, io::Write}, -}; - -fn main() { - let mut r1cs = R1CS::new(); - r1cs.grow_matrices(256, 256); - let interned_1 = r1cs.interner.intern(FieldElement::from(1)); - let interned_2 = r1cs.interner.intern(FieldElement::from(2)); - let interned_3 = r1cs.interner.intern(FieldElement::from(3)); - - for i in 0..256 { - r1cs.a.set(i, i, interned_1); - r1cs.b.set(i, i, interned_2); - r1cs.c.set(i, i, interned_3); - } - - r1cs.a.set(1, 0, interned_1); - r1cs.a.set(2, 0, interned_1); - r1cs.a.set(3, 0, interned_1); - - let matrix_json = - serde_json::to_string(&r1cs).expect("Error: Failed to serialize R1CS to JSON"); - let mut request_file = - File::create("spark-prover/r1cs.json").expect("Error: Failed to create the r1cs.json file"); - request_file - .write_all(matrix_json.as_bytes()) - .expect("Error: Failed to write JSON data to r1cs.json"); -} diff --git a/spark-prover/src/bin/generate_test_request.rs b/spark-prover/src/bin/generate_test_request.rs deleted file mode 100644 index 682ae62d..00000000 --- a/spark-prover/src/bin/generate_test_request.rs +++ /dev/null @@ -1,31 +0,0 @@ -use { - provekit_common::{ - spark::{ClaimedValues, Point, SparkStatement}, - FieldElement, - }, - std::{fs::File, io::Write}, -}; - -fn main() { - let mut row = vec![FieldElement::from(0); 8]; - let col = vec![FieldElement::from(0); 9]; - - row[7] = FieldElement::from(1); - - let spark_request = SparkStatement { - point_to_evaluate: Point { row, col }, - claimed_values: ClaimedValues { - a: FieldElement::from(1), - b: FieldElement::from(0), - c: FieldElement::from(0), - }, - }; - - let request_json = - serde_json::to_string(&spark_request).expect("Error: Failed to serialize R1CS to JSON"); - let mut request_file = File::create("spark-prover/request.json") - 
.expect("Error: Failed to create the request.json file"); - request_file - .write_all(request_json.as_bytes()) - .expect("Error: Failed to write JSON data to request.json"); -} diff --git a/spark-prover/src/bin/spark-verifier.rs b/spark-prover/src/bin/spark-verifier.rs deleted file mode 100644 index 34395742..00000000 --- a/spark-prover/src/bin/spark-verifier.rs +++ /dev/null @@ -1,448 +0,0 @@ -use { - anyhow::{ensure, Context, Result}, - argh::FromArgs, - ark_std::{One, Zero}, - provekit_common::{ - skyscraper::SkyscraperSponge, - spark::SparkStatement, - utils::{ - next_power_of_two, - sumcheck::{calculate_eq, eval_cubic_poly}, - }, - FieldElement, IOPattern, - }, - spark_prover::utilities::{whir::SPARKWHIRConfigsNew, SPARKProof}, - spongefish::{ - codecs::arkworks_algebra::{FieldToUnitDeserialize, UnitToField}, - VerifierState, - }, - std::{ - fs::{self}, - path::PathBuf, - }, - whir::{ - poly_utils::multilinear::MultilinearPoint, - whir::{ - committer::CommitmentReader, - statement::{Statement, Weights}, - utils::HintDeserialize, - verifier::Verifier, - }, - }, -}; - -#[derive(FromArgs)] -#[argh(description = "Spark Verifier CLI")] -struct Args { - /// request - #[argh(option)] - request: PathBuf, - - /// proof - #[argh(option)] - proof: PathBuf, -} - -fn main() -> Result<()> { - let args: Args = argh::from_env(); - - let spark_proof_json_str = - fs::read_to_string(args.proof).context("Error: Failed to open the proof file")?; - let spark_proof: SPARKProof = serde_json::from_str(&spark_proof_json_str) - .context("Error: Failed to deserialize proof")?; - - let request_json_str = - fs::read_to_string(args.request).context("Error: Failed to open the request file")?; - let request: SparkStatement = - serde_json::from_str(&request_json_str).context("Error: Failed to deserialize request")?; - - let io = IOPattern::from_string(spark_proof.io_pattern.clone()); - let mut arthur = io.to_verifier_state(&spark_proof.transcript); - - let _point_row: Vec = arthur.hint()?; - let _point_col: Vec = arthur.hint()?; - - let mut claimed_values = [FieldElement::from(0); 3]; - arthur.fill_next_scalars(&mut claimed_values)?; - - let mut matrix_batching_randomness = [FieldElement::from(0); 1]; - arthur.fill_challenge_scalars(&mut matrix_batching_randomness)?; - let matrix_batching_randomness = matrix_batching_randomness[0]; - - let claimed_value = claimed_values[0] - + claimed_values[1] * matrix_batching_randomness - + claimed_values[2] * matrix_batching_randomness * matrix_batching_randomness; - - verify_spark_single_matrix( - &matrix_batching_randomness, - &spark_proof.whir_params, - spark_proof.matrix_dimensions.num_rows, - spark_proof.matrix_dimensions.num_cols, - spark_proof.matrix_dimensions.nonzero_terms, - &mut arthur, - &request, - &claimed_value, - )?; - - Ok(()) -} - -pub fn verify_spark_single_matrix( - matrix_batching_randomness: &FieldElement, - whir_params: &SPARKWHIRConfigsNew, - num_rows: usize, - num_cols: usize, - num_nonzero_terms: usize, - arthur: &mut VerifierState, - request: &SparkStatement, - claimed_value: &FieldElement, -) -> Result<()> { - let commitment_reader_row = CommitmentReader::new(&whir_params.row); - let commitment_reader_col = CommitmentReader::new(&whir_params.col); - - // Matrix A - - let a_3batched_commitment_reader = CommitmentReader::new(&whir_params.num_terms_3batched); - let a_5batched_commitment_reader = CommitmentReader::new(&whir_params.num_terms_5batched); - - let a_sumcheck_commitment = a_5batched_commitment_reader.parse_commitment(arthur)?; - let 
a_rowwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; - let a_colwise_commitment = a_3batched_commitment_reader.parse_commitment(arthur)?; - - let a_row_finalts_commitment = commitment_reader_row.parse_commitment(arthur).unwrap(); - let a_col_finalts_commitment = commitment_reader_col.parse_commitment(arthur).unwrap(); - - // Matrix A - Sumcheck - - let (randomness, a_last_sumcheck_value) = - run_sumcheck_verifier_spark(arthur, next_power_of_two(num_nonzero_terms), *claimed_value) - .context("While verifying SPARK sumcheck")?; - - let final_folds: Vec = arthur.hint()?; - - let claimed_val = final_folds[0] - + final_folds[1] * matrix_batching_randomness - + final_folds[2] * matrix_batching_randomness * matrix_batching_randomness; - assert!(a_last_sumcheck_value == claimed_val * final_folds[3] * final_folds[4]); - - let mut a_spark_sumcheck_statement_verifier = - Statement::::new(next_power_of_two(num_nonzero_terms)); - - let mut batching_randomness = Vec::with_capacity(5); - let mut cur = FieldElement::from(1); - for _ in 0..5 { - batching_randomness.push(cur); - cur *= a_sumcheck_commitment.batching_randomness; - } - - a_spark_sumcheck_statement_verifier.add_constraint( - Weights::evaluation(MultilinearPoint(randomness.clone())), - final_folds[0] * batching_randomness[0] - + final_folds[1] * batching_randomness[1] - + final_folds[2] * batching_randomness[2] - + final_folds[3] * batching_randomness[3] - + final_folds[4] * batching_randomness[4], - ); - - let a_spark_sumcheck_verifier = Verifier::new(&whir_params.num_terms_5batched); - a_spark_sumcheck_verifier.verify( - arthur, - &a_sumcheck_commitment, - &a_spark_sumcheck_statement_verifier, - )?; - - // Matrix A - Rowwise - - let mut tau_and_gamma = [FieldElement::from(0); 2]; - arthur.fill_challenge_scalars(&mut tau_and_gamma)?; - let tau = tau_and_gamma[0]; - let gamma = tau_and_gamma[1]; - - let gpa_result = gpa_sumcheck_verifier(arthur, next_power_of_two(num_rows) + 2)?; - - let claimed_init = gpa_result.claimed_values[0]; - let claimed_final = gpa_result.claimed_values[1]; - - let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - - let init_adr = calculate_adr(&evaluation_randomness.to_vec()); - let init_mem = calculate_eq( - &request.point_to_evaluate.row, - &evaluation_randomness.to_vec(), - ); - let init_cntr = FieldElement::from(0); - - let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; - let final_cntr: FieldElement = arthur.hint()?; - - let mut final_cntr_statement = Statement::::new(next_power_of_two(num_rows)); - final_cntr_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - final_cntr, - ); - - let final_cntr_verifier = Verifier::new(&whir_params.row); - final_cntr_verifier - .verify(arthur, &a_row_finalts_commitment, &final_cntr_statement) - .context("while verifying WHIR")?; - - let final_adr = calculate_adr(&evaluation_randomness.to_vec()); - let final_mem = calculate_eq( - &request.point_to_evaluate.row, - &evaluation_randomness.to_vec(), - ); - - let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; - - let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) - + final_opening * last_randomness[0]; - - ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - - let gpa_result = gpa_sumcheck_verifier(arthur, next_power_of_two(num_nonzero_terms) + 2)?; - - let (last_randomness, evaluation_randomness) = 
gpa_result.randomness.split_at(1); - - let claimed_rs = gpa_result.claimed_values[0]; - let claimed_ws = gpa_result.claimed_values[1]; - - let rs_adr: FieldElement = arthur.hint()?; - let rs_mem: FieldElement = arthur.hint()?; - let rs_timestamp: FieldElement = arthur.hint()?; - - let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; - let ws_opening = - rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; - - let evaluated_value = - rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; - - ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - - let mut a_spark_rowwise_statement_verifier = - Statement::::new(next_power_of_two(num_nonzero_terms)); - - a_spark_rowwise_statement_verifier.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - rs_adr - + rs_mem * a_rowwise_commitment.batching_randomness - + rs_timestamp - * a_rowwise_commitment.batching_randomness - * a_rowwise_commitment.batching_randomness, - ); - - let a_rowwise_verifier = Verifier::new(&whir_params.num_terms_3batched); - a_rowwise_verifier.verify( - arthur, - &a_rowwise_commitment, - &a_spark_rowwise_statement_verifier, - )?; - - ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); - - // Matrix A - Colwise - - let mut tau_and_gamma = [FieldElement::from(0); 2]; - arthur.fill_challenge_scalars(&mut tau_and_gamma)?; - let tau = tau_and_gamma[0]; - let gamma = tau_and_gamma[1]; - - // Colwise Init Final GPA - - let gpa_result = gpa_sumcheck_verifier(arthur, next_power_of_two(num_cols) + 2)?; - - let claimed_init = gpa_result.claimed_values[0]; - let claimed_final = gpa_result.claimed_values[1]; - - let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - - let init_adr = calculate_adr(&evaluation_randomness.to_vec()); - let init_mem = calculate_eq( - &request.point_to_evaluate.col[1..], - &evaluation_randomness.to_vec(), - ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]); - let init_cntr = FieldElement::from(0); - - let init_opening = init_adr * gamma * gamma + init_mem * gamma + init_cntr - tau; - - let final_cntr: FieldElement = arthur.hint()?; - - let mut final_cntr_statement = Statement::::new(next_power_of_two(num_cols)); - final_cntr_statement.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - final_cntr, - ); - - let final_cntr_verifier = Verifier::new(&whir_params.col); - final_cntr_verifier - .verify(arthur, &a_col_finalts_commitment, &final_cntr_statement) - .context("while verifying WHIR")?; - - let final_adr = calculate_adr(&evaluation_randomness.to_vec()); - let final_mem = calculate_eq( - &request.point_to_evaluate.col[1..], - &evaluation_randomness.to_vec(), - ) * (FieldElement::from(1) - request.point_to_evaluate.col[0]); - - let final_opening = final_adr * gamma * gamma + final_mem * gamma + final_cntr - tau; - - let evaluated_value = init_opening * (FieldElement::one() - last_randomness[0]) - + final_opening * last_randomness[0]; - - ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - - // Colwise RS WS GPA - - let gpa_result = gpa_sumcheck_verifier(arthur, next_power_of_two(num_nonzero_terms) + 2)?; - - let (last_randomness, evaluation_randomness) = gpa_result.randomness.split_at(1); - - let claimed_rs = gpa_result.claimed_values[0]; - let claimed_ws = gpa_result.claimed_values[1]; - - let rs_adr: FieldElement = arthur.hint()?; - let rs_mem: 
FieldElement = arthur.hint()?; - let rs_timestamp: FieldElement = arthur.hint()?; - - let rs_opening = rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp - tau; - let ws_opening = - rs_adr * gamma * gamma + rs_mem * gamma + rs_timestamp + FieldElement::from(1) - tau; - - let evaluated_value = - rs_opening * (FieldElement::one() - last_randomness[0]) + ws_opening * last_randomness[0]; - - ensure!(evaluated_value == gpa_result.a_last_sumcheck_value); - - let mut a_spark_colwise_statement_verifier = - Statement::::new(next_power_of_two(num_nonzero_terms)); - - a_spark_colwise_statement_verifier.add_constraint( - Weights::evaluation(MultilinearPoint(evaluation_randomness.to_vec().clone())), - rs_adr - + rs_mem * a_colwise_commitment.batching_randomness - + rs_timestamp - * a_colwise_commitment.batching_randomness - * a_colwise_commitment.batching_randomness, - ); - - let a_colwise_verifier = Verifier::new(&whir_params.num_terms_3batched); - a_colwise_verifier.verify( - arthur, - &a_colwise_commitment, - &a_spark_colwise_statement_verifier, - )?; - - ensure!(claimed_init * claimed_ws == claimed_rs * claimed_final); - - Ok(()) -} - -pub fn run_sumcheck_verifier_spark( - arthur: &mut VerifierState, - variable_count: usize, - initial_sumcheck_val: FieldElement, -) -> Result<(Vec, FieldElement)> { - let mut saved_val_for_sumcheck_equality_assertion = initial_sumcheck_val; - - let mut alpha = vec![FieldElement::zero(); variable_count]; - - for i in 0..variable_count { - let mut hhat_i = [FieldElement::zero(); 4]; - let mut alpha_i = [FieldElement::zero(); 1]; - let _ = arthur.fill_next_scalars(&mut hhat_i); - let _ = arthur.fill_challenge_scalars(&mut alpha_i); - alpha[i] = alpha_i[0]; - - let hhat_i_at_zero = eval_cubic_poly(&hhat_i, &FieldElement::zero()); - let hhat_i_at_one = eval_cubic_poly(&hhat_i, &FieldElement::one()); - ensure!( - saved_val_for_sumcheck_equality_assertion == hhat_i_at_zero + hhat_i_at_one, - "Sumcheck equality assertion failed" - ); - saved_val_for_sumcheck_equality_assertion = eval_cubic_poly(&hhat_i, &alpha_i[0]); - } - - Ok((alpha, saved_val_for_sumcheck_equality_assertion)) -} - -pub fn gpa_sumcheck_verifier( - arthur: &mut VerifierState, - height_of_binary_tree: usize, -) -> Result { - let mut prev_rand; - let mut rand = Vec::::new(); - let mut claimed_values = [FieldElement::from(0); 2]; - let mut l = [FieldElement::from(0); 2]; - let mut r = [FieldElement::from(0); 1]; - let mut h = [FieldElement::from(0); 4]; - let mut alpha = [FieldElement::from(0); 1]; - - arthur - .fill_next_scalars(&mut claimed_values) - .expect("Failed to fill next scalars"); - arthur - .fill_challenge_scalars(&mut r) - .expect("Failed to fill next scalars"); - let mut a_last_sumcheck_value = eval_linear_poly(&claimed_values, &r[0]); - rand.push(r[0]); - prev_rand = rand; - rand = Vec::::new(); - - for i in 1..height_of_binary_tree - 1 { - for _ in 0..i { - arthur - .fill_next_scalars(&mut h) - .expect("Failed to fill next scalars"); - arthur - .fill_challenge_scalars(&mut alpha) - .expect("Failed to fill next scalars"); - assert_eq!( - eval_cubic_poly(&h, &FieldElement::from(0)) - + eval_cubic_poly(&h, &FieldElement::from(1)), - a_last_sumcheck_value - ); - rand.push(alpha[0]); - a_last_sumcheck_value = eval_cubic_poly(&h, &alpha[0]); - } - arthur - .fill_next_scalars(&mut l) - .expect("Failed to fill next scalars"); - arthur - .fill_challenge_scalars(&mut r) - .expect("Failed to fill next scalars"); - let claimed_last_sch = calculate_eq(&prev_rand, &rand) - * eval_linear_poly(&l, 
&FieldElement::from(0)) - * eval_linear_poly(&l, &FieldElement::from(1)); - assert_eq!(claimed_last_sch, a_last_sumcheck_value); - rand.push(r[0]); - prev_rand = rand; - rand = Vec::::new(); - a_last_sumcheck_value = eval_linear_poly(&l, &r[0]); - } - - Ok(GPASumcheckResult { - claimed_values: claimed_values.to_vec(), - a_last_sumcheck_value, - randomness: prev_rand, - }) -} - -pub struct GPASumcheckResult { - pub claimed_values: Vec, - pub a_last_sumcheck_value: FieldElement, - pub randomness: Vec, -} - -pub fn eval_linear_poly(poly: &[FieldElement], point: &FieldElement) -> FieldElement { - poly[0] + *point * poly[1] -} - -pub fn calculate_adr(alpha: &Vec) -> FieldElement { - let mut ans = FieldElement::from(0); - let mut mult = FieldElement::from(1); - for a in alpha.iter().rev() { - ans = ans + *a * mult; - mult = mult * FieldElement::from(2); - } - ans -} diff --git a/spark-prover/src/lib.rs b/spark-prover/src/lib.rs deleted file mode 100644 index 89db1662..00000000 --- a/spark-prover/src/lib.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod utilities; diff --git a/spark-prover/src/main.rs b/spark-prover/src/main.rs deleted file mode 100644 index ff15fb69..00000000 --- a/spark-prover/src/main.rs +++ /dev/null @@ -1,367 +0,0 @@ -use { - anyhow::{Context, Result}, - argh::FromArgs, - ark_ff::AdditiveGroup, - provekit_common::{ - gnark::WHIRConfigGnark, - utils::{next_power_of_two, sumcheck::SumcheckIOPattern}, - FieldElement, IOPattern, WhirR1CSScheme, - }, - provekit_r1cs_compiler::WhirR1CSSchemeBuilder, - spark_prover::utilities::{ - deserialize_r1cs, deserialize_request, - iopattern::SPARKDomainSeparator, - matrix::{COOMatrix, SparkMatrix, TimeStamps}, - memory::{calculate_memory, EValuesForMatrix}, - spark::prove_spark_for_single_matrix, - whir::SPARKWHIRConfigsNew, - MatrixDimensionsNew, SPARKProof, SPARKProofGnarkNew, - }, - spongefish::codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, - std::{collections::BTreeMap, fs::File, io::Write, path::PathBuf}, - whir::whir::{domainsep::WhirDomainSeparator, utils::HintSerialize}, -}; - -#[derive(FromArgs)] -#[argh(description = "Spark Prover CLI")] -struct Args { - /// r1cs - #[argh(option)] - r1cs: PathBuf, - - /// request - #[argh(option)] - request: PathBuf, -} -fn main() -> Result<()> { - let args: Args = argh::from_env(); - - let r1cs = deserialize_r1cs(&args.r1cs).context("Error: Failed to create the R1CS object")?; - - // get combined matrix non-zero value coordinates - - let mut combined_matrix_map: BTreeMap<(usize, usize), FieldElement> = r1cs - .a() - .iter() - .map(|(coordinate, _)| (coordinate, FieldElement::ZERO)) - .collect(); - for (coordinate, _) in r1cs.b().iter() { - combined_matrix_map - .entry(coordinate) - .or_insert(FieldElement::ZERO); - } - for (coordinate, _) in r1cs.c().iter() { - combined_matrix_map - .entry(coordinate) - .or_insert(FieldElement::ZERO); - } - - // generate padded row and col - - let originial_num_entries = combined_matrix_map.keys().count(); - let padded_num_entries = 1 << next_power_of_two(combined_matrix_map.keys().count()); - - let mut row = Vec::with_capacity(padded_num_entries); - let mut col = Vec::with_capacity(padded_num_entries); - - for (r, c) in combined_matrix_map.keys() { - row.push(FieldElement::from(*r as u64)); - col.push(FieldElement::from(*c as u64)); - } - - let to_fill = padded_num_entries - originial_num_entries; - row.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); - col.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); - - // generate val vectors 
- - let mut val_a = vec![FieldElement::ZERO; padded_num_entries]; - let mut val_b = vec![FieldElement::ZERO; padded_num_entries]; - let mut val_c = vec![FieldElement::ZERO; padded_num_entries]; - - let a_binding = r1cs.a(); - let b_binding = r1cs.b(); - let c_binding = r1cs.c(); - - let mut a_iter = a_binding.iter(); - let mut b_iter = b_binding.iter(); - let mut c_iter = c_binding.iter(); - - let mut a_cur = a_iter.next(); - let mut b_cur = b_iter.next(); - let mut c_cur = c_iter.next(); - - for (index, coordinate) in combined_matrix_map.keys().enumerate() { - if let Some((coord, value)) = a_cur { - if coord == *coordinate { - val_a[index] = value; - a_cur = a_iter.next(); - } - } - - if let Some((coord, value)) = b_cur { - if coord == *coordinate { - val_b[index] = value; - b_cur = b_iter.next(); - } - } - - if let Some((coord, value)) = c_cur { - if coord == *coordinate { - val_c[index] = value; - c_cur = c_iter.next(); - } - } - } - - // generate padded timestamps - - let mut read_row_counters = vec![0; r1cs.num_constraints()]; - let mut read_col_counters = vec![0; r1cs.num_witnesses()]; - let mut read_row = Vec::with_capacity(padded_num_entries); - let mut read_col = Vec::with_capacity(padded_num_entries); - - for (r, c) in combined_matrix_map.keys() { - read_row.push(FieldElement::from(read_row_counters[*r] as u64)); - read_col.push(FieldElement::from(read_col_counters[*c] as u64)); - read_row_counters[*r] += 1; - read_col_counters[*c] += 1; - } - - for _ in 0..to_fill { - read_row.push(FieldElement::from(read_row_counters[0] as u64)); - read_col.push(FieldElement::from(read_col_counters[0] as u64)); - read_row_counters[0] += 1; - read_col_counters[0] += 1; - } - - let final_row = read_row_counters - .iter() - .map(|&x| FieldElement::from(x as u64)) - .collect::>(); - - let final_col = read_col_counters - .iter() - .map(|&x| FieldElement::from(x as u64)) - .collect::>(); - - // Run for each request - let request = deserialize_request(&args.request) - .context("Error: Failed to deserialize the request object")?; - - let memory = calculate_memory(request.point_to_evaluate.clone()); - - let mut e_rx = Vec::with_capacity(padded_num_entries); - let mut e_ry = Vec::with_capacity(padded_num_entries); - - for (r, c) in combined_matrix_map.keys() { - e_rx.push(memory.eq_rx[*r]); - e_ry.push(memory.eq_ry[*c]); - } - - e_rx.extend(std::iter::repeat(memory.eq_rx[0]).take(to_fill)); - e_ry.extend(std::iter::repeat(memory.eq_ry[0]).take(to_fill)); - - // Create whir config - - let row_config = - WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.num_constraints()), 1); - let col_config = - WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(r1cs.num_witnesses()), 1); - let num_terms_3batched_config = - WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 3); - let num_terms_5batched_config = - WhirR1CSScheme::new_whir_config_for_size(next_power_of_two(padded_num_entries), 5); - - // Create io_pattern - let mut io = IOPattern::new("💥"); - - // Matrix A - io = io - .hint("point_row") - .hint("point_col") - .add_claimed_evaluations(); - - io = io - .commit_statement(&num_terms_5batched_config) - .commit_statement(&num_terms_3batched_config) - .commit_statement(&num_terms_3batched_config) - .commit_statement(&row_config) - .commit_statement(&col_config) - .add_sumcheck_polynomials(next_power_of_two(padded_num_entries)) - .hint("sumcheck_last_folds") - .add_whir_proof(&num_terms_5batched_config); - - // Rowwise - - io = io.add_tau_and_gamma(); - - for i in 
0..=next_power_of_two(r1cs.num_constraints()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("row_final_counter_claimed_evaluation") - .add_whir_proof(&row_config); - - for i in 0..=next_power_of_two(padded_num_entries) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("row_rs_address_claimed_evaluation") - .hint("row_rs_value_claimed_evaluation") - .hint("row_rs_timestamp_claimed_evaluation") - .add_whir_proof(&num_terms_3batched_config); - - // Colwise - - io = io.add_tau_and_gamma(); - - for i in 0..=next_power_of_two(r1cs.num_witnesses()) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("col_final_counter_claimed_evaluation") - .add_whir_proof(&col_config); - - for i in 0..=next_power_of_two(padded_num_entries) { - io = io.add_sumcheck_polynomials(i); - io = io.add_line(); - } - - io = io - .hint("col_rs_address_claimed_evaluation") - .hint("col_rs_value_claimed_evaluation") - .hint("col_rs_timestamp_claimed_evaluation") - .add_whir_proof(&num_terms_3batched_config); - - // Prover - - let mut merlin = io.to_prover_state(); - - merlin.hint(&request.point_to_evaluate.row)?; - merlin.hint(&request.point_to_evaluate.col)?; - - // Calculate the RLC of the matrices - // Note: can be also calculated from rlc of val_a, val_b, val_c - merlin.add_scalars(&[ - request.claimed_values.a, - request.claimed_values.b, - request.claimed_values.c, - ])?; - let mut matrix_batching_randomness = [FieldElement::ZERO; 1]; - merlin.fill_challenge_scalars(&mut matrix_batching_randomness)?; - let matrix_batching_randomness = matrix_batching_randomness[0]; - let matrix_batching_randomness_sq = matrix_batching_randomness * matrix_batching_randomness; - - for (coordinate, value) in r1cs.a().iter() { - combined_matrix_map - .entry(coordinate) - .and_modify(|cur| *cur += value); - } - - for (coordinate, value) in r1cs.b().iter() { - combined_matrix_map - .entry(coordinate) - .and_modify(|cur| *cur += value * matrix_batching_randomness); - } - - for (coordinate, value) in r1cs.c().iter() { - combined_matrix_map - .entry(coordinate) - .and_modify(|cur| *cur += value * matrix_batching_randomness_sq); - } - - let mut val = Vec::with_capacity(padded_num_entries); - for value in combined_matrix_map.values() { - val.push(*value); - } - val.extend(std::iter::repeat(FieldElement::ZERO).take(to_fill)); - - let claimed_value = request.claimed_values.a - + request.claimed_values.b * matrix_batching_randomness - + request.claimed_values.c * matrix_batching_randomness_sq; - - // - - let spark_matrix = SparkMatrix { - coo: COOMatrix { - row, - col, - val, - val_a, - val_b, - val_c, - }, - timestamps: TimeStamps { - read_row, - read_col, - final_row, - final_col, - }, - }; - - let e_values = EValuesForMatrix { e_rx, e_ry }; - - let configs = SPARKWHIRConfigsNew { - row: row_config, - col: col_config, - num_terms_3batched: num_terms_3batched_config, - num_terms_5batched: num_terms_5batched_config, - }; - - prove_spark_for_single_matrix( - &mut merlin, - spark_matrix, - &memory, - e_values, - claimed_value, - &configs, - )?; - - let spark_proof = SPARKProof { - transcript: merlin.narg_string().to_vec(), - io_pattern: String::from_utf8(io.as_bytes().to_vec()).unwrap(), - whir_params: configs, - matrix_dimensions: MatrixDimensionsNew { - num_rows: r1cs.num_constraints(), - num_cols: r1cs.num_witnesses(), - nonzero_terms: originial_num_entries, - }, - }; - - let mut spark_proof_file = 
File::create("spark-prover/spark_proof.json") - .context("Error: Failed to create the spark proof file")?; - - spark_proof_file - .write_all(serde_json::to_string(&spark_proof).unwrap().as_bytes()) - .expect("Writing gnark parameters to a file failed"); - - let spark_proof_gnark = SPARKProofGnarkNew { - transcript: spark_proof.transcript, - io_pattern: spark_proof.io_pattern, - whir_row: WHIRConfigGnark::new(&spark_proof.whir_params.row), - whir_col: WHIRConfigGnark::new(&spark_proof.whir_params.col), - whir_3batched: WHIRConfigGnark::new(&spark_proof.whir_params.num_terms_3batched), - whir_5batched: WHIRConfigGnark::new(&spark_proof.whir_params.num_terms_5batched), - log_num_terms: next_power_of_two(padded_num_entries), - }; - - let mut gnark_spark_proof_file = File::create("spark-prover/gnark_spark_proof.json") - .context("Error: Failed to create the spark proof file")?; - - gnark_spark_proof_file - .write_all( - serde_json::to_string(&spark_proof_gnark) - .unwrap() - .as_bytes(), - ) - .expect("Writing spark gnark parameters to a file failed"); - - Ok(()) -} diff --git a/spark-prover/src/utilities/gpa.rs b/spark-prover/src/utilities/gpa.rs deleted file mode 100644 index ed87ea3b..00000000 --- a/spark-prover/src/utilities/gpa.rs +++ /dev/null @@ -1,186 +0,0 @@ -use { - provekit_common::{ - skyscraper::SkyscraperSponge, - utils::{ - next_power_of_two, - sumcheck::{ - calculate_evaluations_over_boolean_hypercube_for_eq, eval_cubic_poly, - sumcheck_fold_map_reduce, - }, - HALF, - }, - FieldElement, - }, - spongefish::{ - codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, - ProverState, - }, - whir::poly_utils::evals::EvaluationsList, -}; - -// TODO: Fix gpa and add line integration - -pub fn run_gpa( - merlin: &mut ProverState, - left: &Vec, - right: &Vec, -) -> Vec { - let mut h = left.clone(); - h.extend(right.iter().cloned()); - let layers = calculate_binary_multiplication_tree(h); - - let mut saved_val_for_sumcheck_equality_assertion; - let mut r; - let mut line_evaluations; - let mut alpha = Vec::::new(); - - (r, saved_val_for_sumcheck_equality_assertion) = add_line_to_merlin(merlin, layers[1].clone()); - - for i in 2..layers.len() { - (line_evaluations, alpha) = run_gpa_sumcheck( - merlin, - &r, - layers[i].clone(), - saved_val_for_sumcheck_equality_assertion, - alpha, - ); - (r, saved_val_for_sumcheck_equality_assertion) = - add_line_to_merlin(merlin, line_evaluations.to_vec()); - } - - alpha.push(r[0]); - - return alpha; -} - -fn calculate_binary_multiplication_tree( - array_to_prove: Vec, -) -> Vec> { - assert!(array_to_prove.len() == 1 << next_power_of_two(array_to_prove.len())); - let mut layers = vec![]; - let mut current_layer = array_to_prove; - - while current_layer.len() > 1 { - let mut next_layer = vec![]; - - for i in (0..current_layer.len()).step_by(2) { - let product = current_layer[i] * current_layer[i + 1]; - next_layer.push(product); - } - - layers.push(current_layer); - current_layer = next_layer; - } - - layers.push(current_layer); - layers.reverse(); - layers -} - -fn add_line_to_merlin( - merlin: &mut ProverState, - arr: Vec, -) -> ([FieldElement; 1], FieldElement) { - let l_evaluations = EvaluationsList::new(arr); - let l_temp = l_evaluations.to_coeffs(); - let l: &[FieldElement] = l_temp.coeffs(); - merlin.add_scalars(&l).expect("Failed to add l"); - - let mut r = [FieldElement::from(0); 1]; - merlin - .fill_challenge_scalars(&mut r) - .expect("Failed to add a challenge scalar"); - - let saved_val_for_sumcheck_equality_assertion = l[0] + l[1] * r[0]; 
- - (r, saved_val_for_sumcheck_equality_assertion) -} - -fn run_gpa_sumcheck( - merlin: &mut ProverState, - r: &[FieldElement; 1], - layer: Vec, - mut saved_val_for_sumcheck_equality_assertion: FieldElement, - mut alpha: Vec, -) -> ([FieldElement; 2], Vec) { - let (mut v0, mut v1) = split_by_index(layer); - alpha.push(r[0]); - let mut eq_r = calculate_evaluations_over_boolean_hypercube_for_eq(&alpha); - let mut alpha_i_wrapped_in_vector = [FieldElement::from(0)]; - let mut alpha = Vec::::new(); - let mut fold = None; - - loop { - let [hhat_i_at_0, hhat_i_at_em1, hhat_i_at_inf_over_x_cube] = - sumcheck_fold_map_reduce([&mut eq_r, &mut v0, &mut v1], fold, |[eq_r, v0, v1]| { - [ - // Evaluation at 0 - eq_r.0 * v0.0 * v1.0, - // Evaluation at -1 - (eq_r.0 + eq_r.0 - eq_r.1) * (v0.0 + v0.0 - v0.1) * (v1.0 + v1.0 - v1.1), - // Evaluation at infinity - (eq_r.1 - eq_r.0) * (v0.1 - v0.0) * (v1.1 - v1.0), - ] - }); - - if fold.is_some() { - eq_r.truncate(eq_r.len() / 2); - v0.truncate(v0.len() / 2); - v1.truncate(v1.len() / 2); - } - - let mut hhat_i_coeffs = [FieldElement::from(0); 4]; - - hhat_i_coeffs[0] = hhat_i_at_0; - hhat_i_coeffs[2] = HALF - * (saved_val_for_sumcheck_equality_assertion + hhat_i_at_em1 - - hhat_i_at_0 - - hhat_i_at_0 - - hhat_i_at_0); - hhat_i_coeffs[3] = hhat_i_at_inf_over_x_cube; - hhat_i_coeffs[1] = saved_val_for_sumcheck_equality_assertion - - hhat_i_coeffs[0] - - hhat_i_coeffs[0] - - hhat_i_coeffs[3] - - hhat_i_coeffs[2]; - - assert_eq!( - saved_val_for_sumcheck_equality_assertion, - hhat_i_coeffs[0] - + hhat_i_coeffs[0] - + hhat_i_coeffs[1] - + hhat_i_coeffs[2] - + hhat_i_coeffs[3] - ); - - let _ = merlin.add_scalars(&hhat_i_coeffs[..]); - let _ = merlin.fill_challenge_scalars(&mut alpha_i_wrapped_in_vector); - fold = Some(alpha_i_wrapped_in_vector[0]); - saved_val_for_sumcheck_equality_assertion = - eval_cubic_poly(&hhat_i_coeffs, &alpha_i_wrapped_in_vector[0]); - alpha.push(alpha_i_wrapped_in_vector[0]); - if eq_r.len() <= 2 { - break; - } - } - - let folded_v0 = v0[0] + (v0[1] - v0[0]) * alpha_i_wrapped_in_vector[0]; - let folded_v1 = v1[0] + (v1[1] - v1[0]) * alpha_i_wrapped_in_vector[0]; - - ([folded_v0, folded_v1], alpha) -} - -fn split_by_index(input: Vec) -> (Vec, Vec) { - let mut even_indexed = Vec::new(); - let mut odd_indexed = Vec::new(); - - for (i, item) in input.into_iter().enumerate() { - if i % 2 == 0 { - even_indexed.push(item); - } else { - odd_indexed.push(item); - } - } - - (even_indexed, odd_indexed) -} diff --git a/spark-prover/src/utilities/iopattern.rs b/spark-prover/src/utilities/iopattern.rs deleted file mode 100644 index ffa728bc..00000000 --- a/spark-prover/src/utilities/iopattern.rs +++ /dev/null @@ -1,26 +0,0 @@ -use {provekit_common::FieldElement, spongefish::codecs::arkworks_algebra::FieldDomainSeparator}; - -pub trait SPARKDomainSeparator { - fn add_tau_and_gamma(self) -> Self; - fn add_line(self) -> Self; - fn add_claimed_evaluations(self) -> Self; -} - -impl SPARKDomainSeparator for IOPattern -where - IOPattern: FieldDomainSeparator, -{ - fn add_tau_and_gamma(self) -> Self { - self.challenge_scalars(2, "tau and gamma") - } - - fn add_line(self) -> Self { - self.add_scalars(2, "gpa line") - .challenge_scalars(1, "gpa line random") - } - - fn add_claimed_evaluations(self) -> Self { - self.add_scalars(3, "claimed evaluations") - .challenge_scalars(1, "matrix combination randomness") - } -} diff --git a/spark-prover/src/utilities/matrix.rs b/spark-prover/src/utilities/matrix.rs deleted file mode 100644 index 31f02778..00000000 --- 
a/spark-prover/src/utilities/matrix.rs +++ /dev/null @@ -1,23 +0,0 @@ -use provekit_common::FieldElement; - -#[derive(Debug)] -pub struct SparkMatrix { - pub coo: COOMatrix, - pub timestamps: TimeStamps, -} -#[derive(Debug)] -pub struct COOMatrix { - pub row: Vec, - pub col: Vec, - pub val: Vec, - pub val_a: Vec, - pub val_b: Vec, - pub val_c: Vec, -} -#[derive(Debug)] -pub struct TimeStamps { - pub read_row: Vec, - pub read_col: Vec, - pub final_row: Vec, - pub final_col: Vec, -} diff --git a/spark-prover/src/utilities/memory.rs b/spark-prover/src/utilities/memory.rs deleted file mode 100644 index e3b6420e..00000000 --- a/spark-prover/src/utilities/memory.rs +++ /dev/null @@ -1,33 +0,0 @@ -use provekit_common::{ - spark::Point, utils::sumcheck::calculate_evaluations_over_boolean_hypercube_for_eq, - FieldElement, -}; - -#[derive(Debug)] -pub struct Memory { - pub eq_rx: Vec, - pub eq_ry: Vec, -} - -#[derive(Debug)] -pub struct EValuesForMatrix { - pub e_rx: Vec, - pub e_ry: Vec, -} - -#[derive(Debug)] -pub struct EValues { - pub a: EValuesForMatrix, - pub b: EValuesForMatrix, - pub c: EValuesForMatrix, -} - -pub fn calculate_memory(point_to_evaluate: Point) -> Memory { - Memory { - eq_rx: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.row), - eq_ry: calculate_evaluations_over_boolean_hypercube_for_eq(&point_to_evaluate.col[1..]) - .iter() - .map(|x| *x * (FieldElement::from(1) - point_to_evaluate.col[0])) - .collect(), - } -} diff --git a/spark-prover/src/utilities/mod.rs b/spark-prover/src/utilities/mod.rs deleted file mode 100644 index fd8fd888..00000000 --- a/spark-prover/src/utilities/mod.rs +++ /dev/null @@ -1,92 +0,0 @@ -pub mod gpa; -pub mod iopattern; -pub mod matrix; -pub mod memory; -pub mod spark; -pub mod whir; - -use { - crate::utilities::whir::SPARKWHIRConfigsNew, - anyhow::{Context, Result}, - provekit_common::{ - gnark::WHIRConfigGnark, spark::SparkStatement, utils::next_power_of_two, R1CS, - }, - serde::{Deserialize, Serialize}, - std::{fs, path::PathBuf}, -}; - -pub fn deserialize_r1cs(path: &PathBuf) -> Result { - let json_str = fs::read_to_string(path).context("Error: Failed to open the r1cs.json file")?; - let mut r1cs: R1CS = - serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS")?; - r1cs.grow_matrices( - 1 << next_power_of_two(r1cs.num_constraints()), - 1 << next_power_of_two(r1cs.num_witnesses()), - ); - Ok(r1cs) -} - -pub fn deserialize_request(path: &PathBuf) -> Result { - let json_str = - fs::read_to_string(path).context("Error: Failed to open the request.json file")?; - serde_json::from_str(&json_str).context("Error: Failed to deserialize JSON to R1CS") -} - -#[derive(Serialize, Deserialize)] -pub struct SPARKProof { - pub transcript: Vec, - pub io_pattern: String, - pub whir_params: SPARKWHIRConfigsNew, - pub matrix_dimensions: MatrixDimensionsNew, -} - -#[derive(Serialize, Deserialize)] -pub struct MatrixDimensions { - pub num_rows: usize, - pub num_cols: usize, - pub a_nonzero_terms: usize, - pub b_nonzero_terms: usize, - pub c_nonzero_terms: usize, -} - -#[derive(Serialize, Deserialize)] -pub struct MatrixDimensionsNew { - pub num_rows: usize, - pub num_cols: usize, - pub nonzero_terms: usize, -} - -pub fn calculate_matrix_dimensions(r1cs: &R1CS) -> MatrixDimensions { - MatrixDimensions { - num_rows: r1cs.a.num_rows, - num_cols: r1cs.a.num_cols, - a_nonzero_terms: r1cs.a.num_entries(), - b_nonzero_terms: r1cs.b.num_entries(), - c_nonzero_terms: r1cs.c.num_entries(), - } -} - -#[derive(Serialize, 
Deserialize)] -pub struct SPARKProofGnark { - pub transcript: Vec, - pub io_pattern: String, - pub whir_row: WHIRConfigGnark, - pub whir_col: WHIRConfigGnark, - pub whir_a3: WHIRConfigGnark, - pub whir_b3: WHIRConfigGnark, - pub whir_c3: WHIRConfigGnark, - pub log_a_num_terms: usize, - pub log_b_num_terms: usize, - pub log_c_num_terms: usize, -} - -#[derive(Serialize, Deserialize)] -pub struct SPARKProofGnarkNew { - pub transcript: Vec, - pub io_pattern: String, - pub whir_row: WHIRConfigGnark, - pub whir_col: WHIRConfigGnark, - pub whir_3batched: WHIRConfigGnark, - pub whir_5batched: WHIRConfigGnark, - pub log_num_terms: usize, -} diff --git a/spark-prover/src/utilities/spark.rs b/spark-prover/src/utilities/spark.rs deleted file mode 100644 index 7818a3b6..00000000 --- a/spark-prover/src/utilities/spark.rs +++ /dev/null @@ -1,404 +0,0 @@ -use { - crate::utilities::{ - gpa::run_gpa, - matrix::SparkMatrix, - memory::{EValuesForMatrix, Memory}, - whir::{commit_to_vector, produce_whir_proof, SPARKWHIRConfigsNew}, - }, - anyhow::Result, - itertools::izip, - provekit_common::{ - skyscraper::SkyscraperSponge, - utils::{ - sumcheck::{eval_cubic_poly, sumcheck_fold_map_reduce}, - HALF, - }, - FieldElement, - }, - spongefish::{ - codecs::arkworks_algebra::{FieldToUnitSerialize, UnitToField}, - ProverState, - }, - whir::{ - poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, - whir::{committer::CommitmentWriter, utils::HintSerialize}, - }, -}; - -pub fn prove_spark_for_single_matrix( - merlin: &mut ProverState, - matrix: SparkMatrix, - memory: &Memory, - e_values: EValuesForMatrix, - claimed_value: FieldElement, - whir_configs: &SPARKWHIRConfigsNew, -) -> Result<()> { - let row_committer = CommitmentWriter::new(whir_configs.row.clone()); - let col_committer = CommitmentWriter::new(whir_configs.col.clone()); - let batched3_committer = CommitmentWriter::new(whir_configs.num_terms_3batched.clone()); - let batched5_committer = CommitmentWriter::new(whir_configs.num_terms_5batched.clone()); - - let sumcheck_witness = batched5_committer.commit_batch(merlin, &[ - EvaluationsList::new(matrix.coo.val_a.clone()).to_coeffs(), - EvaluationsList::new(matrix.coo.val_b.clone()).to_coeffs(), - EvaluationsList::new(matrix.coo.val_c.clone()).to_coeffs(), - EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(), - EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(), - ])?; - - let rowwise_witness = batched3_committer.commit_batch(merlin, &[ - EvaluationsList::new(matrix.coo.row.clone()).to_coeffs(), - EvaluationsList::new(e_values.e_rx.clone()).to_coeffs(), - EvaluationsList::new(matrix.timestamps.read_row.clone()).to_coeffs(), - ])?; - - let colwise_witness = batched3_committer.commit_batch(merlin, &[ - EvaluationsList::new(matrix.coo.col.clone()).to_coeffs(), - EvaluationsList::new(e_values.e_ry.clone()).to_coeffs(), - EvaluationsList::new(matrix.timestamps.read_col.clone()).to_coeffs(), - ])?; - - let final_row_ts_witness = - commit_to_vector(&row_committer, merlin, matrix.timestamps.final_row.clone()); - let final_col_ts_witness = - commit_to_vector(&col_committer, merlin, matrix.timestamps.final_col.clone()); - - // Sumcheck - - let mles = [ - matrix.coo.val.clone(), - e_values.e_rx.clone(), - e_values.e_ry.clone(), - ]; - - let (sumcheck_final_folds, folding_randomness) = - run_spark_sumcheck(merlin, mles, claimed_value)?; - - let val_a_eval = EvaluationsList::new(matrix.coo.val_a.clone()) - .evaluate(&MultilinearPoint(folding_randomness.to_vec().clone())); - let val_b_eval = 
EvaluationsList::new(matrix.coo.val_b.clone()) - .evaluate(&MultilinearPoint(folding_randomness.to_vec().clone())); - let val_c_eval = EvaluationsList::new(matrix.coo.val_c.clone()) - .evaluate(&MultilinearPoint(folding_randomness.to_vec().clone())); - - merlin.hint::>( - &[ - val_a_eval, - val_b_eval, - val_c_eval, - sumcheck_final_folds[1], - sumcheck_final_folds[2], - ] - .to_vec(), - )?; - - let mut batching_randomness = Vec::with_capacity(5); - let mut cur = FieldElement::from(1); - for _ in 0..5 { - batching_randomness.push(cur); - cur *= sumcheck_witness.batching_randomness; - } - - let claimed_batched_value = val_a_eval * batching_randomness[0] - + val_b_eval * batching_randomness[1] - + val_c_eval * batching_randomness[2] - + sumcheck_final_folds[1] * batching_randomness[3] - + sumcheck_final_folds[2] * batching_randomness[4]; - - produce_whir_proof( - merlin, - MultilinearPoint(folding_randomness.to_vec()), - claimed_batched_value, - whir_configs.num_terms_5batched.clone(), - sumcheck_witness, - )?; - - // Rowwise - - // Rowwise Init Final GPA - - let mut tau_and_gamma = [FieldElement::from(0); 2]; - merlin.fill_challenge_scalars(&mut tau_and_gamma)?; - let tau = tau_and_gamma[0]; - let gamma = tau_and_gamma[1]; - - let init_address: Vec = (0..memory.eq_rx.len() as u64) - .map(FieldElement::from) - .collect(); - let init_value = memory.eq_rx.clone(); - let init_timestamp = vec![FieldElement::from(0); memory.eq_rx.len()]; - - let init_vec: Vec = izip!(init_address, init_value, init_timestamp) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let final_address: Vec = (0..memory.eq_rx.len() as u64) - .map(FieldElement::from) - .collect(); - let final_value = memory.eq_rx.clone(); - let final_timestamp = matrix.timestamps.final_row.clone(); - - let final_vec: Vec = izip!(final_address, final_value, final_timestamp) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); - - let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - - let final_row_eval = EvaluationsList::new(matrix.timestamps.final_row.clone()) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&final_row_eval)?; - - produce_whir_proof( - merlin, - MultilinearPoint(evaluation_randomness.to_vec()), - final_row_eval, - whir_configs.row.clone(), - final_row_ts_witness, - )?; - - // // Rowwise RS WS GPA - - let rs_address = matrix.coo.row.clone(); - let rs_value = e_values.e_rx.clone(); - let rs_timestamp = matrix.timestamps.read_row.clone(); - - let rs_vec: Vec = - izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let ws_address = matrix.coo.row.clone(); - let ws_value = e_values.e_rx.clone(); - let ws_timestamp: Vec = matrix - .timestamps - .read_row - .into_iter() - .map(|a| a + FieldElement::from(1)) - .collect(); - - let ws_vec: Vec = - izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); - - let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - - let rs_address_eval = EvaluationsList::new(rs_address) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&rs_address_eval)?; - - let rs_value_eval = EvaluationsList::new(rs_value) - 
.evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&rs_value_eval)?; - - let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&rs_timestamp_eval)?; - - let claimed_rowwise_eval = rs_address_eval - + rs_value_eval * rowwise_witness.batching_randomness - + rs_timestamp_eval - * rowwise_witness.batching_randomness - * rowwise_witness.batching_randomness; - - assert!( - claimed_rowwise_eval - == rowwise_witness - .batched_poly() - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec())) - ); - - produce_whir_proof( - merlin, - MultilinearPoint(evaluation_randomness.to_vec()), - claimed_rowwise_eval, - whir_configs.num_terms_3batched.clone(), - rowwise_witness, - )?; - - // Colwise - - // Colwise Init Final GPA - - let mut tau_and_gamma = [FieldElement::from(0); 2]; - merlin.fill_challenge_scalars(&mut tau_and_gamma)?; - let tau = tau_and_gamma[0]; - let gamma = tau_and_gamma[1]; - - let init_address: Vec = (0..memory.eq_ry.len() as u64) - .map(FieldElement::from) - .collect(); - let init_value = memory.eq_ry.clone(); - let init_timestamp = vec![FieldElement::from(0); memory.eq_ry.len()]; - - let init_vec: Vec = izip!(init_address, init_value, init_timestamp) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let final_address: Vec = (0..memory.eq_ry.len() as u64) - .map(FieldElement::from) - .collect(); - let final_value = memory.eq_ry.clone(); - let final_timestamp = matrix.timestamps.final_col.clone(); - - let final_vec: Vec = izip!(final_address, final_value, final_timestamp) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let gpa_randomness = run_gpa(merlin, &init_vec, &final_vec); - - let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - - let final_col_eval = EvaluationsList::new(matrix.timestamps.final_col.clone()) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&final_col_eval)?; - - produce_whir_proof( - merlin, - MultilinearPoint(evaluation_randomness.to_vec()), - final_col_eval, - whir_configs.col.clone(), - final_col_ts_witness, - )?; - - // // Colwise RS WS GPA - - let rs_address = matrix.coo.col.clone(); - let rs_value = e_values.e_ry.clone(); - let rs_timestamp = matrix.timestamps.read_col.clone(); - - let rs_vec: Vec = - izip!(rs_address.clone(), rs_value.clone(), rs_timestamp.clone()) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let ws_address = matrix.coo.col.clone(); - let ws_value = e_values.e_ry.clone(); - let ws_timestamp: Vec = matrix - .timestamps - .read_col - .into_iter() - .map(|a| a + FieldElement::from(1)) - .collect(); - - let ws_vec: Vec = - izip!(ws_address.clone(), ws_value.clone(), ws_timestamp.clone()) - .map(|(a, v, t)| a * gamma * gamma + v * gamma + t - tau) - .collect(); - - let gpa_randomness = run_gpa(merlin, &rs_vec, &ws_vec); - - let (_combination_randomness, evaluation_randomness) = gpa_randomness.split_at(1); - - let rs_address_eval = EvaluationsList::new(rs_address) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&rs_address_eval)?; - - let rs_value_eval = EvaluationsList::new(rs_value) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - merlin.hint(&rs_value_eval)?; - - let rs_timestamp_eval = EvaluationsList::new(rs_timestamp) - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec().clone())); - 
merlin.hint(&rs_timestamp_eval)?; - - let claimed_colwise_eval = rs_address_eval - + rs_value_eval * colwise_witness.batching_randomness - + rs_timestamp_eval - * colwise_witness.batching_randomness - * colwise_witness.batching_randomness; - - assert!( - claimed_colwise_eval - == colwise_witness - .batched_poly() - .evaluate(&MultilinearPoint(evaluation_randomness.to_vec())) - ); - - produce_whir_proof( - merlin, - MultilinearPoint(evaluation_randomness.to_vec()), - claimed_colwise_eval, - whir_configs.num_terms_3batched.clone(), - colwise_witness, - )?; - - Ok(()) -} - -pub fn run_spark_sumcheck( - merlin: &mut ProverState, - mles: [Vec; 3], - mut claimed_value: FieldElement, -) -> Result<([FieldElement; 3], Vec)> { - let mut sumcheck_randomness = [FieldElement::from(0)]; - let mut sumcheck_randomness_accumulator = Vec::::new(); - let mut fold = None; - - let mut m0 = mles[0].clone(); - let mut m1 = mles[1].clone(); - let mut m2 = mles[2].clone(); - - loop { - let [hhat_i_at_0, hhat_i_at_em1, hhat_i_at_inf_over_x_cube] = - sumcheck_fold_map_reduce([&mut m0, &mut m1, &mut m2], fold, |[m0, m1, m2]| { - [ - // Evaluation at 0 - m0.0 * m1.0 * m2.0, - // Evaluation at -1 - (m0.0 + m0.0 - m0.1) * (m1.0 + m1.0 - m1.1) * (m2.0 + m2.0 - m2.1), - // Evaluation at infinity - (m0.1 - m0.0) * (m1.1 - m1.0) * (m2.1 - m2.0), - ] - }); - - if fold.is_some() { - m0.truncate(m0.len() / 2); - m1.truncate(m1.len() / 2); - m2.truncate(m2.len() / 2); - } - - let mut hhat_i_coeffs = [FieldElement::from(0); 4]; - - hhat_i_coeffs[0] = hhat_i_at_0; - hhat_i_coeffs[2] = - HALF * (claimed_value + hhat_i_at_em1 - hhat_i_at_0 - hhat_i_at_0 - hhat_i_at_0); - hhat_i_coeffs[3] = hhat_i_at_inf_over_x_cube; - hhat_i_coeffs[1] = claimed_value - - hhat_i_coeffs[0] - - hhat_i_coeffs[0] - - hhat_i_coeffs[3] - - hhat_i_coeffs[2]; - - assert_eq!( - claimed_value, - hhat_i_coeffs[0] - + hhat_i_coeffs[0] - + hhat_i_coeffs[1] - + hhat_i_coeffs[2] - + hhat_i_coeffs[3] - ); - - merlin.add_scalars(&hhat_i_coeffs[..])?; - merlin.fill_challenge_scalars(&mut sumcheck_randomness)?; - fold = Some(sumcheck_randomness[0]); - claimed_value = eval_cubic_poly(&hhat_i_coeffs, &sumcheck_randomness[0]); - sumcheck_randomness_accumulator.push(sumcheck_randomness[0]); - if m0.len() <= 2 { - break; - } - } - - let folded_v0 = m0[0] + (m0[1] - m0[0]) * sumcheck_randomness[0]; - let folded_v1 = m1[0] + (m1[1] - m1[0]) * sumcheck_randomness[0]; - let folded_v2 = m2[0] + (m2[1] - m2[0]) * sumcheck_randomness[0]; - - Ok(( - [folded_v0, folded_v1, folded_v2], - sumcheck_randomness_accumulator, - )) -} diff --git a/spark-prover/src/utilities/whir.rs b/spark-prover/src/utilities/whir.rs deleted file mode 100644 index 645e8189..00000000 --- a/spark-prover/src/utilities/whir.rs +++ /dev/null @@ -1,71 +0,0 @@ -use { - anyhow::{Context, Result}, - provekit_common::{ - skyscraper::{SkyscraperMerkleConfig, SkyscraperPoW, SkyscraperSponge}, - FieldElement, WhirConfig, - }, - serde::{Deserialize, Serialize}, - spongefish::ProverState, - whir::{ - poly_utils::{evals::EvaluationsList, multilinear::MultilinearPoint}, - whir::{ - committer::{CommitmentWriter, Witness}, - prover::Prover, - statement::{Statement, Weights}, - }, - }, -}; - -pub fn commit_to_vector( - committer: &CommitmentWriter, - merlin: &mut ProverState, - vector: Vec, -) -> Witness { - assert!( - vector.len().is_power_of_two(), - "Committed vector length must be a power of two" - ); - let evals = EvaluationsList::new(vector); - let coeffs = evals.to_coeffs(); - committer - .commit(merlin, coeffs) - 
.expect("WHIR prover failed to commit") -} - -#[derive(Serialize, Deserialize)] -pub struct SPARKWHIRConfigs { - pub row: WhirConfig, - pub col: WhirConfig, - pub a: WhirConfig, - pub b: WhirConfig, - pub c: WhirConfig, - pub a_3batched: WhirConfig, - pub b_3batched: WhirConfig, - pub c_3batched: WhirConfig, -} - -#[derive(Serialize, Deserialize)] -pub struct SPARKWHIRConfigsNew { - pub row: WhirConfig, - pub col: WhirConfig, - pub num_terms_3batched: WhirConfig, - pub num_terms_5batched: WhirConfig, -} - -pub fn produce_whir_proof( - merlin: &mut ProverState, - evaluation_point: MultilinearPoint, - evaluated_value: FieldElement, - config: WhirConfig, - witness: Witness, -) -> Result<()> { - let mut statement = Statement::::new(evaluation_point.num_variables()); - statement.add_constraint(Weights::evaluation(evaluation_point), evaluated_value); - let prover = Prover::new(config); - - prover - .prove(merlin, statement, witness) - .context("while generating WHIR proof")?; - - Ok(()) -} From 65fb227ef66aa1f07434167e27574ca5e1329469 Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Wed, 22 Oct 2025 11:35:39 +0800 Subject: [PATCH 33/34] Modify end-to-end workflow --- .github/workflows/end-to-end.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/end-to-end.yml b/.github/workflows/end-to-end.yml index 29e4b44f..cea3e305 100644 --- a/.github/workflows/end-to-end.yml +++ b/.github/workflows/end-to-end.yml @@ -85,7 +85,7 @@ jobs: MONITOR_PID=$! # Run the main process - ./gnark-verifier --config "../noir-examples/noir-passport-examples/complete_age_check/params_for_recursive_verifier" --r1cs "../noir-examples/noir-passport-examples/complete_age_check/r1cs.json" --pk_url ${{ vars.AGE_CHECK_PK }} --vk_url ${{ vars.AGE_CHECK_VK }} + ./gnark-verifier --evaluation spark --config "../../../noir-examples/noir-passport-examples/complete_age_check/params_for_recursive_verifier" --spark_config "../../../noir-examples/noir-passport-examples/complete_age_check/gnark_spark_proof.json" --r1cs "../../../noir-examples/noir-passport-examples/complete_age_check/r1cs.json" --pk_url ${{ vars.AGE_CHECK_PK }} --vk_url ${{ vars.AGE_CHECK_VK }} # Stop monitoring kill $MONITOR_PID \ No newline at end of file From 7cb9210fa65991d3e9156c60bfccb7d9bf2bad9e Mon Sep 17 00:00:00 2001 From: Batmend Batsaikhan Date: Wed, 22 Oct 2025 11:49:02 +0800 Subject: [PATCH 34/34] Adds sparkConfig to server --- recursive-verifier/cmd/server/main.go | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/recursive-verifier/cmd/server/main.go b/recursive-verifier/cmd/server/main.go index 88d28c41..df0e1af5 100644 --- a/recursive-verifier/cmd/server/main.go +++ b/recursive-verifier/cmd/server/main.go @@ -97,6 +97,18 @@ func verify(c *fiber.Ctx) error { return fmt.Errorf("failed to unmarshal config JSON: %w", err) } + // TODO: Swap spark_config_name_placeholder with actual form field name + sparkConfigFile, err := getFile(c, "spark_config_name_placeholder") + if err != nil { + log.Printf("Failed to get config file: %v", err) + return c.Status(400).SendString("Failed to get spark config file") + } + + var sparkConfig circuit.SparkConfig + if err := json.Unmarshal(sparkConfigFile, &sparkConfig); err != nil { + return fmt.Errorf("failed to unmarshal config JSON: %w", err) + } + var pk *groth16.ProvingKey var vk *groth16.VerifyingKey