Commit 44e8a7b

Merge pull request #4 from EYBlockchain/livesey/ignore_tests
ignore unnecessary tests
Michael-EY authored Nov 29, 2024
2 parents bf04eac + 74efc37 commit 44e8a7b
Showing 21 changed files with 97 additions and 89 deletions.
10 changes: 6 additions & 4 deletions plonkish_backend/src/accumulation/protostar/hyperplonk.rs
@@ -595,8 +595,8 @@ pub(crate) mod test {
             HyperPlonk,
         },
         pcs::{
-            multilinear::{Gemini, MultilinearIpa, MultilinearKzg, Zeromorph},
-            univariate::UnivariateKzg,
+            multilinear::{Gemini, Zeromorph},
+            univariate::{UnivariateIpa, UnivariateKzg},
         },
         util::{
             expression::rotate::BinaryField,
@@ -641,8 +641,10 @@ pub(crate) mod test {
         };
     }
 
-    tests!(ipa, MultilinearIpa<grumpkin::G1Affine>);
-    tests!(kzg, MultilinearKzg<Bn256>);
+    //tests!(ipa, MultilinearIpa<grumpkin::G1Affine>);
+    //tests!(kzg, MultilinearKzg<Bn256>);
     tests!(gemini_kzg, Gemini<UnivariateKzg<Bn256>>);
     tests!(zeromorph_kzg, Zeromorph<UnivariateKzg<Bn256>>);
+    tests!(gemini_ipa, Gemini<UnivariateIpa<grumpkin::G1Affine>>);
+    //tests!(zeromorph_ipa, Zeromorph<UnivariateIpa<grumpkin::G1Affine>>);
 }
@@ -352,13 +352,10 @@ pub(crate) fn lookup_constraints<F: PrimeField>(
             let [m, h_input, h_table] = &[m, h, h + 1]
                 .map(|poly| Query::new(poly, Rotation::cur()))
                 .map(Expression::<F>::Polynomial);
-            let (inputs, tables) = lookup
-                .iter()
-                .map(|(input, table)| (input, table))
-                .unzip::<_, _, Vec<_>, Vec<_>>();
+            let (inputs, tables) = lookup.iter().cloned().unzip::<_, _, Vec<_>, Vec<_>>();
             let [input, table] = &[inputs, tables].map(|exprs| {
                 chain![
-                    exprs.first().cloned().cloned(),
+                    exprs.first().cloned(),
                     exprs
                         .into_iter()
                         .skip(1)
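The change above collapses a four-line iterator chain into `lookup.iter().cloned().unzip()`. A minimal standalone sketch of the same pattern, with `String` pairs standing in for the crate's expression pairs, shows the difference: without `cloned()`, `unzip` yields vectors of references that keep borrowing the lookup, whereas the new form yields owned values.

fn main() {
    // A lookup is a list of (input, table) expression pairs; Strings stand in here.
    let lookup: Vec<(String, String)> = vec![
        ("w_0".to_string(), "t_0".to_string()),
        ("w_1".to_string(), "t_1".to_string()),
    ];

    // Old shape: unzipping mapped references yields Vec<&String>, still borrowing `lookup`.
    let (input_refs, table_refs): (Vec<&String>, Vec<&String>) =
        lookup.iter().map(|(input, table)| (input, table)).unzip();

    // New shape: `cloned()` copies each pair first, so unzip yields owned Vec<String>.
    let (inputs, tables): (Vec<String>, Vec<String>) = lookup.iter().cloned().unzip();

    assert_eq!(input_refs.len(), inputs.len());
    assert_eq!(table_refs.len(), tables.len());
    println!("inputs = {inputs:?}, tables = {tables:?}");
}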
10 changes: 6 additions & 4 deletions plonkish_backend/src/accumulation/sangria/hyperplonk.rs
@@ -7,8 +7,8 @@ pub(crate) mod test {
             HyperPlonk,
         },
         pcs::{
-            multilinear::{Gemini, MultilinearIpa, MultilinearKzg, Zeromorph},
-            univariate::UnivariateKzg,
+            multilinear::{Gemini, Zeromorph},
+            univariate::{UnivariateIpa, UnivariateKzg},
         },
         util::{
             expression::rotate::BinaryField,
@@ -53,8 +53,10 @@ pub(crate) mod test {
         };
     }
 
-    tests!(ipa, MultilinearIpa<grumpkin::G1Affine>);
-    tests!(kzg, MultilinearKzg<Bn256>);
+    //tests!(ipa, MultilinearIpa<grumpkin::G1Affine>);
+    //tests!(kzg, MultilinearKzg<Bn256>);
     tests!(gemini_kzg, Gemini<UnivariateKzg<Bn256>>);
     tests!(zeromorph_kzg, Zeromorph<UnivariateKzg<Bn256>>);
+    tests!(gemini_ipa, Gemini<UnivariateIpa<grumpkin::G1Affine>>);
+    //tests!(zeromorph_ipa, Zeromorph<UnivariateIpa<grumpkin::G1Affine>>);
 }
37 changes: 16 additions & 21 deletions plonkish_backend/src/backend/hyperplonk.rs
@@ -135,7 +135,7 @@ where
         let mut witness_polys = Vec::with_capacity(pp.num_witness_polys.iter().sum());
         let mut witness_comms = Vec::with_capacity(witness_polys.len());
         let mut challenges = Vec::with_capacity(pp.num_challenges.iter().sum::<usize>() + 4);
-        // For each round, generate multi-linear polynomials from witness columns and commit
+        // For each round, generate multi-linear polynomials from witness columns and commit
         for (round, (num_witness_polys, num_challenges)) in pp
             .num_witness_polys
             .iter()
@@ -162,7 +162,7 @@ where
         // beta is used to compress the polynomials in the lookup argument
         let beta = transcript.squeeze_challenge();
 
-        // Generate a compressed multilinear polynomial for each lookup in the vector of lookups
+        // Generate a compressed multilinear polynomial for each lookup in the vector of lookups
         let timer = start_timer(|| format!("lookup_compressed_polys-{}", pp.lookups.len()));
         let lookup_compressed_polys = {
             let max_lookup_width = pp.lookups.iter().map(Vec::len).max().unwrap_or_default();
@@ -218,7 +218,7 @@ where
         ]
         .collect_vec();
         challenges.extend([beta, gamma, alpha]);
-        // Prove the zero check is satisfied for the expression wrt the polynomials
+        // Prove the zero check is satisfied for the expression wrt the polynomials
         let (points, evals) = prove_zero_check(
             pp.num_instances.len(),
             &pp.expression,
@@ -241,7 +241,7 @@ where
         ]
         .collect_vec();
         let timer = start_timer(|| format!("pcs_batch_open-{}", evals.len()));
-        // Open all polynomials at the points from the zero check and give the opening proofs
+        // Open all polynomials at the points from the zero check and give the opening proofs
         Pcs::batch_open(&pp.pcs, polys, comms, &points, &evals, transcript)?;
         end_timer(timer);
         // Proof is saved in transcript
@@ -298,7 +298,7 @@ where
         let y = transcript.squeeze_challenges(vp.num_vars);
 
         challenges.extend([beta, gamma, alpha]);
-        // Verify the zero check for the constraints defined in the expression
+        // Verify the zero check for the constraints defined in the expression
         let (points, evals) = verify_zero_check(
             vp.num_vars,
             &vp.expression,
@@ -320,7 +320,7 @@ where
             &lookup_h_permutation_z_comms,
         ]
         .collect_vec();
-        // Verify the opening proofs for the polynomials commitments
+        // Verify the opening proofs for the polynomials commitments
         Pcs::batch_verify(&vp.pcs, comms, &points, &evals, transcript)?;
 
         Ok(())
@@ -344,21 +344,14 @@ mod test {
             test::run_plonkish_backend,
         },
         pcs::{
-            multilinear::{
-                Gemini, MultilinearBrakedown, MultilinearHyrax, MultilinearIpa, MultilinearKzg,
-                Zeromorph,
-            },
-            univariate::UnivariateKzg,
+            multilinear::{Gemini, Zeromorph},
+            univariate::{UnivariateIpa, UnivariateKzg},
         },
         util::{
-            code::BrakedownSpec6, expression::rotate::BinaryField, hash::Keccak256,
-            test::seeded_std_rng, transcript::Keccak256Transcript,
+            expression::rotate::BinaryField, test::seeded_std_rng, transcript::Keccak256Transcript,
         },
     };
-    use halo2_curves::{
-        bn256::{self, Bn256},
-        grumpkin,
-    };
+    use halo2_curves::{bn256::Bn256, grumpkin};
 
     macro_rules! tests {
         ($suffix:ident, $pcs:ty, $num_vars_range:expr) => {
@@ -383,10 +376,12 @@ mod test {
         };
     }
 
-    tests!(brakedown, MultilinearBrakedown<bn256::Fr, Keccak256, BrakedownSpec6>);
-    tests!(hyrax, MultilinearHyrax<grumpkin::G1Affine>, 5..16);
-    tests!(ipa, MultilinearIpa<grumpkin::G1Affine>);
-    tests!(kzg, MultilinearKzg<Bn256>);
+    //tests!(brakedown, MultilinearBrakedown<bn256::Fr, Keccak256, BrakedownSpec6>);
+    //tests!(hyrax, MultilinearHyrax<grumpkin::G1Affine>, 5..16);
+    //tests!(ipa, MultilinearIpa<grumpkin::G1Affine>);
+    //tests!(kzg, MultilinearKzg<Bn256>);
     tests!(gemini_kzg, Gemini<UnivariateKzg<Bn256>>);
     tests!(zeromorph_kzg, Zeromorph<UnivariateKzg<Bn256>>);
+    tests!(gemini_ipa, Gemini<UnivariateIpa<grumpkin::G1Affine>>);
+    tests!(zeromorph_ipa, Zeromorph<UnivariateIpa<grumpkin::G1Affine>>);
 }
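Several comments above describe how beta compresses the lookup columns into a single polynomial by a row-wise power series in beta. The following standalone sketch illustrates that compression step over a toy prime field; the modulus, column values, and the local helper `distribute_powers` are illustrative stand-ins, not the crate's `Expression::distribute_powers` API.

// compressed[i] = cols[0][i] + beta * cols[1][i] + beta^2 * cols[2][i] + ...
// Arithmetic is done mod a small prime that stands in for the real field.
const P: u128 = 1_000_000_007;

fn add(a: u128, b: u128) -> u128 {
    (a + b) % P
}

fn mul(a: u128, b: u128) -> u128 {
    (a * b) % P
}

fn distribute_powers(cols: &[Vec<u128>], beta: u128) -> Vec<u128> {
    let rows = cols[0].len();
    (0..rows)
        .map(|i| {
            let mut acc = 0u128;
            let mut pow = 1u128; // beta^j for column j
            for col in cols {
                acc = add(acc, mul(pow, col[i]));
                pow = mul(pow, beta);
            }
            acc
        })
        .collect()
}

fn main() {
    // Two 4-row lookup columns; beta would be a transcript challenge in the protocol.
    let cols = vec![vec![3, 1, 4, 1], vec![5, 9, 2, 6]];
    let beta = 7;
    let compressed = distribute_powers(&cols, beta);
    assert_eq!(compressed[0], (3 + 7 * 5) % P);
    println!("compressed column: {compressed:?}");
}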
27 changes: 12 additions & 15 deletions plonkish_backend/src/backend/hyperplonk/preprocessor.rs
@@ -46,7 +46,7 @@ pub(crate) fn preprocess<F: PrimeField, Pcs: PolynomialCommitmentScheme<F>>(
 
     let num_vars = circuit_info.k;
     let poly_size = 1 << num_vars;
-    // Batch size for the polynomial commitment scheme
+    // Batch size for the polynomial commitment scheme
     let batch_size = batch_size(circuit_info);
     // Trim the parameters for the PCS to those necessary for the size of the circuit
     let (pcs_pp, pcs_vp) = Pcs::trim(param, poly_size, batch_size)?;
@@ -171,7 +171,7 @@ pub(super) fn max_degree<F: PrimeField>(
         .unwrap()
 }
 
-//generate lookup constraints using logup GKR
+//generate lookup constraints using logup GKR
 pub(super) fn lookup_constraints<F: PrimeField>(
     circuit_info: &PlonkishCircuitInfo<F>,
     beta: &Expression<F>,
@@ -186,23 +186,19 @@ pub(super) fn lookup_constraints<F: PrimeField>(
         .zip(m_offset..)
         .zip(h_offset..)
         .flat_map(|((lookup, m), h)| {
-            // make m and h into polynomials, these are created during proving
+            // make m and h into polynomials, these are created during proving
             let [m, h] = &[m, h]
                 .map(|poly| Query::new(poly, Rotation::cur()))
                 .map(Expression::<F>::Polynomial);
             // separate the input and tables from the lookup
-            let (inputs, tables) = lookup
-                .iter()
-                .map(|(input, table)| (input, table))
-                .unzip::<_, _, Vec<_>, Vec<_>>();
-            // Returns a distributed power expression for the input and table, with base beta, i.e. inputs[0] + \beta inputs[1] + \beta^2 inputs[2] + ...
+            let (inputs, tables) = lookup.iter().cloned().unzip::<_, _, Vec<_>, Vec<_>>();
             let input = &Expression::distribute_powers(inputs, beta);
             let table = &Expression::distribute_powers(tables, beta);
             // h[i] = (gamma + input[i])^-1 - m[i] * (gamma + table[i])^-1
             [h * (input + gamma) * (table + gamma) - (table + gamma) + m * (input + gamma)]
         })
         .collect_vec();
-    // Every expression that must be proved in the sum check argument
+    // Every expression that must be proved in the sum check argument
     let sum_check = (h_offset..)
         .take(circuit_info.lookups.len())
         .map(|h| Query::new(h, Rotation::cur()).into())
@@ -226,7 +222,7 @@ pub(crate) fn permutation_constraints<F: PrimeField>(
     // The offset is set to the total number of instance columns in the circuit
     let permutation_offset = circuit_info.num_poly();
     let z_offset = permutation_offset + permutation_polys.len() + num_builtin_witness_polys;
-    // Represent all columns in permutation argument with polynomials
+    // Represent all columns in permutation argument with polynomials
     let polys = permutation_polys
         .iter()
         .map(|idx| Expression::Polynomial(Query::new(*idx, Rotation::cur())))
@@ -252,12 +248,12 @@ pub(crate) fn permutation_constraints<F: PrimeField>(
     let z_0_next = Expression::<F>::Polynomial(Query::new(z_offset, Rotation::next()));
     let l_0 = &Expression::<F>::lagrange(0);
     let one = &Expression::one();
-    // Create the constraints for the permutation argument
+    // Create the constraints for the permutation argument
     // The contraints here are the like those from the halo2 gitbook but the matrix Z_0 Z_1 ... Z_{b-1} is transposed
     let constraints = chain![
         zs.first().map(|z_0| l_0 * (z_0 - one)),
         polys
-            //iterating over b elements which are vectors of length m
+            //iterating over b elements which are vectors of length m
             .chunks(chunk_size)
             .zip(ids.chunks(chunk_size))
            .zip(permutations.chunks(chunk_size))
@@ -284,7 +280,7 @@ pub(crate) fn permutation_constraints<F: PrimeField>(
     (num_chunks, constraints)
 }
 
-// Generate multi-linear permutation polynomials for permutation argument
+// Generate multi-linear permutation polynomials for permutation argument
 pub(crate) fn permutation_polys<F: PrimeField>(
     num_vars: usize,
     permutation_polys: &[usize],
@@ -298,7 +294,8 @@ pub(crate) fn permutation_polys<F: PrimeField>(
         }
         poly_index
     };
-    // permutations will be the matrix defining all permutation polynomials. As we start with the identity permutation, all entries have value of the index within the matrix.
+    // Permutations will be the matrix defining all permutation polynomials.
+    // As we start with the identity permutation, all entries have value of the index within the matrix.
     let mut permutations = (0..permutation_polys.len() as u64)
         .map(|idx| {
             steps(F::from(idx << num_vars))
@@ -314,7 +311,7 @@ pub(crate) fn permutation_polys<F: PrimeField>(
             mem::swap(&mut permutations[poly_index[i]][j], &mut last);
         }
     }
-    // We generate a multilinear polynomial from each column of the permutation matrix.
+    // We generate a multilinear polynomial from each column of the permutation matrix.
     permutations
         .into_iter()
         .map(MultilinearPolynomial::new)
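The lookup_constraints hunk above states the logup relation in its comments: h[i] = (gamma + input[i])^-1 - m[i] * (gamma + table[i])^-1, which after clearing denominators becomes the committed constraint h * (input + gamma) * (table + gamma) - (table + gamma) + m * (input + gamma) = 0, with the h values summing to zero over all rows. A hedged standalone check of both forms over a toy prime field (small modulus, Fermat inversion, made-up input/table columns):

const P: u64 = 1_000_000_007;

fn add(a: u64, b: u64) -> u64 {
    (a + b) % P
}

fn sub(a: u64, b: u64) -> u64 {
    (a + P - b) % P
}

fn mul(a: u64, b: u64) -> u64 {
    ((a as u128 * b as u128) % P as u128) as u64
}

// Modular inverse via Fermat's little theorem: a^(P-2) mod P.
fn inv(a: u64) -> u64 {
    let (mut base, mut exp, mut acc) = (a, P - 2, 1u64);
    while exp > 0 {
        if exp & 1 == 1 {
            acc = mul(acc, base);
        }
        base = mul(base, base);
        exp >>= 1;
    }
    acc
}

fn main() {
    // table/input columns over the same 4-row domain; m[i] counts how many
    // input rows equal table[i].
    let table = [1u64, 2, 3, 4];
    let input = [2u64, 3, 3, 2];
    let m = [0u64, 2, 2, 0];
    let gamma = 11u64; // a transcript challenge in the real protocol

    let mut sum_h = 0u64;
    for i in 0..table.len() {
        // h[i] = (gamma + input[i])^-1 - m[i] * (gamma + table[i])^-1
        let h = sub(inv(add(gamma, input[i])), mul(m[i], inv(add(gamma, table[i]))));
        // Cleared-denominator constraint from the diff:
        // h*(input+gamma)*(table+gamma) - (table+gamma) + m*(input+gamma) == 0
        let lhs = sub(
            mul(mul(h, add(input[i], gamma)), add(table[i], gamma)),
            add(table[i], gamma),
        );
        assert_eq!(add(lhs, mul(m[i], add(input[i], gamma))), 0);
        sum_h = add(sum_h, h);
    }
    // The sum-check side of logup: the h values sum to zero exactly when m is
    // a correct multiplicity count for the inputs against the table.
    assert_eq!(sum_h, 0);
    println!("logup identity holds on the toy columns");
}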
13 changes: 5 additions & 8 deletions plonkish_backend/src/backend/hyperplonk/prover.rs
@@ -57,7 +57,7 @@ pub(crate) fn lookup_compressed_polys<F: PrimeField, R: Rotatable + From<usize>>
 
     let polys = polys.iter().map(Borrow::borrow).collect_vec();
     let num_vars = polys[0].num_vars();
-    // This is the sum of all elements in the input and table
+    // This is the sum of all elements in the input and table
     let expression = lookups
         .iter()
         .flat_map(|lookup| lookup.iter().map(|(input, table)| (input + table)))
@@ -124,17 +124,14 @@ pub(super) fn lookup_compressed_poly<F: PrimeField, R: Rotatable + From<usize>>(
     };
 
     // split inputs and tables into separate vectors
-    let (inputs, tables) = lookup
-        .iter()
-        .map(|(input, table)| (input, table))
-        .unzip::<_, _, Vec<_>, Vec<_>>();
+    let (inputs, tables) = lookup.iter().cloned().unzip::<_, _, Vec<_>, Vec<_>>();
 
     let timer = start_timer(|| "compressed_input_poly");
-    let compressed_input_poly = compress(&inputs);
+    let compressed_input_poly = compress(&inputs.iter().collect::<Vec<_>>());
     end_timer(timer);
 
     let timer = start_timer(|| "compressed_table_poly");
-    let compressed_table_poly = compress(&tables);
+    let compressed_table_poly = compress(&tables.iter().collect::<Vec<_>>());
     end_timer(timer);
 
     [compressed_input_poly, compressed_table_poly]
@@ -173,7 +170,7 @@ pub(super) fn lookup_m_poly<F: PrimeField + Hash>(
                     .and_modify(|count| *count += 1)
                     .or_insert(1);
             } else {
-                // If the input is not found in the table, the lookup is invalid
+                // If the input is not found in the table, the lookup is invalid
                 *valid = false;
                 break;
            }
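The last hunk touches lookup_m_poly, where the prover counts how many input rows match each table row and flags the lookup invalid when an input value is missing from the table (the `*valid = false` branch). A minimal standalone sketch of that counting step, with plain u64 values in place of field elements and a simplified handling of duplicate table entries:

use std::collections::HashMap;

// For each table row, count how many input rows take that value. Returns None
// when some input value does not appear in the table, mirroring the invalid
// lookup branch above. Duplicate table values collapse to a single row here.
fn lookup_multiplicities(input: &[u64], table: &[u64]) -> Option<Vec<u64>> {
    let index_of: HashMap<u64, usize> = table
        .iter()
        .enumerate()
        .map(|(row, value)| (*value, row))
        .collect();

    let mut m = vec![0u64; table.len()];
    for value in input {
        match index_of.get(value) {
            Some(&row) => m[row] += 1,
            None => return None, // input not found in the table: invalid lookup
        }
    }
    Some(m)
}

fn main() {
    let table = [1u64, 2, 3, 4];
    assert_eq!(
        lookup_multiplicities(&[2, 3, 3, 2], &table),
        Some(vec![0, 2, 2, 0])
    );
    assert_eq!(lookup_multiplicities(&[5], &table), None);
    println!("multiplicity counting matches the expected m column");
}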
2 changes: 1 addition & 1 deletion plonkish_backend/src/backend/hyperplonk/util.rs
@@ -34,7 +34,7 @@ pub fn vanilla_plonk_circuit_info<F: PrimeField>(
     preprocess_polys: [Vec<F>; 5],
     permutations: Vec<Vec<(usize, usize)>>,
 ) -> PlonkishCircuitInfo<F> {
-    let [pi, q_l, q_r, q_m, q_o, q_c, w_l, w_r, w_o] =
+    let [pi, q_l, q_r, q_m, q_o, q_c, w_l, w_r, w_o]: &[Expression<F>; 9] =
         &array::from_fn(|poly| Query::new(poly, Rotation::cur())).map(Expression::Polynomial);
     PlonkishCircuitInfo {
         k: num_vars,
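The one-line change above only adds an explicit `&[Expression<F>; 9]` annotation to the destructuring let. A tiny standalone sketch of the same `array::from_fn(..).map(..)` shape, with `String` standing in as a hypothetical substitute for the crate's `Expression` type:

use std::array;

fn main() {
    // Build 9 values by index, map them, borrow the temporary array, and
    // destructure it with an explicit type annotation, as in the diff above.
    let [pi, q_l, q_r, q_m, q_o, q_c, w_l, w_r, w_o]: &[String; 9] =
        &array::from_fn(|poly| poly).map(|poly: usize| format!("poly_{poly}"));

    // Each binding is a &String into the temporary array.
    println!("{pi} {q_l} {q_r} {q_m} {q_o} {q_c} {w_l} {w_r} {w_o}");
}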
2 changes: 2 additions & 0 deletions plonkish_backend/src/backend/unihyperplonk.rs
@@ -413,13 +413,15 @@ mod test {
         ($suffix:ident, $pcs:ty, $additive:literal, $num_vars_range:expr) => {
             paste::paste! {
                 #[test]
+                #[ignore = "we do not currently use UniHyperPlonk"]
                 fn [<vanilla_plonk_w_ $suffix>]() {
                     run_plonkish_backend::<_, UniHyperPlonk<$pcs, $additive>, Keccak256Transcript<_>, _>($num_vars_range, |num_vars| {
                         rand_vanilla_plonk_circuit::<_, Lexical>(num_vars, seeded_std_rng(), seeded_std_rng())
                     });
                 }
 
                 #[test]
+                #[ignore = "we do not currently use UniHyperPlonk"]
                 fn [<vanilla_plonk_w_lookup_w_ $suffix>]() {
                     run_plonkish_backend::<_, UniHyperPlonk<$pcs, $additive>, Keccak256Transcript<_>, _>($num_vars_range, |num_vars| {
                         rand_vanilla_plonk_w_lookup_circuit::<_, Lexical>(num_vars, seeded_std_rng(), seeded_std_rng())
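The commit's approach throughout is to mark unused test suites with `#[ignore]` rather than delete them, so they still compile and can be run on demand. A minimal sketch of the attribute with hypothetical test names:

// `cargo test` skips the ignored test, `cargo test -- --ignored` runs only the
// ignored ones, and `cargo test -- --include-ignored` runs everything.
#[cfg(test)]
mod tests {
    #[test]
    fn always_runs() {
        assert_eq!(2 + 2, 4);
    }

    #[test]
    #[ignore = "we do not currently use this backend"]
    fn skipped_by_default() {
        assert_eq!(1 + 1, 2);
    }
}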
(Diffs for the remaining changed files are not loaded in this view.)