Commit dc06bbf6 authored by E__Man's Association

Added tests for frida (not pcs)

parent e0f88c06
@@ -4,9 +4,9 @@
 //!
 //! These implementations are opt-in and only provided if the corresponding features are enabled.
-#[cfg(feature = "blake3")]
+#[cfg(any(test, feature = "blake3"))]
 pub mod blake3;
-#[cfg(feature = "blake3")]
+#[cfg(any(test, feature = "blake3"))]
 pub use blake3::Blake3;
 #[cfg(feature = "sha3")]
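The relaxed `cfg` is what lets the new unit tests use the Blake3 hasher without any feature flags: `any(test, feature = "blake3")` compiles the item under `cargo test` as well as when the feature is enabled. A minimal sketch of the pattern (hypothetical module name):

// Compiled during `cargo test` even if the "blake3" feature is off;
// in normal builds it still requires the feature to be enabled.
#[cfg(any(test, feature = "blake3"))]
pub mod blake3_backed {
    pub fn available() -> bool { true }
}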
......
@@ -22,6 +22,9 @@ mod pcs;
 #[cfg(feature = "frida_pcs")]
 pub use pcs::*;

+#[cfg(test)]
+mod tests;
+
 #[derive(From, Clone, Copy, PartialEq, Eq, Debug)]
 pub enum FridaError {
     InvalidFriProof(VerifyError),
@@ -33,7 +36,8 @@ pub enum FridaError {
 pub struct FridaBuilder<F, H: Hasher> {
     tree: MerkleTree<H>,
     fri_proof: FriProof<F, H>,
-    zipped_queries: Vec<Vec<F>>,
+    zipped_queries: Vec<F>,
+    num_poly: usize,
 }
 impl<F: FftField, H: Hasher> FridaBuilder<F, H> {
@@ -85,13 +89,14 @@ impl<F: FftField, H: Hasher> FridaBuilder<F, H> {
         let positions = rng.last_positions();
         let zipped_queries = positions
             .iter()
-            .map(|&pos| nth_evaluations(evaluations, pos).collect())
+            .flat_map(|&pos| nth_evaluations(evaluations, pos))
            .collect();

         Self {
             tree,
             fri_proof: proof,
             zipped_queries,
+            num_poly: evaluations.len(),
         }
     }
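With `Vec<Vec<F>>` flattened into a single `Vec<F>`, the evaluations of all `num_poly` polynomials at the i-th queried position occupy the contiguous slice `[i * num_poly .. (i + 1) * num_poly]`. A minimal standalone sketch of that layout, with plain `u64` values and a hypothetical helper standing in for the builder code above:

// Sketch of the flattened "zipped queries" layout (illustrative types only).
fn zip_queries(evaluations: &[Vec<u64>], positions: &[usize]) -> (Vec<u64>, usize) {
    let num_poly = evaluations.len();
    let zipped = positions
        .iter()
        // For each queried position, emit every polynomial's evaluation there.
        .flat_map(|&pos| evaluations.iter().map(move |poly| poly[pos]))
        .collect();
    (zipped, num_poly)
}

fn main() {
    let evals = vec![vec![1, 2, 3], vec![10, 20, 30]];
    let (zipped, num_poly) = zip_queries(&evals, &[0, 2]);
    // Row for position 0 is zipped[0..2]; row for position 2 is zipped[2..4].
    assert_eq!(zipped, vec![1, 10, 3, 30]);
    assert_eq!(num_poly, 2);
}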
@@ -120,7 +125,8 @@ impl<F: FftField, H: Hasher> FridaBuilder<F, H> {
 #[derive(CanonicalDeserializeAlt, CanonicalSerializeAlt)]
 pub struct FridaCommitment<F, H: Hasher> {
     zipped_root: H::Hash,
-    zipped_queries: Vec<Vec<F>>,
+    zipped_queries: Vec<F>,
+    num_poly: usize,
     fri_proof: FriProof<F, H>,
 }
@@ -157,9 +163,21 @@ impl<F: FftField, H: Hasher> FridaCommitment<F, H> {
             .queried_evaluations::<N>(positions, &folded_postions, domain_size)
             .unwrap();

-        if queried != batch_polynomials(&self.zipped_queries, alpha) {
+        if queried.len() * self.num_poly != self.zipped_queries.len() {
             return Err(FridaError::InvalidZippedQueries);
         }
+        for i in 0..queried.len() {
+            if queried[i]
+                != evaluate(
+                    self.zipped_queries[(i * self.num_poly)..((i + 1) * self.num_poly)]
+                        .iter()
+                        .copied(),
+                    alpha,
+                )
+            {
+                return Err(FridaError::InvalidZippedQueries);
+            }
+        }
         Ok(())
     }
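The loop checks, query by query, exactly what the old comparison against `batch_polynomials` checked in one shot: the value read from the alpha-batched polynomial at a queried position must equal the Horner evaluation of that position's zipped row at `alpha`, without rebuilding a `Vec<Vec<F>>` first. A minimal numeric sketch of the equivalence, with plain `i64` arithmetic standing in for the field:

fn main() {
    // Two "polynomials" given by their evaluations at two positions.
    let polys = [vec![1i64, 2], vec![3, 4]];
    let alpha = 7i64;
    let pos = 1;
    // Batching first, then reading the position:
    let batched = polys[0][pos] + alpha * polys[1][pos]; // 2 + 7*4 = 30
    // Reading the zipped row first, then Horner-evaluating at alpha:
    let row: Vec<i64> = polys.iter().map(|p| p[pos]).collect();
    let horner = row.iter().rfold(0, |acc, &c| acc * alpha + c);
    assert_eq!(batched, horner);
}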
@@ -172,6 +190,7 @@ impl<F: FftField, H: Hasher> From<FridaBuilder<F, H>> for FridaCommitment<F, H>
             zipped_root,
             zipped_queries: value.zipped_queries,
             fri_proof: value.fri_proof,
+            num_poly: value.num_poly,
         }
     }
 }
@@ -182,13 +201,21 @@ impl<F: FftField, H: Hasher> From<FridaBuilder<F, H>> for FridaCommitment<F, H>
 pub fn batch_polynomials<F: Field>(evaluations: &[Vec<F>], alpha: F) -> Vec<F> {
     let mut combined_poly = Vec::with_capacity(evaluations[0].len());
     for i in 0..evaluations[0].len() {
-        combined_poly.push(
-            nth_evaluations(evaluations, i).rfold(F::ZERO, |result, eval| result * alpha + eval),
-        )
+        combined_poly.push(evaluate(nth_evaluations(evaluations, i), alpha))
     }
     combined_poly
 }

+#[inline]
+fn evaluate<F: Field, I: IntoIterator<Item = F>>(coeffs: I, alpha: F) -> F
+where
+    I::IntoIter: DoubleEndedIterator,
+{
+    coeffs
+        .into_iter()
+        .rfold(F::ZERO, |result, eval| result * alpha + eval)
+}
+
 /// Returns `(poly[n] for poly in evaluations)`
 #[inline]
 fn nth_evaluations<F: Copy>(
......
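A note on the `evaluate` helper added above: the `rfold` is Horner's rule, starting from the highest-order value and repeatedly multiplying by `alpha` and adding the next one, so `evaluate([a0, a1, a2], alpha)` yields `a0 + a1*alpha + a2*alpha^2`. A standalone sketch over `i64` (the real function is generic over `F: Field`):

// Horner evaluation as in `evaluate`, specialized to i64 for illustration.
fn horner(coeffs: &[i64], alpha: i64) -> i64 {
    coeffs.iter().rfold(0, |result, &eval| result * alpha + eval)
}

fn main() {
    // p(x) = 5 + 3x + 2x^2 at x = 10: 5 + 30 + 200 = 235
    assert_eq!(horner(&[5, 3, 2], 10), 235);
}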
use fri_test_utils::{
    do_for_multiple_folding_factors, random_file, Fq, BLOWUP_FACTOR, DOMAIN_SIZE,
    NUMBER_OF_POLYNOMIALS, NUM_QUERIES, POLY_COEFFS_LEN,
};
use rand::{thread_rng, Rng};

use crate::{
    algorithms::Blake3,
    frida::{nth_evaluations, FridaCommitment},
    rng::FriChallenger,
    utils::{to_evaluations, HasherExt},
};

use super::FridaBuilder;

#[test]
fn test_frida() {
    let rng = FriChallenger::<Blake3>::default();
    let file = random_file::<Fq>(POLY_COEFFS_LEN, NUMBER_OF_POLYNOMIALS)
        .into_iter()
        .map(|poly| to_evaluations(poly, DOMAIN_SIZE))
        .collect::<Vec<_>>();

    do_for_multiple_folding_factors!(FACTOR = 2, 4, 8, 16 => {
        let builder =
            FridaBuilder::<_, Blake3>::new::<FACTOR, _>(&file, rng.clone(), BLOWUP_FACTOR, 1, NUM_QUERIES);

        let mut rng = thread_rng();
        let position = rng.gen_range(0..DOMAIN_SIZE);
        let proof = builder.prove_shards(&[position]);

        let mut positions = [rng.gen_range(0..DOMAIN_SIZE), rng.gen_range(0..DOMAIN_SIZE)];
        positions.sort();
        let proof2 = builder.prove_shards(&positions);

        let commit = FridaCommitment::from(builder);

        let rng = FriChallenger::<Blake3>::default();
        commit
            .verify::<FACTOR, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE)
            .unwrap();

        assert!(commit.verify::<{FACTOR*2}, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE).is_err());
        assert!(commit.verify::<{FACTOR/2}, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE).is_err());
        assert!(commit.verify::<FACTOR, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE / 2).is_err());
        assert!(commit.verify::<FACTOR, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE * 2).is_err());
        assert!(commit.verify::<FACTOR, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN / 2, DOMAIN_SIZE).is_err());

        assert!(proof.verify(commit.tree_root(), &[position], &[Blake3::hash_item(&nth_evaluations(&file, position).collect::<Vec<_>>())], DOMAIN_SIZE));
        assert!(!proof.verify(commit.tree_root(), &[position], &[Blake3::hash_item(&nth_evaluations(&file, position + 1).collect::<Vec<_>>())], DOMAIN_SIZE));
        assert!(proof2.verify(commit.tree_root(), &positions, &Blake3::hash_many(&positions.iter().map(|&p| nth_evaluations(&file, p).collect::<Vec<_>>()).collect::<Vec<_>>()), DOMAIN_SIZE));
    });
}
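The shard proofs in the test above verify Merkle openings whose leaves commit to one position's zipped row, i.e. the evaluations of every polynomial at that position, in order; opening against a different position's row must fail. A std-only sketch of that leaf convention (with `DefaultHasher` standing in for Blake3 and `u64` values for field elements):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hash of the zipped row for one position (illustrative hasher and types).
fn leaf_hash(file: &[Vec<u64>], position: usize) -> u64 {
    let row: Vec<u64> = file.iter().map(|poly| poly[position]).collect();
    let mut h = DefaultHasher::new();
    row.hash(&mut h);
    h.finish()
}

fn main() {
    let file = vec![vec![1, 2, 3], vec![10, 20, 30]];
    // A proof for position 0 checked against position 1's row must not match.
    assert_ne!(leaf_hash(&file, 0), leaf_hash(&file, 1));
}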
-use fri_test_utils::{Fq, BLOWUP_FACTOR, DOMAIN_SIZE, NUMBER_OF_POLYNOMIALS, NUM_QUERIES, POLY_COEFFS_LEN};
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
+use fri_test_utils::{
+    do_for_multiple_folding_factors, Fq, BLOWUP_FACTOR, DOMAIN_SIZE, NUMBER_OF_POLYNOMIALS,
+    NUM_QUERIES, POLY_COEFFS_LEN,
+};
 use rand::{thread_rng, Rng};
 use winter_math::{fields::f128::BaseElement, FieldElement, StarkField};
 use winter_rand_utils::{rand_value, rand_vector};
@@ -10,19 +14,9 @@ use crate::{
     folding::{reduce_polynomial, FoldedEvaluations},
     rng::FriChallenger,
     utils::to_evaluations,
+    FriProof,
 };

-macro_rules! do_for_multiple_folding_factors {
-    ($factor: ident = $($factors: literal),* => $action: block) => {
-        {
-            $({
-                const $factor: usize = $factors;
-                $action;
-            })*
-        }
-    };
-}
-
 // This assumes winterfri is correct
 #[test]
 fn test_reduction() {
@@ -66,17 +60,39 @@ fn test_prove_verify() {
         let proof = build_proof(commitments, &mut rng, NUM_QUERIES);
         rng.reset();
-        proof.verify::<FACTOR, _>(&mut rng, NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE).unwrap();
-
-        assert!(proof.verify::<{FACTOR*2}, _>(&mut rng, NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE).is_err());
-        assert!(proof.verify::<{FACTOR/2}, _>(&mut rng, NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE).is_err());
-
-        assert!(proof.verify::<FACTOR, _>(&mut rng, NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE * 2).is_err());
-        assert!(proof.verify::<FACTOR, _>(&mut rng, NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE / 2).is_err());
-
-        assert!(proof.verify::<FACTOR, _>(&mut rng, NUM_QUERIES, POLY_COEFFS_LEN * 2, DOMAIN_SIZE).is_err());
-        assert!(proof.verify::<FACTOR, _>(&mut rng, NUM_QUERIES, POLY_COEFFS_LEN / 2, DOMAIN_SIZE).is_err());
+        proof.verify::<FACTOR, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE).unwrap();
+
+        assert!(proof.verify::<{FACTOR*2}, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE).is_err());
+        assert!(proof.verify::<{FACTOR/2}, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE).is_err());
+
+        assert!(proof.verify::<FACTOR, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE * 2).is_err());
+        assert!(proof.verify::<FACTOR, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE / 2).is_err());
+
+        assert!(proof.verify::<FACTOR, _>(rng.clone(), NUM_QUERIES, POLY_COEFFS_LEN / 2, DOMAIN_SIZE).is_err());
     });
 }
+
+#[test]
+fn test_serialization() {
+    let mut rng = thread_rng();
+    let poly: Vec<Fq> = (0..POLY_COEFFS_LEN).map(|_| rng.gen()).collect();
+
+    do_for_multiple_folding_factors!(FACTOR = 2, 4, 8, 16 => {
+        println!("--Folding factor={FACTOR}");
+
+        let mut rng = FriChallenger::<Blake3>::default();
+        let commitments = commit_polynomial::<FACTOR, _, Blake3, _>(poly.clone(), &mut rng, BLOWUP_FACTOR, 1);
+        let proof = build_proof(commitments, &mut rng, NUM_QUERIES);
+
+        let mut proof_bytes = vec![];
+        proof.serialize_compressed(&mut proof_bytes).unwrap();
+        let proof2 = FriProof::deserialize_compressed(&proof_bytes[..]).unwrap();
+        assert_eq!(proof, proof2);
+
+        rng.reset();
+        proof2.verify::<FACTOR, _>(rng, NUM_QUERIES, POLY_COEFFS_LEN, DOMAIN_SIZE).unwrap();
+    });
+}
......
@@ -7,3 +7,4 @@ edition = "2021"
 [dependencies]
 ark-ff = { version = "0.4.2", default-features = false }
+rand = "0.8.5"
 //! Code shared between tests and benches
 use ark_ff::{Fp128, MontBackend, MontConfig};
+use rand::{distributions::{Distribution, Standard}, thread_rng, Rng};

 pub const NUMBER_OF_POLYNOMIALS: usize = 10;
 pub const POLY_COEFFS_LEN: usize = 4096;
@@ -16,3 +17,21 @@ pub const DOMAIN_SIZE: usize = (POLY_COEFFS_LEN * BLOWUP_FACTOR).next_power_of_two();
 pub struct Test;

 /// A prime, fft-friendly field isomorphic to [`winter_math::fields::f128::BaseElement`].
 pub type Fq = Fp128<MontBackend<Test, 2>>;
+
+pub fn random_file<F: Clone>(k: usize, nb_polynomials: usize) -> Vec<Vec<F>>
+where
+    Standard: Distribution<F>,
+{
+    let nb_items = k * nb_polynomials;
+    let mut rng = thread_rng();
+    (0..nb_items)
+        .map(|_| rng.gen())
+        .collect::<Vec<_>>()
+        .chunks_exact(k)
+        .map(<[F]>::to_vec)
+        .collect()
+}
+
+#[macro_export]
+macro_rules! do_for_multiple_folding_factors {
+    ($factor: ident = $($factors: literal),* => $action: block) => {
+        {
+            $({
+                const $factor: usize = $factors;
+                $action;
+            })*
+        }
+    };
+}
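For reference, a minimal sketch of how the exported macro is used (toy `fold` function assumed): the block runs once per listed factor, with the identifier bound as a `const` so it can serve as a const-generic argument, exactly as in the tests above.

// Toy demonstration of the macro's expansion (illustrative only).
macro_rules! do_for_multiple_folding_factors {
    ($factor: ident = $($factors: literal),* => $action: block) => {
        { $({ const $factor: usize = $factors; $action; })* }
    };
}

fn fold<const N: usize>() -> usize { N }

fn main() {
    // Expands to three scoped blocks, one per factor.
    do_for_multiple_folding_factors!(FACTOR = 2, 4, 8 => {
        assert_eq!(fold::<FACTOR>(), FACTOR);
    });
}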