From c2ca863ca6ebd9ef6b95d0579381b8b3a16023da Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 10:23:04 +0100 Subject: [PATCH 01/22] show matrix shape when not a square --- src/error.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/error.rs b/src/error.rs index bd25d7aa..ddd278ea 100644 --- a/src/error.rs +++ b/src/error.rs @@ -4,7 +4,7 @@ use thiserror::Error; pub enum KomodoError { #[error("Invalid matrix elements: {0}")] InvalidMatrixElements(String), - #[error("Matrix is not a square")] + #[error("Matrix is not a square, ({0} x {1})")] NonSquareMatrix(usize, usize), #[error("Matrix is not invertible at row {0}")] NonInvertibleMatrix(usize), -- GitLab From 481013ee9254ba7d1e82beccc01bfd176e033ef6 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 10:23:23 +0100 Subject: [PATCH 02/22] add "shards" to the "too few shards" error --- src/error.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/error.rs b/src/error.rs index ddd278ea..fc8a992f 100644 --- a/src/error.rs +++ b/src/error.rs @@ -10,7 +10,7 @@ pub enum KomodoError { NonInvertibleMatrix(usize), #[error("Matrices don't have compatible shapes: ({0} x {1}) and ({2} x {3})")] IncompatibleMatrixShapes(usize, usize, usize, usize), - #[error("Expected at least {1}, got {0}")] + #[error("Expected at least {1} shards, got {0}")] TooFewShards(usize, usize), #[error("Blocks are incompatible: {0}")] IncompatibleBlocks(String), -- GitLab From dcc8828514234be427e138f31b8f14255d7bbb7b Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 10:43:13 +0100 Subject: [PATCH 03/22] add a test for `setup::random` --- src/setup.rs | 49 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/src/setup.rs b/src/setup.rs index d4a64cce..9429c8af 100644 --- a/src/setup.rs +++ b/src/setup.rs @@ -50,3 +50,52 @@ where Ok(powers) } + +#[cfg(test)] +mod tests { + use std::ops::Div; + + use ark_bls12_381::Bls12_381; + use ark_ec::pairing::Pairing; + use ark_ff::PrimeField; + use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial}; + + use super::random; + + type UniPoly381 = DensePolynomial<<Bls12_381 as Pairing>::ScalarField>; + + /// computes $a / b$ rounded to the integer above + /// + /// > **Note** + /// > co-authored by ChatGPT + fn ceil_divide(a: usize, b: usize) -> usize { + (a + b - 1) / b + } + + #[test] + fn test_ceil_divide() { + assert_eq!(ceil_divide(10, 2), 5); + assert_eq!(ceil_divide(10, 3), 4); + assert_eq!(ceil_divide(10, 6), 2); + } + + fn random_setup_size_template<E, P>(nb_bytes: usize) + where + E: Pairing, + P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>, + for<'a, 'b> &'a P: Div<&'b P, Output = P>, + { + let powers = random::<E, P>(nb_bytes); + assert!(powers.is_ok()); + + assert_eq!( + powers.unwrap().powers_of_g.to_vec().len(), + ceil_divide(nb_bytes, E::ScalarField::MODULUS_BIT_SIZE as usize / 8) + ); + } + + #[test] + fn random_setup_size() { + random_setup_size_template::<Bls12_381, UniPoly381>(10 * 1_024); + } +} -- GitLab From 40384ba669ec323e616915fa3d4a0aaa4f0ee80a Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 11:11:58 +0100 Subject: [PATCH 04/22] simplify `linalg::Matrix::random` --- src/linalg.rs | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/src/linalg.rs b/src/linalg.rs index 48cac2a4..140796fb 
100644 --- a/src/linalg.rs +++ b/src/linalg.rs @@ -56,19 +56,11 @@ impl<T: Field> Matrix<T> { pub fn random(n: usize, m: usize) -> Self { let mut rng = rand::thread_rng(); - Matrix::from_vec_vec( - (0..n) - .map(|_| { - (0..m) - .map(|_| { - let element: u128 = rng.gen(); - T::from(element) - }) - .collect() - }) - .collect::<Vec<Vec<T>>>(), - ) - .unwrap() + Self { + elements: (0..(n * m)).map(|_| T::from(rng.gen::<u128>())).collect(), + height: n, + width: m, + } } pub fn from_vec_vec(matrix: Vec<Vec<T>>) -> Result<Self, KomodoError> { -- GitLab From 6247129007fbef34af4ac7367bca1b49f816ae1e Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 11:19:51 +0100 Subject: [PATCH 05/22] format matrix in `linalg::tests` --- src/linalg.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/linalg.rs b/src/linalg.rs index 140796fb..94e0ebca 100644 --- a/src/linalg.rs +++ b/src/linalg.rs @@ -465,9 +465,9 @@ mod tests { let actual = Matrix::<Fr>::vandermonde(&mat_to_elements(vec![vec![0, 1, 2, 3, 4]])[0], 4); #[rustfmt::skip] let expected = Matrix::from_vec_vec(mat_to_elements(vec![ - vec![1, 1, 1, 1, 1], - vec![0, 1, 2, 3, 4], - vec![0, 1, 4, 9, 16], + vec![1, 1, 1, 1, 1], + vec![0, 1, 2, 3, 4], + vec![0, 1, 4, 9, 16], vec![0, 1, 8, 27, 64], ])) .unwrap(); -- GitLab From 6c25b6037cdec22e0bfcc1aaddbd18407a2badca Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 11:31:50 +0100 Subject: [PATCH 06/22] rename `fec::Shard::bytes` to `fec::Shard::data` --- src/fec.rs | 15 +++++++-------- src/lib.rs | 4 ++-- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/src/fec.rs b/src/fec.rs index 2c834718..c94ed803 100644 --- a/src/fec.rs +++ b/src/fec.rs @@ -15,7 +15,7 @@ pub struct Shard<E: Pairing> { pub k: u32, pub linear_combination: Vec<E::ScalarField>, pub hash: Vec<u8>, - pub bytes: Vec<E::ScalarField>, + pub data: Vec<E::ScalarField>, pub size: usize, } @@ -36,10 +36,10 @@ impl<E: Pairing> Shard<E> { .map(|(l, r)| l.mul(alpha) + r.mul(beta)) .collect(), hash: self.hash.clone(), - bytes: self - .bytes + data: self + .data .iter() - .zip(other.bytes.iter()) + .zip(other.data.iter()) .map(|(es, eo)| es.mul(alpha).add(eo.mul(beta))) .collect::<Vec<_>>(), size: self.size, @@ -93,7 +93,7 @@ pub fn encode<E: Pairing>( k: k as u32, linear_combination: encoding_mat.get_col(j).unwrap(), hash: hash.clone(), - bytes: s.to_vec(), + data: s.to_vec(), size: data.len(), }) .collect()) @@ -119,7 +119,7 @@ pub fn decode<E: Pairing>(blocks: Vec<Shard<E>>) -> Result<Vec<u8>, KomodoError> blocks .iter() .take(k as usize) - .map(|b| b.bytes.clone()) + .map(|b| b.data.clone()) .collect(), )?; @@ -204,12 +204,11 @@ mod tests { linear_combination: &[E::ScalarField], bytes: &[u8], ) -> Shard<E> { - let bytes = field::split_data_into_field_elements::<E>(bytes, 1); Shard { k: 2, linear_combination: linear_combination.to_vec(), hash: vec![], - bytes, + data: field::split_data_into_field_elements::<E>(bytes, 1), size: 0, } } diff --git a/src/lib.rs b/src/lib.rs index d581ff9a..8e6c2d71 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -41,7 +41,7 @@ impl<E: Pairing> std::fmt::Display for Block<E> { write!(f, "]")?; write!(f, ",")?; write!(f, "bytes: [")?; - for x in &self.shard.bytes { + for x in &self.shard.data { if x.is_zero() { write!(f, "0,")?; } else { @@ -196,7 +196,7 @@ where P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>, for<'a, 'b> &'a P: Div<&'b P, Output = P>, { - let elements = 
block.shard.bytes.clone(); + let elements = block.shard.data.clone(); let polynomial = P::from_coefficients_vec(elements); let (commit, _) = KZG10::<E, P>::commit(verifier_key, &polynomial, None, None)?; -- GitLab From f25810bf4d7a5d6020a4db8204377b9313e9003b Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 11:34:07 +0100 Subject: [PATCH 07/22] rename arguments in `fec::decode` --- src/fec.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/fec.rs b/src/fec.rs index c94ed803..63d7d47e 100644 --- a/src/fec.rs +++ b/src/fec.rs @@ -99,34 +99,34 @@ pub fn encode<E: Pairing>( .collect()) } -pub fn decode<E: Pairing>(blocks: Vec<Shard<E>>) -> Result<Vec<u8>, KomodoError> { - let k = blocks[0].k; - let np = blocks.len(); +pub fn decode<E: Pairing>(shards: Vec<Shard<E>>) -> Result<Vec<u8>, KomodoError> { + let k = shards[0].k; + let np = shards.len(); if np < k as usize { return Err(KomodoError::TooFewShards(np, k as usize)); } let encoding_mat = Matrix::from_vec_vec( - blocks + shards .iter() .map(|b| b.linear_combination.clone()) .collect(), )? .truncate(Some(np - k as usize), None); - let shards = Matrix::from_vec_vec( - blocks + let shard_mat = Matrix::from_vec_vec( + shards .iter() .take(k as usize) .map(|b| b.data.clone()) .collect(), )?; - let source_shards = encoding_mat.invert()?.mul(&shards)?.transpose().elements; + let source_shards = encoding_mat.invert()?.mul(&shard_mat)?.transpose().elements; let mut bytes = field::merge_elements_into_bytes::<E>(&source_shards); - bytes.resize(blocks[0].size, 0); + bytes.resize(shards[0].size, 0); Ok(bytes) } -- GitLab From ba04b19daac7798fb5262328deccc2697a5ead30 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 13:19:04 +0100 Subject: [PATCH 08/22] rename functions in `fec::tests` --- src/fec.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/fec.rs b/src/fec.rs index 63d7d47e..ceabd086 100644 --- a/src/fec.rs +++ b/src/fec.rs @@ -153,7 +153,7 @@ mod tests { E::ScalarField::from_le_bytes_mod_order(&n.to_le_bytes()) } - fn decoding_template<E: Pairing>(data: &[u8], k: usize, n: usize) { + fn end_to_end_template<E: Pairing>(data: &[u8], k: usize, n: usize) { let test_case = format!("TEST | data: {} bytes, k: {}, n: {}", data.len(), k, n); assert_eq!( data, @@ -163,18 +163,18 @@ mod tests { } #[test] - fn decoding() { + fn end_to_end() { let bytes = bytes(); let (k, n) = (3, 5); let modulus_byte_size = <Bls12_381 as Pairing>::ScalarField::MODULUS_BIT_SIZE as usize / 8; // NOTE: starting at `modulus_byte_size * (k - 1) + 1` to include at least _k_ elements for b in (modulus_byte_size * (k - 1) + 1)..bytes.len() { - decoding_template::<Bls12_381>(&bytes[..b], k, n); + end_to_end_template::<Bls12_381>(&bytes[..b], k, n); } } - fn decoding_with_recoding_template<E: Pairing>(data: &[u8], k: usize, n: usize) { + fn end_to_end_with_recoding_template<E: Pairing>(data: &[u8], k: usize, n: usize) { let mut shards = encode(data, &Matrix::random(k, n)).unwrap(); shards[1] = shards[2].combine(to_curve::<E>(7), &shards[4], to_curve::<E>(6)); shards[2] = shards[1].combine(to_curve::<E>(5), &shards[3], to_curve::<E>(4)); @@ -189,14 +189,14 @@ mod tests { } #[test] - fn decoding_with_recoding() { + fn end_to_end_with_recoding() { let bytes = bytes(); let (k, n) = (3, 5); let modulus_byte_size = <Bls12_381 as Pairing>::ScalarField::MODULUS_BIT_SIZE as usize / 8; // NOTE: starting at `modulus_byte_size 
* (k - 1) + 1` to include at least _k_ elements for b in (modulus_byte_size * (k - 1) + 1)..bytes.len() { - decoding_with_recoding_template::<Bls12_381>(&bytes[..b], k, n); + end_to_end_with_recoding_template::<Bls12_381>(&bytes[..b], k, n); } } -- GitLab From 0b7af04abd750a8a7ddeb251d8604713fd756510 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 13:35:54 +0100 Subject: [PATCH 09/22] refactor the "end to end" tests in `fec::tests` --- src/fec.rs | 35 +++++++++++++++++++---------------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/src/fec.rs b/src/fec.rs index ceabd086..783147d0 100644 --- a/src/fec.rs +++ b/src/fec.rs @@ -162,18 +162,6 @@ mod tests { ); } - #[test] - fn end_to_end() { - let bytes = bytes(); - let (k, n) = (3, 5); - - let modulus_byte_size = <Bls12_381 as Pairing>::ScalarField::MODULUS_BIT_SIZE as usize / 8; - // NOTE: starting at `modulus_byte_size * (k - 1) + 1` to include at least _k_ elements - for b in (modulus_byte_size * (k - 1) + 1)..bytes.len() { - end_to_end_template::<Bls12_381>(&bytes[..b], k, n); - } - } - fn end_to_end_with_recoding_template<E: Pairing>(data: &[u8], k: usize, n: usize) { let mut shards = encode(data, &Matrix::random(k, n)).unwrap(); shards[1] = shards[2].combine(to_curve::<E>(7), &shards[4], to_curve::<E>(6)); @@ -188,18 +176,33 @@ mod tests { ); } - #[test] - fn end_to_end_with_recoding() { + // NOTE: this is part of an experiment, to be honest, to be able to see how + // much these tests could be refactored and simplified + fn run_template<E, F>(test: F) + where + E: Pairing, + F: Fn(&[u8], usize, usize), + { let bytes = bytes(); let (k, n) = (3, 5); - let modulus_byte_size = <Bls12_381 as Pairing>::ScalarField::MODULUS_BIT_SIZE as usize / 8; + let modulus_byte_size = E::ScalarField::MODULUS_BIT_SIZE as usize / 8; // NOTE: starting at `modulus_byte_size * (k - 1) + 1` to include at least _k_ elements for b in (modulus_byte_size * (k - 1) + 1)..bytes.len() { - end_to_end_with_recoding_template::<Bls12_381>(&bytes[..b], k, n); + test(&bytes[..b], k, n); } } + #[test] + fn end_to_end() { + run_template::<Bls12_381, _>(end_to_end_template::<Bls12_381>); + } + + #[test] + fn end_to_end_with_recoding() { + run_template::<Bls12_381, _>(end_to_end_with_recoding_template::<Bls12_381>); + } + fn create_fake_shard<E: Pairing>( linear_combination: &[E::ScalarField], bytes: &[u8], -- GitLab From 72a5ba6990650689847031306325442d7d21620f Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 10:30:15 +0100 Subject: [PATCH 10/22] add documentation --- src/fec.rs | 29 ++++++++++++++ src/lib.rs | 29 ++++++++++++++ src/linalg.rs | 104 ++++++++++++++++++++++++++++++++++++++++++++++++++ src/setup.rs | 9 +++++ 4 files changed, 171 insertions(+) diff --git a/src/fec.rs b/src/fec.rs index 783147d0..81e11cd5 100644 --- a/src/fec.rs +++ b/src/fec.rs @@ -1,3 +1,4 @@ +//! 
a module to encode, recode and decode shards of data with FEC methods use std::ops::{Add, Mul}; use ark_ec::pairing::Pairing; @@ -10,6 +11,13 @@ use crate::error::KomodoError; use crate::field; use crate::linalg::Matrix; +/// representation of a FEC shard of data +/// +/// - `k` is the code parameter, required to decode +/// - the _linear combination_ tells the decoder how the shard was constructed, +/// with respect to the original source shards => this effectively allows +/// support for _recoding_ +/// - the hash and the size represent the original data #[derive(Debug, Default, Clone, PartialEq, CanonicalSerialize, CanonicalDeserialize)] pub struct Shard<E: Pairing> { pub k: u32, @@ -20,6 +28,7 @@ pub struct Shard<E: Pairing> { } impl<E: Pairing> Shard<E> { + /// compute the linear combination between two [`Shard`]s pub fn combine(&self, alpha: E::ScalarField, other: &Self, beta: E::ScalarField) -> Self { if alpha.is_zero() { return other.clone(); @@ -47,6 +56,13 @@ impl<E: Pairing> Shard<E> { } } +/// compute the linear combination between an arbitrary number of [`Shard`]s +/// +/// > **Note** +/// > this is basically a multi-[`Shard`] wrapper around [`Shard::combine`] +/// > +/// > returns [`None`] if the number of shards is not the same as the number of +/// > coefficients or if no shards are provided. pub(super) fn combine<E: Pairing>( shards: &[Shard<E>], coeffs: &[E::ScalarField], @@ -68,6 +84,11 @@ pub(super) fn combine<E: Pairing>( Some(s) } +/// applies a given encoding matrix to some data to generate encoded shards +/// +/// > **Note** +/// > the input data and the encoding matrix should have compatible shapes, +/// > otherwise, an error might be thrown to the caller. pub fn encode<E: Pairing>( data: &[u8], encoding_mat: &Matrix<E::ScalarField>, @@ -99,6 +120,13 @@ pub fn encode<E: Pairing>( .collect()) } +/// reconstruct the original data from a set of encoded, possibly recoded, +/// shards +/// +/// > **Note** +/// > this function might fail in a variety of cases +/// > - if there are too few shards +/// > - if there are linear dependencies between shards pub fn decode<E: Pairing>(shards: Vec<Shard<E>>) -> Result<Vec<u8>, KomodoError> { let k = shards[0].k; let np = shards.len(); @@ -162,6 +190,7 @@ mod tests { ); } + /// n should be at least 5 fn end_to_end_with_recoding_template<E: Pairing>(data: &[u8], k: usize, n: usize) { let mut shards = encode(data, &Matrix::random(k, n)).unwrap(); shards[1] = shards[2].combine(to_curve::<E>(7), &shards[4], to_curve::<E>(6)); diff --git a/src/lib.rs b/src/lib.rs index 8e6c2d71..fb0f463e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,3 +1,4 @@ +//! Komodo: Cryptographically-proven Erasure Coding use std::ops::Div; use ark_ec::pairing::Pairing; @@ -19,6 +20,10 @@ use error::KomodoError; use crate::linalg::Matrix; +/// representation of a block of proven data. +/// +/// this is a wrapper around a [`fec::Shard`] with some additional cryptographic +/// information that allows proving the integrity of said shard. #[derive(Debug, Default, Clone, PartialEq, CanonicalSerialize, CanonicalDeserialize)] pub struct Block<E: Pairing> { pub shard: fec::Shard<E>, @@ -74,6 +79,14 @@ impl<E: Pairing> std::fmt::Display for Block<E> { } } +/// compute the commitments and randomnesses of a set of polynomials +/// +/// this function uses the KZG commitment scheme.
+/// +/// > **Note** +/// > - `powers` can be generated with functions like [`setup::random`] +/// > - if `polynomials` has length `n`, then [`commit`] will generate `n` +/// > commits and `n` randomnesses. #[allow(clippy::type_complexity)] pub fn commit<E, P>( powers: &Powers<E>, @@ -95,6 +108,11 @@ where Ok((commits, randomnesses)) } +/// compute encoded and proven blocks of data from some data and an encoding +/// method +/// +/// > **Note** +/// > this is a wrapper around [`fec::encode`]. pub fn encode<E, P>( bytes: &[u8], encoding_mat: &Matrix<E::ScalarField>, @@ -139,6 +157,15 @@ where .collect::<Vec<_>>()) } +/// compute a recoded block from an arbitrary set of blocks +/// +/// coefficients will be drawn at random, one for each block. +/// +/// if the blocks appear to come from different data, e.g. if `k` is not the +/// same or the hash of the data is different, an error will be returned. +/// +/// > **Note** +/// > this is a wrapper around [`fec::combine`]. pub fn recode<E: Pairing>(blocks: &[Block<E>]) -> Result<Option<Block<E>>, KomodoError> { let mut rng = rand::thread_rng(); @@ -187,6 +214,7 @@ pub fn recode<E: Pairing>(blocks: &[Block<E>]) -> Result<Option<Block<E>>, Komod })) } +/// verify that a single block of encoded and proven data is valid pub fn verify<E, P>( block: &Block<E>, verifier_key: &Powers<E>, @@ -210,6 +238,7 @@ where Ok(Into::<E::G1>::into(commit.0) == rhs) } +/// verify at once that a bunch of encoded and proven blocks are valid pub fn batch_verify<E, P>( blocks: &[Block<E>], verifier_key: &Powers<E>, diff --git a/src/linalg.rs b/src/linalg.rs index 94e0ebca..6742d4cb 100644 --- a/src/linalg.rs +++ b/src/linalg.rs @@ -1,9 +1,18 @@ +//! some linear algebra fun +//! +//! this module mainly contains an implementation of matrices over a finite +//! field. use ark_ff::Field; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use rand::Rng; use crate::error::KomodoError; +/// a matrix defined over a finite field +/// +/// internally, a matrix is just a vector of field elements whose length is +/// exactly the width times the height and where elements are organized row by +/// row. #[derive(Clone, PartialEq, Default, Debug, CanonicalSerialize, CanonicalDeserialize)] pub struct Matrix<T: Field> { pub elements: Vec<T>, @@ -12,6 +21,18 @@ pub struct Matrix<T: Field> { } impl<T: Field> Matrix<T> { + /// build a matrix from a diagonal of elements + /// + /// # Example + /// building a diagonal matrix from the diagonal $[1, 2, 3, 4]$ will give + /// ```text + /// [ + /// [1, 0, 0, 0], + /// [0, 2, 0, 0], + /// [0, 0, 3, 0], + /// [0, 0, 0, 4], + /// ] + /// ``` fn from_diagonal(diagonal: Vec<T>) -> Self { let size = diagonal.len(); @@ -28,10 +49,44 @@ impl<T: Field> Matrix<T> { } } + /// build the identity matrix of a given size + /// + /// # Example + /// the identity of size 3 is + /// ```text + /// [ + /// [1, 0, 0], + /// [0, 1, 0], + /// [0, 0, 1], + /// ] + /// ``` fn identity(size: usize) -> Self { Self::from_diagonal(vec![T::one(); size]) } + /// build a Vandermonde matrix for some seed points + /// + /// actually, this is the transpose of the Vandermonde matrix defined in the + /// [Wikipedia article][article], i.e. there are as many columns as there + /// are seed points and there are as many rows as there are powers of the + /// seed points. + /// + /// > **Note** + /// > the points need to be distinct. + /// > no runtime check will be performed to ensure that condition.
+ /// # Example /// a Vandermonde matrix of height 4 with seed points $[0, 1, 2, 3, 4]$ is + /// ```text + /// [ + /// [1, 1, 1, 1, 1], + /// [0, 1, 2, 3, 4], + /// [0, 1, 4, 9, 16], + /// [0, 1, 8, 27, 64], + /// ] + /// ``` + /// + /// [article]: https://en.wikipedia.org/wiki/Vandermonde_matrix pub fn vandermonde(points: &[T], height: usize) -> Self { let width = points.len(); @@ -53,6 +108,7 @@ impl<T: Field> Matrix<T> { } } + /// build a completely random matrix of shape $n \times m$ pub fn random(n: usize, m: usize) -> Self { let mut rng = rand::thread_rng(); @@ -63,6 +119,10 @@ impl<T: Field> Matrix<T> { } } + /// build a matrix from a "_matrix_" of elements + /// + /// > **Note** + /// > each row should have the same length pub fn from_vec_vec(matrix: Vec<Vec<T>>) -> Result<Self, KomodoError> { let height = matrix.len(); let width = matrix[0].len(); @@ -101,6 +161,10 @@ impl<T: Field> Matrix<T> { self.elements[i * self.width + j] = value; } + /// extract a single column from the matrix + /// + /// > **Note** + /// > returns `None` if the provided index is out of bounds pub(super) fn get_col(&self, j: usize) -> Option<Vec<T>> { if j >= self.width { return None; @@ -127,6 +191,12 @@ impl<T: Field> Matrix<T> { } } + /// compute the inverse of the matrix + /// + /// > **Note** + /// > the matrix should be + /// > - square + /// > - invertible pub(super) fn invert(&self) -> Result<Self, KomodoError> { if self.height != self.width { return Err(KomodoError::NonSquareMatrix(self.height, self.width)); @@ -156,12 +226,27 @@ impl<T: Field> Matrix<T> { Ok(inverse) } + /// swap rows `i` and `j`, in place + /// + /// > **Note** + /// > this function assumes both `i` and `j` are in bounds, unexpected + /// > results will occur if `i` or `j` are out of bounds. fn swap_rows(&mut self, i: usize, j: usize) { for k in 0..self.width { self.elements.swap(i * self.width + k, j * self.width + k); } } + /// compute the rank of the matrix + /// + /// > **Note** + /// > see the [_Wikipedia article_](https://en.wikipedia.org/wiki/Rank_(linear_algebra)) + /// > for more information + /// > + /// > - the rank is never larger than the min between the height and the + /// > width of any matrix. + /// > - a square and invertible matrix will have _full rank_, i.e. it will + /// > be equal to its size. pub fn rank(&self) -> usize { let mut mat = self.clone(); let mut i = 0; @@ -201,6 +286,14 @@ impl<T: Field> Matrix<T> { nb_non_zero_rows } + /// compute the matrix multiplication with another matrix + /// + /// if `mat` represents a matrix $A$ and `rhs` is the representation of + /// another matrix $B$, then `mat.mul(rhs)` will compute $A \times B$ + /// + /// > **Note** + /// > both matrices should have compatible shapes, i.e. if `self` has shape + /// > `(n, m)` and `rhs` has shape `(p, q)`, then `m == p`. pub(super) fn mul(&self, rhs: &Self) -> Result<Self, KomodoError> { if self.width != rhs.height { return Err(KomodoError::IncompatibleMatrixShapes( @@ -231,6 +324,10 @@ impl<T: Field> Matrix<T> { }) } + /// compute the transpose of the matrix + /// + /// > **Note** + /// > see the [_Wikipedia article_](https://en.wikipedia.org/wiki/Transpose) pub(super) fn transpose(&self) -> Self { let height = self.width; let width = self.height; @@ -251,6 +348,11 @@ impl<T: Field> Matrix<T> { } } + /// truncate the matrix to the provided shape, from right and bottom + /// + /// # Example + /// if a matrix has shape `(10, 11)` and is truncated to `(5, 7)`, the 5 + /// bottom rows and 4 right columns will be removed.
pub(super) fn truncate(&self, rows: Option<usize>, cols: Option<usize>) -> Self { let width = if let Some(w) = cols { self.width - w @@ -326,6 +428,8 @@ mod tests { use super::{KomodoError, Matrix}; + // two wrapped functions to make the tests more readable + fn vec_to_elements<T: Field>(elements: Vec<u128>) -> Vec<T> { elements.iter().map(|&x| T::from(x)).collect() } diff --git a/src/setup.rs b/src/setup.rs index 9429c8af..9b6cb97d 100644 --- a/src/setup.rs +++ b/src/setup.rs @@ -1,3 +1,4 @@ +//! create and interact with ZK trusted setups use std::ops::Div; use ark_ec::pairing::Pairing; @@ -7,6 +8,7 @@ use ark_poly_commit::kzg10::{Powers, UniversalParams, VerifierKey, KZG10}; use ark_std::test_rng; /// Specializes the public parameters for a given maximum degree `d` for polynomials +/// /// `d` should be less that `pp.max_degree()`. /// /// > see [`ark-poly-commit::kzg10::tests::KZG10`](https://github.com/jdetchart/poly-commit/blob/master/src/kzg10/mod.rs#L509) @@ -35,6 +37,13 @@ pub fn trim<E: Pairing>( (powers, vk) } +/// build a random trusted setup for a given number of bytes +/// +/// `nb_bytes` will be divided by the "_modulus size_" of the elliptic curve to +/// get the number of powers of the secret to generate, e.g. creating a trusted +/// setup for 10kib on BLS-12-381 requires 331 powers of $\tau$. +/// +/// /!\ Should be used only for tests, not for any real world usage. /!\ pub fn random<E, P>(nb_bytes: usize) -> Result<Powers<'static, E>, ark_poly_commit::Error> where E: Pairing, -- GitLab From f45939bb6d432e9c7587c010ff63a8112566983b Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 14:03:29 +0100 Subject: [PATCH 11/22] remove the tests "with padding" --- src/lib.rs | 35 ----------------------------------- 1 file changed, 35 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index fb0f463e..4e6b09c4 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -328,30 +328,6 @@ mod tests { verify_template::<E, P>(&bytes, &encoding_mat, &batch) .unwrap_or_else(|_| panic!("verification failed for bls12-381\n{test_case}")); - verify_template::<E, P>(&bytes[0..(bytes.len() - 10)], &encoding_mat, &batch) - .unwrap_or_else(|_| { - panic!("verification failed for bls12-381 with padding\n{test_case}") - }); - } - - #[ignore = "Semi-AVID-PR does not support large padding"] - #[test] - fn verification_with_large_padding() { - type E = Bls12_381; - type P = UniPoly381; - - let (k, n) = (4, 6); - let batch = [1, 2, 3]; - - let bytes = bytes(); - let encoding_mat = Matrix::random(k, n); - - let test_case = format!("TEST | data: {} bytes, k: {}, n: {}", bytes.len(), k, n); - - verify_template::<E, P>(&bytes[0..(bytes.len() - 33)], &encoding_mat, &batch) - .unwrap_or_else(|_| { - panic!("verification failed for bls12-381 with padding\n{test_case}") - }); } fn verify_with_errors_template<E, P>( @@ -411,10 +387,6 @@ mod tests { verify_with_errors_template::<E, P>(&bytes, &encoding_mat) .unwrap_or_else(|_| panic!("verification failed for bls12-381\n{test_case}")); - verify_with_errors_template::<E, P>(&bytes[0..(bytes.len() - 10)], &encoding_mat) - .unwrap_or_else(|_| { - panic!("verification failed for bls12-381 with padding\n{test_case}") - }); } fn verify_recoding_template<E, P>( @@ -457,10 +429,6 @@ mod tests { verify_recoding_template::<E, P>(&bytes, &encoding_mat) .unwrap_or_else(|_| panic!("verification failed for bls12-381\n{test_case}")); - verify_recoding_template::<E, P>(&bytes[0..(bytes.len() - 10)], &encoding_mat) - .unwrap_or_else(|_| { - 
panic!("verification failed for bls12-381 with padding\n{test_case}") - }); } fn end_to_end_template<E, P>( @@ -497,9 +465,6 @@ mod tests { end_to_end_template::<E, P>(&bytes, &encoding_mat) .unwrap_or_else(|_| panic!("end to end failed for bls12-381\n{test_case}")); - end_to_end_template::<E, P>(&bytes[0..(bytes.len() - 10)], &encoding_mat).unwrap_or_else( - |_| panic!("end to end failed for bls12-381 with padding\n{test_case}"), - ); } fn end_to_end_with_recoding_template<E, P>(bytes: &[u8]) -> Result<(), ark_poly_commit::Error> -- GitLab From 1367e20eee81b593410470e02f99aad3bb6dec20 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 14:07:26 +0100 Subject: [PATCH 12/22] remove `batch_verify` maybe one day we'll find a way to do that, but for now it's pretty useless. --- src/lib.rs | 55 +++--------------------------------------------------- 1 file changed, 3 insertions(+), 52 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 4e6b09c4..03c0988b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -238,25 +238,6 @@ where Ok(Into::<E::G1>::into(commit.0) == rhs) } -/// verify at once that a bunch of encoded and proven blocks are valid -pub fn batch_verify<E, P>( - blocks: &[Block<E>], - verifier_key: &Powers<E>, -) -> Result<bool, ark_poly_commit::Error> -where - E: Pairing, - P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>, - for<'a, 'b> &'a P: Div<&'b P, Output = P>, -{ - for block in blocks { - if !verify(block, verifier_key)? { - return Ok(false); - } - } - - Ok(true) -} - #[cfg(test)] mod tests { use std::ops::{Div, Mul}; @@ -268,10 +249,10 @@ mod tests { use ark_poly_commit::kzg10::Commitment; use crate::{ - batch_verify, encode, + encode, fec::{decode, Shard}, linalg::Matrix, - recode, setup, verify, Block, + recode, setup, verify, }; type UniPoly381 = DensePolynomial<<Bls12_381 as Pairing>::ScalarField>; @@ -283,7 +264,6 @@ mod tests { fn verify_template<E, P>( bytes: &[u8], encoding_mat: &Matrix<E::ScalarField>, - batch: &[usize], ) -> Result<(), ark_poly_commit::Error> where E: Pairing, @@ -297,19 +277,6 @@ mod tests { assert!(verify::<E, P>(block, &powers)?); } - assert!(batch_verify( - &blocks - .iter() - .enumerate() - .filter_map(|(i, b)| if batch.contains(&i) { - Some(b.clone()) - } else { - None - }) - .collect::<Vec<_>>(), - &powers - )?); - Ok(()) } @@ -319,14 +286,13 @@ mod tests { type P = UniPoly381; let (k, n) = (4, 6); - let batch = [1, 2, 3]; let bytes = bytes(); let encoding_mat = Matrix::random(k, n); let test_case = format!("TEST | data: {} bytes, k: {}, n: {}", bytes.len(), k, n); - verify_template::<E, P>(&bytes, &encoding_mat, &batch) + verify_template::<E, P>(&bytes, &encoding_mat) .unwrap_or_else(|_| panic!("verification failed for bls12-381\n{test_case}")); } @@ -339,8 +305,6 @@ mod tests { P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>, for<'a, 'b> &'a P: Div<&'b P, Output = P>, { - let k = encoding_mat.height; - let powers = setup::random(bytes.len())?; let blocks = encode::<E, P>(bytes, encoding_mat, &powers)?; @@ -357,19 +321,6 @@ mod tests { assert!(!verify::<E, P>(&corrupted_block, &powers)?); - // let's build some blocks containing errors - let mut blocks_with_errors = Vec::new(); - - let bk = blocks.get(k).unwrap(); - blocks_with_errors.push(Block { - shard: bk.shard.clone(), - commit: bk.commit.clone(), - }); - assert!(batch_verify(blocks_with_errors.as_slice(), &powers)?); - - blocks_with_errors.push(corrupted_block); - assert!(!batch_verify(blocks_with_errors.as_slice(), 
&powers)?); - Ok(()) } -- GitLab From 03fa1acacc300cc150f2167502f088188add3684 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 14:16:58 +0100 Subject: [PATCH 13/22] refactor the main tests --- src/lib.rs | 126 ++++++++++++++++++++++------------------------------- 1 file changed, 53 insertions(+), 73 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 03c0988b..b0ee456f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -280,22 +280,6 @@ mod tests { Ok(()) } - #[test] - fn verification() { - type E = Bls12_381; - type P = UniPoly381; - - let (k, n) = (4, 6); - - let bytes = bytes(); - let encoding_mat = Matrix::random(k, n); - - let test_case = format!("TEST | data: {} bytes, k: {}, n: {}", bytes.len(), k, n); - - verify_template::<E, P>(&bytes, &encoding_mat) - .unwrap_or_else(|_| panic!("verification failed for bls12-381\n{test_case}")); - } - fn verify_with_errors_template<E, P>( bytes: &[u8], encoding_mat: &Matrix<E::ScalarField>, @@ -324,22 +308,6 @@ mod tests { Ok(()) } - #[test] - fn verification_with_errors() { - type E = Bls12_381; - type P = UniPoly381; - - let (k, n) = (4, 6); - - let bytes = bytes(); - let encoding_mat = Matrix::random(k, n); - - let test_case = format!("TEST | data: {} bytes, k: {}, n: {}", bytes.len(), k, n); - - verify_with_errors_template::<E, P>(&bytes, &encoding_mat) - .unwrap_or_else(|_| panic!("verification failed for bls12-381\n{test_case}")); - } - fn verify_recoding_template<E, P>( bytes: &[u8], encoding_mat: &Matrix<E::ScalarField>, @@ -366,22 +334,6 @@ mod tests { Ok(()) } - #[test] - fn verify_recoding() { - type E = Bls12_381; - type P = UniPoly381; - - let (k, n) = (4, 6); - - let bytes = bytes(); - let encoding_mat = Matrix::random(k, n); - - let test_case = format!("TEST | data: {} bytes, k: {}, n: {}", bytes.len(), k, n); - - verify_recoding_template::<E, P>(&bytes, &encoding_mat) - .unwrap_or_else(|_| panic!("verification failed for bls12-381\n{test_case}")); - } - fn end_to_end_template<E, P>( bytes: &[u8], encoding_mat: &Matrix<E::ScalarField>, @@ -402,30 +354,17 @@ mod tests { Ok(()) } - #[test] - fn end_to_end() { - type E = Bls12_381; - type P = UniPoly381; - - let (k, n) = (4, 6); - - let bytes = bytes(); - let encoding_mat = Matrix::random(k, n); - - let test_case = format!("TEST | data: {} bytes, k: {}, n: {}", bytes.len(), k, n); - - end_to_end_template::<E, P>(&bytes, &encoding_mat) - .unwrap_or_else(|_| panic!("end to end failed for bls12-381\n{test_case}")); - } - - fn end_to_end_with_recoding_template<E, P>(bytes: &[u8]) -> Result<(), ark_poly_commit::Error> + fn end_to_end_with_recoding_template<E, P>( + bytes: &[u8], + encoding_mat: &Matrix<E::ScalarField>, + ) -> Result<(), ark_poly_commit::Error> where E: Pairing, P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>, for<'a, 'b> &'a P: Div<&'b P, Output = P>, { let powers = setup::random(bytes.len())?; - let blocks = encode::<E, P>(bytes, &Matrix::random(3, 5), &powers)?; + let blocks = encode::<E, P>(bytes, encoding_mat, &powers)?; let b_0_1 = recode(&blocks[0..=1]).unwrap().unwrap(); let shards = vec![ @@ -461,16 +400,57 @@ mod tests { Ok(()) } - #[test] - fn end_to_end_with_recoding() { - type E = Bls12_381; - type P = UniPoly381; + // NOTE: this is part of an experiment, to be honest, to be able to see how + // much these tests could be refactored and simplified + fn run_template<E, T, P, F>(test: F) + where + E: Pairing, + T: Field, + F: Fn(&[u8], &Matrix<T>) -> Result<(), ark_poly_commit::Error>, + P: 
DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>, + for<'a, 'b> &'a P: Div<&'b P, Output = P>, + { + let (k, n) = (4, 6); let bytes = bytes(); + let encoding_mat = Matrix::random(k, n); - let test_case = format!("TEST | data: {} bytes", bytes.len()); + let test_case = format!("TEST | data: {} bytes, k: {}, n: {}", bytes.len(), k, n); + + test(&bytes, &encoding_mat) + .unwrap_or_else(|_| panic!("verification failed for bls12-381\n{test_case}")); + } + + #[test] + fn verification() { + run_template::<Bls12_381, _, UniPoly381, _>(verify_template::<Bls12_381, UniPoly381>); + } + + #[test] + fn verify_with_errors() { + run_template::<Bls12_381, _, UniPoly381, _>( + verify_with_errors_template::<Bls12_381, UniPoly381>, + ); + } - end_to_end_with_recoding_template::<E, P>(&bytes) - .unwrap_or_else(|_| panic!("end to end failed for bls12-381\n{test_case}")); + #[test] + fn verify_recoding() { + run_template::<Bls12_381, _, UniPoly381, _>( + verify_recoding_template::<Bls12_381, UniPoly381>, + ); + } + + #[test] + fn end_to_end() { + run_template::<Bls12_381, _, UniPoly381, _>( + end_to_end_template::<Bls12_381, UniPoly381>, + ); + } + + #[test] + fn end_to_end_with_recoding() { + run_template::<Bls12_381, _, UniPoly381, _>( + end_to_end_with_recoding_template::<Bls12_381, UniPoly381>, + ); } } -- GitLab From d597e0ecac78f2cd9317aec49a27b942ab9e2ac1 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Mon, 25 Mar 2024 15:21:25 +0100 Subject: [PATCH 14/22] format --- src/lib.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index b0ee456f..c3646070 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -442,9 +442,7 @@ mod tests { #[test] fn end_to_end() { - run_template::<Bls12_381, _, UniPoly381, _>( - end_to_end_template::<Bls12_381, UniPoly381>, - ); + run_template::<Bls12_381, _, UniPoly381, _>(end_to_end_template::<Bls12_381, UniPoly381>); } #[test] -- GitLab From a88a70c04a82c70169de9238a8538862cb2af1a4 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Tue, 26 Mar 2024 09:33:09 +0100 Subject: [PATCH 15/22] reduce number of source shards to 3 in the tests this is for the "end to end with recoding" test to pass because the decoding step uses only 3 shards, thus $k$ needs to be equal to 3 at most. 
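(editorial illustration, not part of this commit: the limit comes from `fec::decode`, which returns `KomodoError::TooFewShards` whenever fewer than `k` shards are supplied; the hypothetical check below assumes the crate is named `komodo`, that the `fec` and `error` modules are public, and that the blocks were encoded with `k = 4`)

```rust
// hypothetical sketch: with blocks encoded using k = 4, decoding from only
// 3 shards must fail with `TooFewShards(3, 4)`, i.e. the
// "Expected at least 4 shards, got 3" message introduced in patch 02
fn too_few_shards_example(blocks: &[komodo::Block<ark_bls12_381::Bls12_381>]) {
    let shards: Vec<komodo::fec::Shard<ark_bls12_381::Bls12_381>> =
        blocks.iter().take(3).map(|b| b.shard.clone()).collect();
    assert!(matches!(
        komodo::fec::decode(shards),
        Err(komodo::error::KomodoError::TooFewShards(3, 4))
    ));
}
```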
--- src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib.rs b/src/lib.rs index c3646070..e9cfcc6d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -410,7 +410,7 @@ mod tests { P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>, for<'a, 'b> &'a P: Div<&'b P, Output = P>, { - let (k, n) = (4, 6); + let (k, n) = (3, 6); let bytes = bytes(); let encoding_mat = Matrix::random(k, n); -- GitLab From d11bcd028b2bc8da564bb2d2d8c2e998122da5b8 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Tue, 26 Mar 2024 12:57:44 +0100 Subject: [PATCH 16/22] add documentation to the functions from !47 --- src/fs.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/fs.rs b/src/fs.rs index b7da1614..bd940f45 100644 --- a/src/fs.rs +++ b/src/fs.rs @@ -16,6 +16,12 @@ use crate::Block; const COMPRESS: Compress = Compress::Yes; const VALIDATE: Validate = Validate::Yes; +/// dump any serializable object to the disk +/// +/// - `dumpable` can be anything that is _serializable_ +/// - if `filename` is provided, then it will be used as the filename as is +/// - otherwise, the hash of the _dumpable_ will be computed and used as the +/// filename pub fn dump( dumpable: &impl CanonicalSerialize, dump_dir: &Path, @@ -43,6 +49,8 @@ pub fn dump( Ok(dump_path) } +/// dump a bunch of blocks to the disk and return a JSON / NUON compatible table +/// of all the hashes that have been dumped pub fn dump_blocks<E: Pairing>(blocks: &[Block<E>], block_dir: &PathBuf) -> Result<String> { info!("dumping blocks to `{:?}`", block_dir); let mut hashes = vec![]; @@ -61,6 +69,7 @@ pub fn dump_blocks<E: Pairing>(blocks: &[Block<E>], block_dir: &PathBuf) -> Resu Ok(formatted_output) } +/// read blocks from a list of block hashes pub fn read_blocks<E: Pairing>( block_hashes: &[String], block_dir: &Path, -- GitLab From dc1be6a91e33b93031c03f24c448c39a5f476ec7 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Tue, 26 Mar 2024 12:58:18 +0100 Subject: [PATCH 17/22] return name of dumped file only from `fs::dump` cc @n.dissoubray --- src/fs.rs | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/fs.rs b/src/fs.rs index bd940f45..e1068441 100644 --- a/src/fs.rs +++ b/src/fs.rs @@ -22,11 +22,14 @@ const VALIDATE: Validate = Validate::Yes; /// - if `filename` is provided, then it will be used as the filename as is /// - otherwise, the hash of the _dumpable_ will be computed and used as the /// filename +/// +/// this function will return the name of the file the _dumpable_ has been +/// dumped to. 
pub fn dump( dumpable: &impl CanonicalSerialize, dump_dir: &Path, filename: Option<&str>, -) -> Result<PathBuf> { +) -> Result<String> { info!("serializing the dumpable"); let mut serialized = vec![0; dumpable.serialized_size(COMPRESS)]; dumpable.serialize_with_mode(&mut serialized[..], COMPRESS)?; @@ -40,13 +43,13 @@ pub fn dump( .join(""), }; - let dump_path = dump_dir.join(filename); + let dump_path = dump_dir.join(&filename); info!("dumping dumpable into `{:?}`", dump_path); let mut file = File::create(&dump_path)?; file.write_all(&serialized)?; - Ok(dump_path) + Ok(filename) } /// dump a bunch of blocks to the disk and return a JSON / NUON compatible table @@ -56,8 +59,8 @@ pub fn dump_blocks<E: Pairing>(blocks: &[Block<E>], block_dir: &PathBuf) -> Resu let mut hashes = vec![]; std::fs::create_dir_all(block_dir)?; for block in blocks.iter() { - let filename = dump(block, block_dir, None)?; - hashes.push(filename); + let hash = dump(block, block_dir, None)?; + hashes.push(hash); } let mut formatted_output = String::from("["); -- GitLab From 38ce5a0824af646bf7562486ab701b37c19de9fe Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Tue, 26 Mar 2024 13:10:19 +0100 Subject: [PATCH 18/22] use `push_str` to simplify `dump_blocks` --- src/fs.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/fs.rs b/src/fs.rs index e1068441..001714cb 100644 --- a/src/fs.rs +++ b/src/fs.rs @@ -65,9 +65,9 @@ pub fn dump_blocks<E: Pairing>(blocks: &[Block<E>], block_dir: &PathBuf) -> Resu let mut formatted_output = String::from("["); for hash in &hashes { - formatted_output = format!("{}{:?},", formatted_output, hash); + formatted_output.push_str(&format!("{:?},", hash)); } - formatted_output = format!("{}{}", formatted_output, "]"); + formatted_output.push_str("]"); Ok(formatted_output) } -- GitLab From f2d06c41015e374806acf3dc8af6f7be3ec14cde Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Tue, 26 Mar 2024 13:15:49 +0100 Subject: [PATCH 19/22] no-ci: empty -- GitLab From 894681128f33d7ce656e7242d4b3f60cb3f10643 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Tue, 26 Mar 2024 15:08:15 +0100 Subject: [PATCH 20/22] add documentation to the fs and field modules --- src/field.rs | 1 + src/fs.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/src/field.rs b/src/field.rs index e3e79115..7aaa6e9c 100644 --- a/src/field.rs +++ b/src/field.rs @@ -1,3 +1,4 @@ +//! manipulate finite field elements use ark_ec::pairing::Pairing; use ark_ff::{BigInteger, PrimeField}; use ark_std::One; diff --git a/src/fs.rs b/src/fs.rs index 001714cb..86bd2d35 100644 --- a/src/fs.rs +++ b/src/fs.rs @@ -1,3 +1,4 @@ +//! 
interact with the filesystem, read from and write to it use std::{ fs::File, io::prelude::*, -- GitLab From 6acd91729f9bb61ee8426e8dee424113e6131e82 Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Tue, 26 Mar 2024 15:20:23 +0100 Subject: [PATCH 21/22] fix benchmarks --- benches/recoding.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benches/recoding.rs b/benches/recoding.rs index 01069027..5769f10c 100644 --- a/benches/recoding.rs +++ b/benches/recoding.rs @@ -26,7 +26,7 @@ fn create_fake_shard<E: Pairing>(nb_bytes: usize, k: usize) -> Shard<E> { k: k as u32, linear_combination, hash: vec![], - bytes: field::split_data_into_field_elements::<E>(&bytes, 1), + data: field::split_data_into_field_elements::<E>(&bytes, 1), size: 0, } } -- GitLab From a3f3f2d42f145ba73bafab082479b5d9afccddfc Mon Sep 17 00:00:00 2001 From: "a.stevan" <antoine.stevan@isae-supaero.fr> Date: Tue, 26 Mar 2024 15:20:34 +0100 Subject: [PATCH 22/22] make clippy happy --- src/fs.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/fs.rs b/src/fs.rs index 86bd2d35..e48e3133 100644 --- a/src/fs.rs +++ b/src/fs.rs @@ -68,7 +68,7 @@ pub fn dump_blocks<E: Pairing>(blocks: &[Block<E>], block_dir: &PathBuf) -> Resu for hash in &hashes { formatted_output.push_str(&format!("{:?},", hash)); } - formatted_output.push_str("]"); + formatted_output.push(']'); Ok(formatted_output) } -- GitLab
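To close the series, here is a rough end-to-end sketch of the API as it stands after patch 22, mirroring the `end_to_end` and recoding tests above. The crate name (`komodo`), the public visibility of the `fec`, `linalg` and `setup` modules, and the test-only `setup::random` helper are assumptions taken from the diffs, not guarantees about the published crate.

```rust
use ark_bls12_381::Bls12_381;
use ark_ec::pairing::Pairing;
use ark_poly::univariate::DensePolynomial;

use komodo::{encode, fec, linalg::Matrix, recode, setup, verify};

type E = Bls12_381;
type P = DensePolynomial<<Bls12_381 as Pairing>::ScalarField>;

fn main() {
    let bytes = vec![42u8; 1024]; // any payload
    let (k, n) = (3, 6);

    // test-only trusted setup, sized for the payload
    let powers = setup::random::<E, P>(bytes.len()).unwrap();

    // encode the payload into n proven blocks with a random k x n encoding matrix
    let blocks = encode::<E, P>(&bytes, &Matrix::random(k, n), &powers).unwrap();

    // every block can be verified on its own
    for block in &blocks {
        assert!(verify::<E, P>(block, &powers).unwrap());
    }

    // recode two blocks into a new one, which still verifies
    let recoded = recode(&blocks[0..=1]).unwrap().unwrap();
    assert!(verify::<E, P>(&recoded, &powers).unwrap());

    // any k shards are enough to reconstruct the original payload
    let shards: Vec<fec::Shard<E>> = blocks.iter().take(k).map(|b| b.shard.clone()).collect();
    assert_eq!(bytes, fec::decode(shards).unwrap());
}
```

The `fs::dump_blocks` and `fs::read_blocks` helpers from patches 16 to 18 can then be used to persist `blocks` to disk and reload them from the returned list of hashes.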