diff --git a/src/ahp/constraint_systems.rs b/src/ahp/constraint_systems.rs
index ddac9ca..bee9525 100644
--- a/src/ahp/constraint_systems.rs
+++ b/src/ahp/constraint_systems.rs
@@ -6,11 +6,8 @@ use crate::BTreeMap;
 use ark_ff::{Field, PrimeField};
 use ark_poly::{EvaluationDomain, Evaluations as EvaluationsOnDomain, GeneralEvaluationDomain};
 use ark_relations::{lc, r1cs::ConstraintSystemRef};
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
-use ark_std::{
-    cfg_iter_mut,
-    io::{Read, Write},
-};
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
+use ark_std::cfg_iter_mut;
 use derivative::Derivative;
 
 /* ************************************************************************* */
diff --git a/src/ahp/indexer.rs b/src/ahp/indexer.rs
index b43c580..cdf2f32 100644
--- a/src/ahp/indexer.rs
+++ b/src/ahp/indexer.rs
@@ -12,7 +12,7 @@ use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
 use ark_relations::r1cs::{
     ConstraintSynthesizer, ConstraintSystem, OptimizationGoal, SynthesisError, SynthesisMode,
 };
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use ark_std::{
     io::{Read, Write},
     marker::PhantomData,
@@ -60,11 +60,19 @@ impl<F> IndexInfo<F> {
     }
 }
 
-impl<F> ark_ff::ToBytes for IndexInfo<F> {
-    fn write<W: Write>(&self, mut w: W) -> ark_std::io::Result<()> {
-        (self.num_variables as u64).write(&mut w)?;
-        (self.num_constraints as u64).write(&mut w)?;
-        (self.num_non_zero as u64).write(&mut w)
+impl<F> CanonicalSerialize for IndexInfo<F> {
+    fn serialize_with_mode<W: ark_serialize::Write>(
+        &self,
+        mut writer: W,
+        compress: ark_serialize::Compress,
+    ) -> Result<(), ark_serialize::SerializationError> {
+        (self.num_variables as u64).serialize_with_mode(&mut writer, compress)?;
+        (self.num_constraints as u64).serialize_with_mode(&mut writer, compress)?;
+        (self.num_non_zero as u64).serialize_with_mode(&mut writer, compress)
+    }
+
+    fn serialized_size(&self, _compress: ark_serialize::Compress) -> usize {
+        3 * ark_std::mem::size_of::<u64>()
     }
 }
 
diff --git a/src/ahp/mod.rs b/src/ahp/mod.rs
index 383a6e8..e2bc747 100644
--- a/src/ahp/mod.rs
+++ b/src/ahp/mod.rs
@@ -334,7 +334,7 @@ mod tests {
     use ark_ff::{One, UniformRand, Zero};
     use ark_poly::{
         univariate::{DenseOrSparsePolynomial, DensePolynomial},
-        Polynomial, UVPolynomial,
+        DenseUVPolynomial, Polynomial,
     };
 
     #[test]
diff --git a/src/ahp/prover.rs b/src/ahp/prover.rs
index 60039f7..44902ee 100644
--- a/src/ahp/prover.rs
+++ b/src/ahp/prover.rs
@@ -11,11 +11,13 @@ use crate::{ToString, Vec};
 use ark_ff::{Field, PrimeField, Zero};
 use ark_poly::{
     univariate::DensePolynomial, EvaluationDomain, Evaluations as EvaluationsOnDomain,
-    GeneralEvaluationDomain, Polynomial, UVPolynomial,
+    DenseUVPolynomial, GeneralEvaluationDomain, Polynomial,
 };
 use ark_relations::r1cs::{
     ConstraintSynthesizer, ConstraintSystem, OptimizationGoal, SynthesisError,
 };
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
+use ark_serialize::{
+    CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError, Valid, Validate,
+};
 use ark_std::rand::RngCore;
 use ark_std::{
@@ -72,86 +74,44 @@ pub enum ProverMsg<F: Field> {
     FieldElements(Vec<F>),
 }
 
-impl<F: Field> ark_ff::ToBytes for ProverMsg<F> {
-    fn write<W: Write>(&self, w: W) -> ark_std::io::Result<()> {
-        match self {
-            ProverMsg::EmptyMessage => Ok(()),
-            ProverMsg::FieldElements(field_elems) => field_elems.write(w),
-        }
-    }
-}
-
 impl<F: Field> CanonicalSerialize for ProverMsg<F> {
-    fn serialize<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
+    fn serialize_with_mode<W: Write>(
+        &self,
+        mut writer: W,
+        compress: Compress,
+    ) -> Result<(), SerializationError> {
         let res: Option<Vec<F>> = match self {
             ProverMsg::EmptyMessage => None,
             ProverMsg::FieldElements(v) => Some(v.clone()),
         };
-        res.serialize(&mut writer)
+        res.serialize_with_mode(&mut writer, compress)
     }
 
-    fn serialized_size(&self) -> usize {
+    fn serialized_size(&self, compress: Compress) -> usize {
         let res: Option<Vec<F>> = match self {
             ProverMsg::EmptyMessage => None,
             ProverMsg::FieldElements(v) => Some(v.clone()),
         };
-        res.serialized_size()
-    }
-
-    fn serialize_unchecked<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        let res: Option<Vec<F>> = match self {
-            ProverMsg::EmptyMessage => None,
-            ProverMsg::FieldElements(v) => Some(v.clone()),
-        };
-        res.serialize_unchecked(&mut writer)
-    }
-
-    fn serialize_uncompressed<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
-        let res: Option<Vec<F>> = match self {
-            ProverMsg::EmptyMessage => None,
-            ProverMsg::FieldElements(v) => Some(v.clone()),
-        };
-        res.serialize_uncompressed(&mut writer)
-    }
-
-    fn uncompressed_size(&self) -> usize {
-        let res: Option<Vec<F>> = match self {
-            ProverMsg::EmptyMessage => None,
-            ProverMsg::FieldElements(v) => Some(v.clone()),
-        };
-        res.uncompressed_size()
+        res.serialized_size(compress)
     }
 }
 
+impl<F: Field> Valid for ProverMsg<F> {
+    fn check(&self) -> Result<(), SerializationError> {
+        Ok(())
+    }
+}
+
 impl<F: Field> CanonicalDeserialize for ProverMsg<F> {
-    fn deserialize<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let res = Option::<Vec<F>>::deserialize(&mut reader)?;
-
-        if let Some(res) = res {
-            Ok(ProverMsg::FieldElements(res))
-        } else {
-            Ok(ProverMsg::EmptyMessage)
-        }
-    }
-
-    fn deserialize_unchecked<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let res = Option::<Vec<F>>::deserialize_unchecked(&mut reader)?;
-
-        if let Some(res) = res {
-            Ok(ProverMsg::FieldElements(res))
-        } else {
-            Ok(ProverMsg::EmptyMessage)
-        }
-    }
-
-    fn deserialize_uncompressed<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
-        let res = Option::<Vec<F>>::deserialize_uncompressed(&mut reader)?;
-
-        if let Some(res) = res {
-            Ok(ProverMsg::FieldElements(res))
-        } else {
-            Ok(ProverMsg::EmptyMessage)
-        }
+    fn deserialize_with_mode<R: Read>(
+        mut reader: R,
+        compress: Compress,
+        validate: Validate,
+    ) -> Result<Self, SerializationError> {
+        let res = Option::<Vec<F>>::deserialize_with_mode(&mut reader, compress, validate)?;
+        Ok(match res {
+            Some(v) => ProverMsg::FieldElements(v),
+            None => ProverMsg::EmptyMessage,
+        })
     }
 }
 
diff --git a/src/data_structures.rs b/src/data_structures.rs
index 66b5643..84b7488 100644
--- a/src/data_structures.rs
+++ b/src/data_structures.rs
@@ -4,10 +4,10 @@
 use crate::Vec;
 use ark_ff::PrimeField;
 use ark_poly::univariate::DensePolynomial;
 use ark_poly_commit::{BatchLCProof, PolynomialCommitment};
-use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use ark_std::{
     format,
-    io::{Read, Write},
+    io::Write,
 };
 
@@ -33,12 +33,24 @@ pub struct IndexVerifierKey<F: PrimeField, PC: PolynomialCommitment<F, DensePolynomial<F>>> {
     pub verifier_key: PC::VerifierKey,
 }
 
-impl<F: PrimeField, PC: PolynomialCommitment<F, DensePolynomial<F>>> ark_ff::ToBytes
+impl<F: PrimeField, PC: PolynomialCommitment<F, DensePolynomial<F>>> CanonicalSerialize
     for IndexVerifierKey<F, PC>
 {
-    fn write<W: Write>(&self, mut w: W) -> ark_std::io::Result<()> {
-        self.index_info.write(&mut w)?;
-        self.index_comms.write(&mut w)
+    fn serialize_with_mode<W: ark_serialize::Write>(
+        &self,
+        mut writer: W,
+        compress: ark_serialize::Compress,
+    ) -> Result<(), ark_serialize::SerializationError> {
+        self.index_info.serialize_with_mode(&mut writer, compress)?;
+        self.index_comms.serialize_with_mode(&mut writer, compress)?;
+        self.verifier_key.serialize_with_mode(&mut writer, compress)?;
+        Ok(())
+    }
+
+    fn serialized_size(&self, compress: ark_serialize::Compress) -> usize {
+        self.index_info.serialized_size(compress)
+            + self.index_comms.serialized_size(compress)
+            + self.verifier_key.serialized_size(compress)
     }
 }
 
@@ -136,19 +148,19 @@ impl<F: PrimeField, PC: PolynomialCommitment<F, DensePolynomial<F>>> Proof<F, PC> {
         for c in commitments {
             if !c.has_degree_bound() {
                 num_comms_without_degree_bounds += 1;
-                size_bytes_comms_without_degree_bounds += c.serialized_size();
+                size_bytes_comms_without_degree_bounds += c.compressed_size();
             } else {
                 num_comms_with_degree_bounds += 1;
-                size_bytes_comms_with_degree_bounds += c.serialized_size();
+                size_bytes_comms_with_degree_bounds += c.compressed_size();
             }
         }
 
         let proofs: Vec<PC::Proof> = self.pc_proof.proof.clone().into();
         let num_proofs = proofs.len();
-        let size_bytes_proofs = self.pc_proof.proof.serialized_size();
+        let size_bytes_proofs = self.pc_proof.proof.compressed_size();
 
         let num_evals = self.evaluations.len();
-        let evals_size_in_bytes = self.evaluations.serialized_size();
+        let evals_size_in_bytes = self.evaluations.compressed_size();
         let num_prover_messages: usize = self
             .prover_messages
             .iter()
@@ -157,8 +169,8 @@ impl<F: PrimeField, PC: PolynomialCommitment<F, DensePolynomial<F>>> Proof<F, PC> {
                 ProverMsg::FieldElements(elems) => elems.len(),
             })
             .sum();
-        let prover_msg_size_in_bytes = self.prover_messages.serialized_size();
-        let arg_size = self.serialized_size();
+        let prover_msg_size_in_bytes = self.prover_messages.compressed_size();
+        let arg_size = self.compressed_size();
         let stats = format!(
             "Argument size in bytes: {}\n\n\
              Number of commitments without degree bounds: {}\n\
diff --git a/src/lib.rs b/src/lib.rs
index 7ca47b4..aa49b90 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -8,17 +8,17 @@
 //! matrices are square). Furthermore, Marlin only supports instances where the
 //! public inputs are of size one less than a power of 2 (i.e., 2^n - 1).
 #![deny(unused_import_braces, unused_qualifications, trivial_casts)]
-#![deny(trivial_numeric_casts, private_in_public)]
+#![deny(trivial_numeric_casts)]
 #![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)]
 #![deny(unused_attributes, unused_imports, unused_mut, missing_docs)]
 #![deny(renamed_and_removed_lints, stable_features, unused_allocation)]
-#![deny(unused_comparisons, bare_trait_objects, unused_must_use, const_err)]
+#![deny(unused_comparisons, bare_trait_objects, unused_must_use)]
 #![forbid(unsafe_code)]
 
 #[macro_use]
 extern crate ark_std;
 
-use ark_ff::{to_bytes, PrimeField, UniformRand};
+use ark_ff::{PrimeField, UniformRand};
 use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain};
 use ark_poly_commit::Evaluations;
 use ark_poly_commit::{LabeledCommitment, PCUniversalParams, PolynomialCommitment};
@@ -33,6 +33,8 @@ use ark_std::{
     vec,
     vec::Vec,
 };
+#[doc(hidden)]
+pub use ark_serialize::CanonicalSerialize;
 
 #[cfg(not(feature = "std"))]
 macro_rules! eprintln {
@@ -40,6 +42,31 @@ macro_rules! eprintln {
     ($($arg: tt)*) => {};
 }
 
+/// Takes as input a sequence of structs, and converts them to a series of
+/// bytes. All types that implement `CanonicalSerialize` can be converted to
+/// bytes in this manner.
+#[macro_export]
+macro_rules! to_bytes {
+    ($($x:expr),*) => ({
+        let mut buf = $crate::vec![];
+        {$crate::push_to_vec!(buf, $($x),*)}.map(|_| buf)
+    });
+}
+
+#[doc(hidden)]
+#[macro_export]
+macro_rules! push_to_vec {
+    ($buf:expr, $y:expr, $($x:expr),*) => ({
+        {
+            $crate::CanonicalSerialize::serialize_compressed(&$y, &mut $buf)
+        }.and({$crate::push_to_vec!($buf, $($x),*)})
+    });
+
+    ($buf:expr, $x:expr) => ({
+        $crate::CanonicalSerialize::serialize_compressed(&$x, &mut $buf)
+    })
+}
+
 /// Implements a Fiat-Shamir based Rng that allows one to incrementally update
 /// the seed based on new messages in the proof transcript.
 pub mod rng;
diff --git a/src/rng.rs b/src/rng.rs
index efea8e5..0bf3689 100644
--- a/src/rng.rs
+++ b/src/rng.rs
@@ -1,5 +1,5 @@
 use crate::Vec;
-use ark_ff::{FromBytes, ToBytes};
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use ark_std::convert::From;
 use ark_std::marker::PhantomData;
 use ark_std::rand::{RngCore, SeedableRng};
@@ -8,9 +8,9 @@ use digest::Digest;
 
 /// An RNG suitable for Fiat-Shamir transforms
 pub trait FiatShamirRng: RngCore {
     /// Create a new `Self` with an initial input
-    fn initialize<'a, T: 'a + ToBytes>(initial_input: &'a T) -> Self;
+    fn initialize<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self;
     /// Absorb new inputs into state
-    fn absorb<'a, T: 'a + ToBytes>(&mut self, new_input: &'a T);
+    fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T);
 }
 
@@ -51,12 +51,13 @@ where
     /// Create a new `Self` by initializing with a fresh seed.
     /// `self.seed = H(initial_input)`.
     #[inline]
-    fn initialize<'a, T: 'a + ToBytes>(initial_input: &'a T) -> Self {
+    fn initialize<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self {
         let mut bytes = Vec::new();
         initial_input
-            .write(&mut bytes)
+            .serialize_compressed(&mut bytes)
             .expect("failed to convert to bytes");
-        let seed = FromBytes::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]");
+        let seed: [u8; 32] = CanonicalDeserialize::deserialize_compressed(D::digest(&bytes).as_ref())
+            .expect("failed to get [u8; 32]");
         let r = R::from_seed(<R::Seed>::from(seed));
         Self {
             r,
@@ -68,13 +69,14 @@ where
     /// Refresh `self.seed` with new material. Achieved by setting
     /// `self.seed = H(new_input || self.seed)`.
     #[inline]
-    fn absorb<'a, T: 'a + ToBytes>(&mut self, new_input: &'a T) {
+    fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T) {
         let mut bytes = Vec::new();
         new_input
-            .write(&mut bytes)
+            .serialize_compressed(&mut bytes)
             .expect("failed to convert to bytes");
         bytes.extend_from_slice(&self.seed);
-        self.seed = FromBytes::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]");
+        self.seed = CanonicalDeserialize::deserialize_compressed(D::digest(&bytes).as_ref())
+            .expect("failed to get [u8; 32]");
         self.r = R::from_seed(<R::Seed>::from(self.seed));
     }
 }