Commit

implemented canonical serialization
TakodaS committed Sep 7, 2023
1 parent 2e93ba9 commit a4b81ef
Showing 7 changed files with 92 additions and 102 deletions.
7 changes: 2 additions & 5 deletions src/ahp/constraint_systems.rs
@@ -6,11 +6,8 @@ use crate::BTreeMap;
use ark_ff::{Field, PrimeField};
use ark_poly::{EvaluationDomain, Evaluations as EvaluationsOnDomain, GeneralEvaluationDomain};
use ark_relations::{lc, r1cs::ConstraintSystemRef};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
use ark_std::{
cfg_iter_mut,
io::{Read, Write},
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::cfg_iter_mut;
use derivative::Derivative;

/* ************************************************************************* */
21 changes: 15 additions & 6 deletions src/ahp/indexer.rs
@@ -12,7 +12,7 @@ use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};
use ark_relations::r1cs::{
ConstraintSynthesizer, ConstraintSystem, OptimizationGoal, SynthesisError, SynthesisMode,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::{
    io::Write,
    marker::PhantomData,
@@ -60,11 +60,20 @@ impl<F> IndexInfo<F> {
}
}

impl<F: PrimeField> ark_ff::ToBytes for IndexInfo<F> {
    fn write<W: Write>(&self, mut w: W) -> ark_std::io::Result<()> {
        (self.num_variables as u64).write(&mut w)?;
        (self.num_constraints as u64).write(&mut w)?;
        (self.num_non_zero as u64).write(&mut w)
    }
}

impl<F: PrimeField> CanonicalSerialize for IndexInfo<F> {
    fn serialize_with_mode<W: Write>(
        &self,
        mut writer: W,
        compress: ark_serialize::Compress,
    ) -> Result<(), ark_serialize::SerializationError> {
        (self.num_variables as u64).serialize_with_mode(&mut writer, compress)?;
        (self.num_constraints as u64).serialize_with_mode(&mut writer, compress)?;
        (self.num_non_zero as u64).serialize_with_mode(&mut writer, compress)
    }

    fn serialized_size(&self, _compress: ark_serialize::Compress) -> usize {
        3 * ark_std::mem::size_of::<u64>()
    }
}
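
For context, a quick sanity check of the new impl could look like the following (a hypothetical sketch, not part of this commit; it assumes some concrete `PrimeField` in scope):

use ark_serialize::{CanonicalSerialize, Compress, SerializationError};

// Hypothetical helper: serialize an IndexInfo and confirm the reported size.
// The encoding is three u64 values, i.e. 24 bytes, regardless of `compress`.
fn index_info_bytes<F: PrimeField>(info: &IndexInfo<F>) -> Result<Vec<u8>, SerializationError> {
    let mut bytes = Vec::new();
    info.serialize_with_mode(&mut bytes, Compress::Yes)?;
    debug_assert_eq!(bytes.len(), info.serialized_size(Compress::Yes));
    Ok(bytes)
}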

2 changes: 1 addition & 1 deletion src/ahp/mod.rs
@@ -334,7 +334,7 @@ mod tests {
use ark_ff::{One, UniformRand, Zero};
use ark_poly::{
univariate::{DenseOrSparsePolynomial, DensePolynomial},
Polynomial, UVPolynomial,
Polynomial, DenseUVPolynomial,
};

#[test]
83 changes: 15 additions & 68 deletions src/ahp/prover.rs
@@ -11,11 +11,13 @@ use crate::{ToString, Vec};
use ark_ff::{Field, PrimeField, Zero};
use ark_poly::{
    univariate::DensePolynomial, EvaluationDomain, Evaluations as EvaluationsOnDomain,
    GeneralEvaluationDomain, Polynomial, UVPolynomial,
    GeneralEvaluationDomain, Polynomial, DenseUVPolynomial,
};
use ark_relations::r1cs::{
    ConstraintSynthesizer, ConstraintSystem, OptimizationGoal, SynthesisError,
};
use ark_serialize::Compress;
use ark_serialize::Validate;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError, Valid};
use ark_std::rand::RngCore;
use ark_std::{
@@ -72,86 +74,31 @@ pub enum ProverMsg<F: Field> {
FieldElements(Vec<F>),
}

impl<F: Field> ark_ff::ToBytes for ProverMsg<F> {
    fn write<W: Write>(&self, w: W) -> ark_std::io::Result<()> {
        match self {
            ProverMsg::EmptyMessage => Ok(()),
            ProverMsg::FieldElements(field_elems) => field_elems.write(w),
        }
    }
}

impl<F: Field> CanonicalSerialize for ProverMsg<F> {
    fn serialize<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
        let res: Option<Vec<F>> = match self {
            ProverMsg::EmptyMessage => None,
            ProverMsg::FieldElements(v) => Some(v.clone()),
        };
        res.serialize(&mut writer)
    }

    fn serialized_size(&self) -> usize {
        let res: Option<Vec<F>> = match self {
            ProverMsg::EmptyMessage => None,
            ProverMsg::FieldElements(v) => Some(v.clone()),
        };
        res.serialized_size()
    }

    fn serialize_unchecked<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
        let res: Option<Vec<F>> = match self {
            ProverMsg::EmptyMessage => None,
            ProverMsg::FieldElements(v) => Some(v.clone()),
        };
        res.serialize_unchecked(&mut writer)
    }

    fn serialize_uncompressed<W: Write>(&self, mut writer: W) -> Result<(), SerializationError> {
        let res: Option<Vec<F>> = match self {
            ProverMsg::EmptyMessage => None,
            ProverMsg::FieldElements(v) => Some(v.clone()),
        };
        res.serialize_uncompressed(&mut writer)
    }

    fn uncompressed_size(&self) -> usize {
        let res: Option<Vec<F>> = match self {
            ProverMsg::EmptyMessage => None,
            ProverMsg::FieldElements(v) => Some(v.clone()),
        };
        res.uncompressed_size()
    }
}

impl<F: Field> CanonicalDeserialize for ProverMsg<F> {
    fn deserialize<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
        let res = Option::<Vec<F>>::deserialize(&mut reader)?;
        if let Some(res) = res {
            Ok(ProverMsg::FieldElements(res))
        } else {
            Ok(ProverMsg::EmptyMessage)
        }
    }

    fn deserialize_unchecked<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
        let res = Option::<Vec<F>>::deserialize_unchecked(&mut reader)?;
        if let Some(res) = res {
            Ok(ProverMsg::FieldElements(res))
        } else {
            Ok(ProverMsg::EmptyMessage)
        }
    }

    fn deserialize_uncompressed<R: Read>(mut reader: R) -> Result<Self, SerializationError> {
        let res = Option::<Vec<F>>::deserialize_uncompressed(&mut reader)?;
        if let Some(res) = res {
            Ok(ProverMsg::FieldElements(res))
        } else {
            Ok(ProverMsg::EmptyMessage)
        }
    }
}

impl<F: Field> CanonicalSerialize for ProverMsg<F> {
    fn serialize_with_mode<W: Write>(
        &self,
        mut writer: W,
        compress: Compress,
    ) -> Result<(), SerializationError> {
        // Encode as `Option<Vec<F>>` so that `EmptyMessage` and
        // `FieldElements` remain distinguishable on the wire.
        let res: Option<Vec<F>> = match self {
            ProverMsg::EmptyMessage => None,
            ProverMsg::FieldElements(v) => Some(v.clone()),
        };
        res.serialize_with_mode(&mut writer, compress)
    }

    fn serialized_size(&self, compress: Compress) -> usize {
        let res: Option<Vec<F>> = match self {
            ProverMsg::EmptyMessage => None,
            ProverMsg::FieldElements(v) => Some(v.clone()),
        };
        res.serialized_size(compress)
    }
}

// `CanonicalDeserialize` requires a `Valid` impl in ark-serialize 0.4.
impl<F: Field> Valid for ProverMsg<F> {
    fn check(&self) -> Result<(), SerializationError> {
        match self {
            ProverMsg::EmptyMessage => Ok(()),
            ProverMsg::FieldElements(v) => v.check(),
        }
    }
}

impl<F: Field> CanonicalDeserialize for ProverMsg<F> {
    fn deserialize_with_mode<R: Read>(
        mut reader: R,
        compress: Compress,
        validate: Validate,
    ) -> Result<Self, SerializationError> {
        let res = Option::<Vec<F>>::deserialize_with_mode(&mut reader, compress, validate)?;
        if let Some(res) = res {
            Ok(ProverMsg::FieldElements(res))
        } else {
            Ok(ProverMsg::EmptyMessage)
        }
    }
}
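
Because both variants are encoded as an `Option<Vec<F>>`, a round-trip should always recover the original variant. A hypothetical test sketch (not part of this commit):

use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};

fn roundtrip<F: Field>(msg: &ProverMsg<F>) {
    let mut bytes = Vec::new();
    msg.serialize_with_mode(&mut bytes, Compress::Yes).unwrap();
    assert_eq!(bytes.len(), msg.serialized_size(Compress::Yes));

    let decoded =
        ProverMsg::<F>::deserialize_with_mode(&bytes[..], Compress::Yes, Validate::Yes).unwrap();
    match (msg, &decoded) {
        (ProverMsg::EmptyMessage, ProverMsg::EmptyMessage) => {}
        (ProverMsg::FieldElements(a), ProverMsg::FieldElements(b)) => assert_eq!(a, b),
        _ => panic!("variant changed during round-trip"),
    }
}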

36 changes: 24 additions & 12 deletions src/data_structures.rs
@@ -4,10 +4,10 @@ use crate::Vec;
use ark_ff::PrimeField;
use ark_poly::univariate::DensePolynomial;
use ark_poly_commit::{BatchLCProof, PolynomialCommitment};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::{
format,
io::{Read, Write},
io::Write,
};

/* ************************************************************************* */
@@ -33,12 +33,24 @@ pub struct IndexVerifierKey<F: PrimeField, PC: PolynomialCommitment<F, DensePoly
pub verifier_key: PC::VerifierKey,
}

impl<F: PrimeField, PC: PolynomialCommitment<F, DensePolynomial<F>>> ark_ff::ToBytes
impl<F: PrimeField, PC: PolynomialCommitment<F, DensePolynomial<F>>> CanonicalSerialize
for IndexVerifierKey<F, PC>
{
    fn write<W: Write>(&self, mut w: W) -> ark_std::io::Result<()> {
        self.index_info.write(&mut w)?;
        self.index_comms.write(&mut w)
    }

    fn serialize_with_mode<W: Write>(
        &self,
        mut writer: W,
        compress: ark_serialize::Compress,
    ) -> Result<(), ark_serialize::SerializationError> {
        self.index_info.serialize_with_mode(&mut writer, compress)?;
        self.index_comms.serialize_with_mode(&mut writer, compress)?;
        self.verifier_key.serialize_with_mode(&mut writer, compress)?;
        Ok(())
    }

    fn serialized_size(&self, compress: ark_serialize::Compress) -> usize {
        self.index_info.serialized_size(compress)
            + self.index_comms.serialized_size(compress)
            + self.verifier_key.serialized_size(compress)
    }
}
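
Since `serialized_size` now takes a `Compress` argument, both sizes can be read off the same value. A hypothetical helper (not part of this commit) illustrating the two modes:

use ark_serialize::{CanonicalSerialize, Compress};

// Report (compressed, uncompressed) byte sizes for any serializable value.
fn size_report<T: CanonicalSerialize>(value: &T) -> (usize, usize) {
    (
        value.serialized_size(Compress::Yes),
        value.serialized_size(Compress::No),
    )
}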

@@ -136,19 +148,19 @@ impl<F: PrimeField, PC: PolynomialCommitment<F, DensePolynomial<F>>> Proof<F, PC
for c in self.commitments.iter().flat_map(|c| c) {
if !c.has_degree_bound() {
num_comms_without_degree_bounds += 1;
size_bytes_comms_without_degree_bounds += c.serialized_size();
size_bytes_comms_without_degree_bounds += c.compressed_size();
} else {
num_comms_with_degree_bounds += 1;
size_bytes_comms_with_degree_bounds += c.serialized_size();
size_bytes_comms_with_degree_bounds += c.compressed_size();
}
}

let proofs: Vec<PC::Proof> = self.pc_proof.proof.clone().into();
let num_proofs = proofs.len();
let size_bytes_proofs = self.pc_proof.proof.serialized_size();
let size_bytes_proofs = self.pc_proof.proof.compressed_size();

let num_evals = self.evaluations.len();
let evals_size_in_bytes = self.evaluations.serialized_size();
let evals_size_in_bytes = self.evaluations.compressed_size();
let num_prover_messages: usize = self
.prover_messages
.iter()
@@ -157,8 +169,8 @@ impl<F: PrimeField, PC: PolynomialCommitment<F, DensePolynomial<F>>> Proof<F, PC
ProverMsg::FieldElements(elems) => elems.len(),
})
.sum();
let prover_msg_size_in_bytes = self.prover_messages.serialized_size();
let arg_size = self.serialized_size();
let prover_msg_size_in_bytes = self.prover_messages.compressed_size();
let arg_size = self.compressed_size();
let stats = format!(
"Argument size in bytes: {}\n\n\
Number of commitments without degree bounds: {}\n\
31 changes: 28 additions & 3 deletions src/lib.rs
@@ -8,17 +8,17 @@
//! matrices are square). Furthermore, Marlin only supports instances where the
//! public inputs are of size one less than a power of 2 (i.e., 2^n - 1).
#![deny(unused_import_braces, unused_qualifications, trivial_casts)]
#![deny(trivial_numeric_casts, private_in_public)]
#![deny(trivial_numeric_casts)]
#![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)]
#![deny(unused_attributes, unused_imports, unused_mut, missing_docs)]
#![deny(renamed_and_removed_lints, stable_features, unused_allocation)]
#![deny(unused_comparisons, bare_trait_objects, unused_must_use, const_err)]
#![deny(unused_comparisons, bare_trait_objects, unused_must_use)]
#![forbid(unsafe_code)]

#[macro_use]
extern crate ark_std;

use ark_ff::{to_bytes, PrimeField, UniformRand};
use ark_ff::{PrimeField, UniformRand};
use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain};
use ark_poly_commit::Evaluations;
use ark_poly_commit::{LabeledCommitment, PCUniversalParams, PolynomialCommitment};
@@ -33,13 +33,38 @@ use ark_std::{
vec,
vec::Vec,
};
use ark_serialize::CanonicalSerialize;

#[cfg(not(feature = "std"))]
macro_rules! eprintln {
() => {};
($($arg: tt)*) => {};
}

/// Takes as input a sequence of values, and converts them to a series of
/// bytes. Any type that implements `CanonicalSerialize` can be converted to
/// bytes in this manner.
#[macro_export]
macro_rules! to_bytes {
    ($($x:expr),*) => ({
        let mut buf = $crate::vec![];
        {$crate::push_to_vec!(buf, $($x),*)}.map(|_| buf)
    });
}

#[doc(hidden)]
#[macro_export]
macro_rules! push_to_vec {
    ($buf:expr, $y:expr, $($x:expr),*) => ({
        {
            $crate::CanonicalSerialize::serialize_compressed(&$y, &mut $buf)
        }.and({$crate::push_to_vec!($buf, $($x),*)})
    });

    ($buf:expr, $x:expr) => ({
        $crate::CanonicalSerialize::serialize_compressed(&$x, &mut $buf)
    })
}
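
Usage mirrors the old `ark_ff::to_bytes!`: each argument is serialized in compressed form and appended to a single buffer. A hypothetical sketch (`Fr` stands in for any concrete field type with `CanonicalSerialize`):

use ark_ff::One;

let a = Fr::one();
let v = vec![Fr::one(), Fr::one()];
let bytes: Vec<u8> = to_bytes![a, v].expect("serialization should not fail");
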
/// Implements a Fiat-Shamir based Rng that allows one to incrementally update
/// the seed based on new messages in the proof transcript.
pub mod rng;
14 changes: 7 additions & 7 deletions src/rng.rs
@@ -1,5 +1,5 @@
use crate::Vec;
use ark_ff::{FromBytes, ToBytes};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::convert::From;
use ark_std::marker::PhantomData;
use ark_std::rand::{RngCore, SeedableRng};
@@ -8,9 +8,9 @@ use digest::Digest;
/// An RNG suitable for Fiat-Shamir transforms
pub trait FiatShamirRng: RngCore {
    /// Create a new `Self` with an initial input
    fn initialize<'a, T: 'a + ToBytes>(initial_input: &'a T) -> Self;
    fn initialize<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self;
    /// Absorb new inputs into state
    fn absorb<'a, T: 'a + ToBytes>(&mut self, new_input: &'a T);
    fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T);
}

/// A simple `FiatShamirRng` that refreshes its seed by hashing together the previous seed
@@ -51,12 +51,12 @@ where
    /// Create a new `Self` by initializing with a fresh seed.
    /// `self.seed = H(initial_input)`.
    #[inline]
    fn initialize<'a, T: 'a + ToBytes>(initial_input: &'a T) -> Self {
    fn initialize<'a, T: 'a + CanonicalSerialize>(initial_input: &'a T) -> Self {
        let mut bytes = Vec::new();
        initial_input
            .serialize_compressed(&mut bytes)
            .expect("failed to convert to bytes");
        let seed = FromBytes::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]");
        let seed: [u8; 32] = CanonicalDeserialize::deserialize_compressed(D::digest(&bytes).as_ref())
            .expect("failed to get [u8; 32]");
        let r = R::from_seed(<R::Seed>::from(seed));
        Self {
            r,
@@ -68,13 +68,13 @@
    /// Refresh `self.seed` with new material. Achieved by setting
    /// `self.seed = H(new_input || self.seed)`.
    #[inline]
    fn absorb<'a, T: 'a + ToBytes>(&mut self, new_input: &'a T) {
    fn absorb<'a, T: 'a + CanonicalSerialize>(&mut self, new_input: &'a T) {
        let mut bytes = Vec::new();
        new_input
            .serialize_compressed(&mut bytes)
            .expect("failed to convert to bytes");
        bytes.extend_from_slice(&self.seed);
        self.seed = FromBytes::read(D::digest(&bytes).as_ref()).expect("failed to get [u8; 32]");
        self.seed = CanonicalDeserialize::deserialize_compressed(D::digest(&bytes).as_ref())
            .expect("failed to get [u8; 32]");
        self.r = R::from_seed(<R::Seed>::from(self.seed));
    }
}
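
With the trait now bounded on `CanonicalSerialize`, any serializable value can seed or refresh the transcript. A hypothetical usage sketch, assuming the `SimpleHashFiatShamirRng<D, R>` type defined in this file, instantiated with `blake2::Blake2s` and `rand_chacha::ChaChaRng` (the pairing used in the test suite):

use ark_std::rand::RngCore;

type FS = SimpleHashFiatShamirRng<blake2::Blake2s, rand_chacha::ChaChaRng>;

// Seed the transcript, absorb a prover message, then squeeze a challenge.
let mut fs_rng = FS::initialize(&vec![0u8; 32]);
fs_rng.absorb(&vec![1u64, 2, 3]);
let challenge = fs_rng.next_u64();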
