Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Upgrade to arkworks 0.4.0/0.4.2 #246

Merged
merged 5 commits into the base branch
Dec 17, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
427 changes: 177 additions & 250 deletions Cargo.lock

Large diffs are not rendered by default.

16 changes: 8 additions & 8 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,12 @@ fs_extra = "1.3.0"
dirs = "4.0"

[dependencies]
ark-ec = "0.3.0" # elliptic curve library
ark-ff = "0.3.0"
ark-bls12-381 = "0.3.0" # bls12-381 curve for r1cs backend
ark-relations = "0.3.0"
ark-bn254 = "0.3.0" # bn128 curve for r1cs backend
ark-serialize = "0.3.0" # serialization of arkworks types
ark-ec = "0.4.2" # elliptic curve library
ark-ff = "0.4.2"
ark-bls12-381 = "0.4.0" # bls12-381 curve for r1cs backend
ark-relations = "0.4.0"
ark-bn254 = "0.4.0" # bn128 curve for r1cs backend
ark-serialize = "0.4.2" # serialization of arkworks types
axum = { version = "0.7.7", features = ["macros"] } # web server
base64 = "0.22.1" # for base64 encoding
educe = { version = "0.6", default-features = false, features = [
Expand All @@ -32,8 +32,7 @@ camino = "1.1.1" # to replace Path and PathBuf
clap = { version = "4.0.5", features = ["derive"] } # CLI library
dirs = "4.0.0" # helper functions (e.g. getting the home directory)
itertools = "0.10.3" # useful iter traits
kimchi = { git = "https://github.com/o1-labs/proof-systems", rev = "a5d8883ddf649c22f38aaac122d368ecb9fa2230" } # ZKP - Dec 5th, 2023 revision
#kimchi = { git = "https://github.com/o1-labs/proof-systems", rev = "b9589626f834f9dbf9d587e73fd8176171231e90" } # ZKP
kimchi = { git = "https://github.com/o1-labs/proof-systems", rev = "5b4ac1437e7912237be88d97b4b4891b22e3e61f" } # ZKP
miette = { version = "5.0.0", features = ["fancy"] } # nice errors
num-traits = "0.2.15" # useful traits on big ints
once_cell = "1.15.0" # for lazy statics
Expand All @@ -46,6 +45,7 @@ thiserror = "1.0.31"
toml = "0.8.8" # to parse manifest files
constraint_writers = { git = "https://github.com/iden3/circom.git", tag = "v2.1.8" } # to generate r1cs file
num-bigint-dig = "0.6.0" # to adapt for circom lib
rand = "0.8.5"
rstest = "0.19.0" # for testing different backend cases
rug = "1.26.1" # circ uses this for integer type
circ = { git = "https://github.com/circify/circ", rev = "8140b1369edd5992ede038d2e9e5721510ae7065" } # for compiling to circ IR
Expand Down
28 changes: 18 additions & 10 deletions src/backends/kimchi/prover.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,18 @@ use crate::{
};

use itertools::chain;
use kimchi::mina_curves::pasta::{Vesta, VestaParameters};
use kimchi::mina_poseidon::constants::PlonkSpongeConstantsKimchi;
use kimchi::mina_poseidon::sponge::{DefaultFqSponge, DefaultFrSponge};
use kimchi::poly_commitment::commitment::CommitmentCurve;
use kimchi::poly_commitment::evaluation_proof::OpeningProof;
use kimchi::poly_commitment::ipa::OpeningProof;
use kimchi::proof::ProverProof;
use kimchi::{
circuits::constraints::ConstraintSystem, groupmap::GroupMap, mina_curves::pasta::Pallas,
poly_commitment::srs::SRS,
poly_commitment::ipa::SRS,
};
use kimchi::{
mina_curves::pasta::{Vesta, VestaParameters},
poly_commitment::SRS as _,
};

use miette::{Context, IntoDiagnostic};
Expand Down Expand Up @@ -107,14 +110,14 @@ impl KimchiVesta {
.wrap_err("kimchi: could not create a constraint system with the given circuit and public input size")?;

// create SRS (for vesta, as the circuit is in Fp)
let mut srs = SRS::<Curve>::create(cs.domain.d1.size as usize);
srs.add_lagrange_basis(cs.domain.d1);
let srs = SRS::<Curve>::create(cs.domain.d1.size as usize);
srs.get_lagrange_basis(cs.domain.d1);
let srs = std::sync::Arc::new(srs);

println!("using an SRS of size {}", srs.g.len());

// create indexes
let (endo_q, _endo_r) = kimchi::poly_commitment::srs::endos::<OtherCurve>();
let (endo_q, _endo_r) = kimchi::poly_commitment::ipa::endos::<OtherCurve>();

let prover_index = kimchi::prover_index::ProverIndex::<Curve, OpeningProof<Curve>>::create(
cs, endo_q, srs,
Expand Down Expand Up @@ -198,10 +201,15 @@ impl ProverIndex {
}

// create proof
let proof =
ProverProof::create::<BaseSponge, ScalarSponge>(&GROUP_MAP, witness, &[], &self.index)
.into_diagnostic()
.wrap_err("kimchi: could not create a proof with the given inputs")?;
let proof = ProverProof::create::<BaseSponge, ScalarSponge, _>(
&GROUP_MAP,
witness,
&[],
&self.index,
&mut rand::rngs::OsRng,
)
.into_diagnostic()
.wrap_err("kimchi: could not create a proof with the given inputs")?;

// return proof + public output
Ok((
Expand Down
3 changes: 1 addition & 2 deletions src/backends/r1cs/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ pub mod snarkjs;

use std::collections::{HashMap, HashSet};

use ark_ff::FpParameters;
use circ::cfg::{CircCfg, CircOpt};
use circ_fields::FieldV;
use itertools::{izip, Itertools as _};
Expand Down Expand Up @@ -303,7 +302,7 @@ where

/// Returns the prime for snarkjs based on the curve field.
fn prime(&self) -> BigUint {
F::Params::MODULUS.into()
F::MODULUS.into()
}

/// Add an r1cs constraint that is 3 linear combinations.
Expand Down
3 changes: 2 additions & 1 deletion src/backends/r1cs/snarkjs.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
use crate::backends::BackendField;
use ark_ff::BigInteger;
use constraint_writers::r1cs_writer::{ConstraintSection, HeaderData, R1CSWriter};
use miette::Diagnostic;
use thiserror::Error;
Expand Down Expand Up @@ -191,7 +192,7 @@ where
fn convert_to_bigint(value: &F) -> BigInt {
BigInt::from_bytes_le(
num_bigint_dig::Sign::Plus,
&ark_ff::BigInteger::to_bytes_le(&value.into_repr()),
&value.into_bigint().to_bytes_le(),
)
}
}
Expand Down
5 changes: 3 additions & 2 deletions src/inputs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@

use std::{collections::HashMap, fs::File, io::Read, str::FromStr};

use ark_ff::{One, PrimeField, Zero};
use ark_ff::{One, Zero};
use kimchi::o1_utils::FieldHelpers;
use miette::Diagnostic;
use num_bigint::BigUint;
use thiserror::Error;
Expand Down Expand Up @@ -170,7 +171,7 @@ pub trait ExtField /* : PrimeField*/ {

impl ExtField for VestaField {
fn to_dec_string(&self) -> String {
let biguint: BigUint = self.into_repr().into();
let biguint: BigUint = self.to_biguint();
biguint.to_str_radix(10)
}
}
Expand Down
13 changes: 8 additions & 5 deletions src/serialization.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
//! This adds a few utility functions for serializing and deserializing
//! [arkworks](http://arkworks.rs/) types that implement [CanonicalSerialize] and [CanonicalDeserialize].

use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use serde_with::Bytes;

//
Expand All @@ -14,6 +14,7 @@ pub mod ser {
//! `#[serde(with = "o1_utils::serialization::ser") attribute"]`

use super::*;
use ark_serialize::{Compress, Validate};
use serde_with::{DeserializeAs, SerializeAs};

/// You can use this to serialize an arkworks type with serde and the "serialize_with" attribute.
Expand All @@ -23,7 +24,7 @@ pub mod ser {
S: serde::Serializer,
{
let mut bytes = vec![];
val.serialize(&mut bytes)
val.serialize_with_mode(&mut bytes, Compress::Yes)
.map_err(serde::ser::Error::custom)?;

Bytes::serialize_as(&bytes, serializer)
Expand All @@ -37,7 +38,8 @@ pub mod ser {
D: serde::Deserializer<'de>,
{
let bytes: Vec<u8> = Bytes::deserialize_as(deserializer)?;
T::deserialize(&mut &bytes[..]).map_err(serde::de::Error::custom)
T::deserialize_with_mode(&mut &bytes[..], Compress::Yes, Validate::Yes)
.map_err(serde::de::Error::custom)
}
}

Expand All @@ -60,7 +62,7 @@ where
S: serde::Serializer,
{
let mut bytes = vec![];
val.serialize(&mut bytes)
val.serialize_with_mode(&mut bytes, Compress::Yes)
.map_err(serde::ser::Error::custom)?;

Bytes::serialize_as(&bytes, serializer)
Expand All @@ -76,6 +78,7 @@ where
D: serde::Deserializer<'de>,
{
let bytes: Vec<u8> = Bytes::deserialize_as(deserializer)?;
T::deserialize(&mut &bytes[..]).map_err(serde::de::Error::custom)
T::deserialize_with_mode(&mut &bytes[..], Compress::Yes, Validate::Yes)
.map_err(serde::de::Error::custom)
}
}
2 changes: 1 addition & 1 deletion src/stdlib/bits.rs
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ fn check_field_size<B: Backend>(
span: Span,
) -> Result<Option<Var<B::Field, B::Var>>> {
let var = &vars[0].var[0];
let bit_len = B::Field::size_in_bits() as u64;
let bit_len = B::Field::MODULUS_BIT_SIZE as u64;

match var {
ConstOrCell::Const(cst) => {
Expand Down
Loading