Merge pull request #2588 from o1-labs/volhovm/arkworks042-develop
Update arkworks to 0.4.2 (master)
dannywillems authored Sep 25, 2024
2 parents b721788 + 47bcd2d commit 49d56a9
Showing 126 changed files with 2,665 additions and 1,268 deletions.
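The substance of the change is a set of mechanical arkworks 0.3 → 0.4 renames that recur throughout the diff below: the trait `AffineCurve` becomes `AffineRepr`, `ProjectiveCurve` is covered by `Group`/`CurveGroup`, `SWModelParameters` becomes `short_weierstrass::SWCurveConfig` (and `ModelParameters` becomes `CurveConfig`), `GroupAffine<P>` becomes `short_weierstrass::Affine<P>`, the method `Fp::size_in_bits()` becomes the associated constant `Fp::MODULUS_BIT_SIZE`, and adding two affine points now yields a projective point, hence the new `.into()` conversions.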
285 changes: 65 additions & 220 deletions Cargo.lock

Large diffs are not rendered by default.

14 changes: 7 additions & 7 deletions Cargo.toml
@@ -24,13 +24,13 @@ members = [
resolver = "2"

[workspace.dependencies]
-ark-algebra-test-templates = "0.3.0"
-ark-bn254 = { version = "0.3.0" }
-ark-ec = { version = "0.3.0", features = ["parallel"] }
-ark-ff = { version = "0.3.0", features = ["parallel", "asm"] }
-ark-poly = { version = "0.3.0", features = ["parallel"] }
-ark-serialize = "0.3.0"
-ark-std = "0.3.0"
+ark-algebra-test-templates = "0.4.2"
+ark-bn254 = { version = "0.4.0" }
+ark-ec = { version = "0.4.2", features = ["parallel"] }
+ark-ff = { version = "0.4.2", features = ["parallel", "asm"] }
+ark-poly = { version = "0.4.2", features = ["parallel"] }
+ark-serialize = "0.4.2"
+ark-std = "0.4.0"
bcs = "0.1.3"
base64 = "0.21.5"
bitvec = "1.0.0"
6 changes: 3 additions & 3 deletions arrabiata/src/prover.rs
@@ -1,7 +1,7 @@
//! A prover for the folding/accumulation scheme

use crate::proof::Proof;
-use ark_ec::AffineCurve;
+use ark_ec::AffineRepr;
use ark_ff::PrimeField;

use crate::witness::Env;
@@ -12,8 +12,8 @@ use crate::witness::Env;
pub fn prove<
Fp: PrimeField,
Fq: PrimeField,
-E1: AffineCurve<ScalarField = Fp, BaseField = Fq>,
-E2: AffineCurve<ScalarField = Fq, BaseField = Fp>,
+E1: AffineRepr<ScalarField = Fp, BaseField = Fq>,
+E2: AffineRepr<ScalarField = Fq, BaseField = Fp>,
>(
_env: &Env<Fp, Fq, E1, E2>,
) -> Result<Proof, String> {
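For illustration, a minimal sketch of the renamed trait in generic bounds, mirroring the curve-cycle constraint that `prove` uses above (the function `check_cycle` is hypothetical; it assumes the pasta curves from mina-curves):

use ark_ec::AffineRepr;
use ark_ff::PrimeField;
use mina_curves::pasta::{Pallas, Vesta};

// In 0.3 these bounds were written `E: AffineCurve<...>`; in 0.4 the trait
// for affine points is `AffineRepr`, and the bounds carry over unchanged.
fn check_cycle<E1, E2>()
where
    E1: AffineRepr,
    E1::BaseField: PrimeField,
    E2: AffineRepr<ScalarField = E1::BaseField, BaseField = E1::ScalarField>,
{
}

fn main() {
    // Pallas and Vesta form a cycle: each curve's scalar field is the
    // other's base field.
    check_cycle::<Pallas, Vesta>();
}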
18 changes: 9 additions & 9 deletions arrabiata/src/witness.rs
@@ -1,4 +1,4 @@
-use ark_ec::{AffineCurve, SWModelParameters};
+use ark_ec::{models::short_weierstrass::SWCurveConfig, AffineRepr};
use ark_ff::PrimeField;
use ark_poly::Evaluations;
use kimchi::circuits::{domains::EvaluationDomains, gate::CurrOrNext};
@@ -32,8 +32,8 @@ pub const IVC_STARTING_INSTRUCTION: Instruction = Instruction::Poseidon(0);
pub struct Env<
Fp: PrimeField,
Fq: PrimeField,
-E1: AffineCurve<ScalarField = Fp, BaseField = Fq>,
-E2: AffineCurve<ScalarField = Fq, BaseField = Fp>,
+E1: AffineRepr<ScalarField = Fp, BaseField = Fq>,
+E2: AffineRepr<ScalarField = Fq, BaseField = Fp>,
> {
// ----------------
// Setup related (domains + SRS)
@@ -189,8 +189,8 @@ impl<
E2: CommitmentCurve<ScalarField = Fq, BaseField = Fp>,
> InterpreterEnv for Env<Fp, Fq, E1, E2>
where
-<E1::Params as ark_ec::ModelParameters>::BaseField: PrimeField,
-<E2::Params as ark_ec::ModelParameters>::BaseField: PrimeField,
+<E1::Params as ark_ec::CurveConfig>::BaseField: PrimeField,
+<E2::Params as ark_ec::CurveConfig>::BaseField: PrimeField,
{
type Position = (Column, CurrOrNext);

@@ -815,8 +815,8 @@ impl<
sponge_e2: [BigInt; 3],
) -> Self {
{
-assert!(Fp::size_in_bits() <= MAXIMUM_FIELD_SIZE_IN_BITS.try_into().unwrap(), "The size of the field Fp is too large, it should be less than {MAXIMUM_FIELD_SIZE_IN_BITS}");
-assert!(Fq::size_in_bits() <= MAXIMUM_FIELD_SIZE_IN_BITS.try_into().unwrap(), "The size of the field Fq is too large, it should be less than {MAXIMUM_FIELD_SIZE_IN_BITS}");
+assert!(Fp::MODULUS_BIT_SIZE <= MAXIMUM_FIELD_SIZE_IN_BITS.try_into().unwrap(), "The size of the field Fp is too large, it should be less than {MAXIMUM_FIELD_SIZE_IN_BITS}");
+assert!(Fq::MODULUS_BIT_SIZE <= MAXIMUM_FIELD_SIZE_IN_BITS.try_into().unwrap(), "The size of the field Fq is too large, it should be less than {MAXIMUM_FIELD_SIZE_IN_BITS}");
let modulus_fp = Fp::modulus_biguint();
assert!(
(modulus_fp - BigUint::from(1_u64)).gcd(&BigUint::from(POSEIDON_ALPHA))
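For reference, a small sketch of the bit-size change above (in 0.4, `MODULUS_BIT_SIZE` is a `u32` associated constant on `PrimeField`, where 0.3 had a `size_in_bits()` method returning `usize`; `MAX_BITS` here is a hypothetical stand-in for `MAXIMUM_FIELD_SIZE_IN_BITS`):

use ark_ff::PrimeField;
use mina_curves::pasta::Fp;

const MAX_BITS: usize = 300; // hypothetical bound

fn main() {
    // arkworks 0.3: Fp::size_in_bits() -> usize (a method)
    // arkworks 0.4: an associated constant of type u32
    let bits: u32 = Fp::MODULUS_BIT_SIZE;
    assert_eq!(bits, 255); // the pasta fields are 255-bit primes
    // Comparing against a usize bound needs a conversion, as in the
    // asserts above.
    assert!(bits <= MAX_BITS.try_into().unwrap());
}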
@@ -871,10 +871,10 @@ impl<

// Default set to the blinders. Using double to make the EC scaling happy.
let previous_commitments_e1: Vec<PolyComm<E1>> = (0..NUMBER_OF_COLUMNS)
-.map(|_| PolyComm::new(vec![srs_e1.h + srs_e1.h]))
+.map(|_| PolyComm::new(vec![(srs_e1.h + srs_e1.h).into()]))
.collect();
let previous_commitments_e2: Vec<PolyComm<E2>> = (0..NUMBER_OF_COLUMNS)
-.map(|_| PolyComm::new(vec![srs_e2.h + srs_e2.h]))
+.map(|_| PolyComm::new(vec![(srs_e2.h + srs_e2.h).into()]))
.collect();
// FIXME: zero will not work.
let ivc_accumulator_e1: Vec<PolyComm<E1>> = (0..NUMBER_OF_COLUMNS)
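The new `.into()` calls reflect an operator change in 0.4: adding two short-Weierstrass affine points now returns a projective point, so results stored in affine form need an explicit conversion. A minimal sketch, assuming mina-curves' Vesta and its `ProjectiveVesta` alias:

use ark_ec::{AffineRepr, CurveGroup};
use mina_curves::pasta::{ProjectiveVesta, Vesta};

fn main() {
    let g = Vesta::generator();
    // 0.4: `Affine + Affine` has the projective group as its output type...
    let sum: ProjectiveVesta = g + g;
    // ...so keeping the result affine takes `.into()` or `.into_affine()`,
    // exactly as in the blinder commitments above.
    let doubled: Vesta = sum.into_affine();
    assert_eq!(doubled, (g + g).into());
}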
28 changes: 15 additions & 13 deletions arrabiata/tests/witness.rs
@@ -1,4 +1,7 @@
-use ark_ec::{short_weierstrass_jacobian::GroupAffine, ProjectiveCurve, SWModelParameters};
+use ark_ec::{
+models::short_weierstrass::{Affine, SWCurveConfig},
+AffineRepr, Group,
+};
use ark_ff::{PrimeField, UniformRand};
use arrabiata::{
interpreter::{self, Instruction, InterpreterEnv},
@@ -29,20 +32,18 @@ impl SpongeConstants for PlonkSpongeConstants {
const PERM_INITIAL_ARK: bool = false;
}

-fn helper_generate_random_elliptic_curve_point<RNG, P: SWModelParameters>(
-rng: &mut RNG,
-) -> GroupAffine<P>
+fn helper_generate_random_elliptic_curve_point<RNG, P: SWCurveConfig>(rng: &mut RNG) -> Affine<P>
where
P::BaseField: PrimeField,
RNG: RngCore + CryptoRng,
{
let p1_x = P::BaseField::rand(rng);
-let mut p1: Option<GroupAffine<P>> = GroupAffine::<P>::get_point_from_x(p1_x, false);
+let mut p1: Option<Affine<P>> = Affine::<P>::get_point_from_x_unchecked(p1_x, false);
while p1.is_none() {
let p1_x = P::BaseField::rand(rng);
-p1 = GroupAffine::<P>::get_point_from_x(p1_x, false);
+p1 = Affine::<P>::get_point_from_x_unchecked(p1_x, false);
}
-let p1: GroupAffine<P> = p1.unwrap().scale_by_cofactor().into();
+let p1: Affine<P> = p1.unwrap().mul_by_cofactor_to_group().into();
p1
}
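A hedged usage sketch of the two renamed point APIs used by this helper, instantiated on pasta's Pallas config (assumes the `rand` crate; `get_point_from_x_unchecked` makes explicit that, like 0.3's `get_point_from_x`, no subgroup check is performed, and `mul_by_cofactor_to_group` replaces `scale_by_cofactor`, now returning a projective point):

use ark_ec::{models::short_weierstrass::Affine, AffineRepr, CurveGroup};
use ark_ff::UniformRand;
use mina_curves::pasta::{Fp, PallasParameters};

fn main() {
    let mut rng = rand::thread_rng();
    // Sample x-coordinates until one lies on the curve; `false` selects
    // the smaller of the two candidate y-coordinates.
    let p = loop {
        let x = Fp::rand(&mut rng);
        if let Some(p) = Affine::<PallasParameters>::get_point_from_x_unchecked(x, false) {
            break p;
        }
    };
    // Move into the prime-order group (a no-op for pasta, cofactor 1);
    // the result is projective in 0.4.
    let q = p.mul_by_cofactor_to_group();
    assert!(q.into_affine().is_on_curve());
}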

@@ -179,7 +180,7 @@ fn test_unit_witness_elliptic_curve_addition() {
assert_eq!(env.current_iteration, 0);
let (exp_x3, exp_y3) = {
let res: Pallas =
-env.ivc_accumulator_e2[0].elems[0] + env.previous_commitments_e2[0].elems[0];
+(env.ivc_accumulator_e2[0].elems[0] + env.previous_commitments_e2[0].elems[0]).into();
let (x3, y3) = res.to_coordinates().unwrap();
(
x3.to_biguint().to_bigint().unwrap(),
@@ -198,7 +199,7 @@ fn test_unit_witness_elliptic_curve_addition() {
assert_eq!(env.current_iteration, 1);
let (exp_x3, exp_y3) = {
let res: Vesta =
-env.ivc_accumulator_e1[0].elems[0] + env.previous_commitments_e1[0].elems[0];
+(env.ivc_accumulator_e1[0].elems[0] + env.previous_commitments_e1[0].elems[0]).into();
let (x3, y3) = res.to_coordinates().unwrap();
(
x3.to_biguint().to_bigint().unwrap(),
@@ -217,7 +218,7 @@ fn test_unit_witness_elliptic_curve_addition() {
assert_eq!(env.current_iteration, 2);
let (exp_x3, exp_y3) = {
let res: Pallas =
-env.ivc_accumulator_e2[0].elems[0] + env.previous_commitments_e2[0].elems[0];
+(env.ivc_accumulator_e2[0].elems[0] + env.previous_commitments_e2[0].elems[0]).into();
let (x3, y3) = res.to_coordinates().unwrap();
(
x3.to_biguint().to_bigint().unwrap(),
@@ -254,7 +255,7 @@ fn test_witness_double_elliptic_curve_point() {
let p1_y = env.write_column(pos_y, p1.y.to_biguint().into());
let (res_x, res_y) = env.double_ec_point(pos_x, pos_y, p1_x, p1_y);

-let exp_res: Pallas = p1 + p1;
+let exp_res: Pallas = (p1 + p1).into();
let exp_x: BigInt = exp_res.x.to_biguint().into();
let exp_y: BigInt = exp_res.y.to_biguint().into();

@@ -291,8 +292,9 @@ where
let res_y: BigInt = env.state[1].clone();

let p1_proj: ProjectivePallas = p1.into();
-let p1_r: Pallas = p1_proj.mul(r.clone().to_u64_digits().1).into();
-let exp_res: Pallas = p1_r + env.srs_e2.h;
+// @volhovm TODO check if mul_bigint is what was intended
+let p1_r: Pallas = p1_proj.mul_bigint(r.clone().to_u64_digits().1).into();
+let exp_res: Pallas = (p1_r + env.srs_e2.h).into();

let exp_x: BigInt = exp_res.x.to_biguint().into();
let exp_y: BigInt = exp_res.y.to_biguint().into();
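On the `mul_bigint` TODO above: in arkworks 0.4, `Group::mul_bigint` takes little-endian `u64` limbs (`impl AsRef<[u64]>`), the same shape 0.3's `ProjectiveCurve::mul` accepted, so the rename should preserve semantics for the non-negative scalar used here (`BigInt::to_u64_digits().1` yields the limbs and drops the sign). A small sketch under that assumption:

use ark_ec::Group;
use mina_curves::pasta::{Fq, ProjectivePallas};
use num_bigint::BigUint;

fn main() {
    let g = ProjectivePallas::generator();
    let r = BigUint::from(42u64);
    // `mul_bigint` reads the limbs as a little-endian unsigned integer.
    let a = g.mul_bigint(r.to_u64_digits());
    // Scalar multiplication through the field gives the same point.
    let b = g * Fq::from(42u64);
    assert_eq!(a, b);
}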
8 changes: 4 additions & 4 deletions book/src/specs/kimchi.md
@@ -2037,7 +2037,7 @@ pub struct ProofEvaluations<Evals> {
#[serde_as]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(bound = "G: ark_serialize::CanonicalDeserialize + ark_serialize::CanonicalSerialize")]
-pub struct LookupCommitments<G: AffineCurve> {
+pub struct LookupCommitments<G: AffineRepr> {
/// Commitments to the sorted lookup table polynomial (may have chunks)
pub sorted: Vec<PolyComm<G>>,
/// Commitment to the lookup aggregation polynomial
@@ -2050,7 +2050,7 @@ pub struct LookupCommitments<G: AffineCurve> {
#[serde_as]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(bound = "G: ark_serialize::CanonicalDeserialize + ark_serialize::CanonicalSerialize")]
-pub struct ProverCommitments<G: AffineCurve> {
+pub struct ProverCommitments<G: AffineRepr> {
/// The commitments to the witness (execution trace)
pub w_comm: [PolyComm<G>; COLUMNS],
/// The commitment to the permutation polynomial
@@ -2065,7 +2065,7 @@ pub struct ProverCommitments<G: AffineCurve> {
#[serde_as]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(bound = "G: ark_serialize::CanonicalDeserialize + ark_serialize::CanonicalSerialize")]
-pub struct ProverProof<G: AffineCurve, OpeningProof> {
+pub struct ProverProof<G: AffineRepr, OpeningProof> {
/// All the polynomial commitments required in the proof
pub commitments: ProverCommitments<G>,

@@ -2093,7 +2093,7 @@ pub struct ProverProof<G: AffineCurve, OpeningProof> {
#[serde(bound = "G: ark_serialize::CanonicalDeserialize + ark_serialize::CanonicalSerialize")]
pub struct RecursionChallenge<G>
where
-G: AffineCurve,
+G: AffineRepr,
{
/// Vector of scalar field elements
#[serde_as(as = "Vec<o1_utils::serialization::SerdeAs>")]
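The spec structs change only in their trait bound. A minimal sketch of the pattern, with a hypothetical struct standing in for the ones above:

use ark_ec::AffineRepr;
use mina_curves::pasta::Vesta;

// 0.3: `struct Commitments<G: AffineCurve>`; in 0.4 only the trait name
// changes.
struct Commitments<G: AffineRepr> {
    points: Vec<G>,
}

fn main() {
    let c = Commitments::<Vesta> {
        points: vec![Vesta::generator()],
    };
    assert_eq!(c.points.len(), 1);
}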
47 changes: 47 additions & 0 deletions circuit-construction/Cargo.toml
@@ -0,0 +1,47 @@
[package]
name = "circuit-construction"
version = "0.1.0"
description = "A simple circuit writer for kimchi"
repository = "https://github.com/o1-labs/proof-systems"
edition = "2021"
license = "Apache-2.0"
homepage = "https://o1-labs.github.io/proof-systems/"
documentation = "https://o1-labs.github.io/proof-systems/rustdoc/"
readme = "../README.md"

[lib]
path = "src/lib.rs"
bench = false # needed for criterion (https://bheisler.github.io/criterion.rs/book/faq.html#cargo-bench-gives-unrecognized-option-errors-for-valid-command-line-options)

[dependencies]
ark-ff.workspace = true
ark-ec.workspace = true
ark-poly.workspace = true
ark-serialize.workspace = true
blake2.workspace = true
num-derive.workspace = true
num-traits.workspace = true
itertools.workspace = true
rand.workspace = true
rand_core.workspace = true
rayon.workspace = true
rmp-serde.workspace = true
serde.workspace = true
serde_with.workspace = true
thiserror.workspace = true

poly-commitment.workspace = true
groupmap.workspace = true
mina-curves.workspace = true
o1-utils.workspace = true
mina-poseidon.workspace = true
kimchi.workspace = true

[dev-dependencies]
proptest.workspace = true
proptest-derive.workspace = true
colored.workspace = true

# benchmarks
criterion.workspace = true
iai.workspace = true
44 changes: 44 additions & 0 deletions circuit-construction/src/constants.rs
@@ -0,0 +1,44 @@
use ark_ec::AffineRepr;
use ark_ff::Field;
use kimchi::curve::KimchiCurve;
use mina_curves::pasta::{Fp, Fq, Pallas as PallasAffine, Vesta as VestaAffine};
use mina_poseidon::poseidon::ArithmeticSpongeParams;
use poly_commitment::{commitment::CommitmentCurve, srs::endos};

/// The type of possible constants in the circuit
#[derive(Clone)]
pub struct Constants<F: Field + 'static> {
pub poseidon: &'static ArithmeticSpongeParams<F>,
pub endo: F,
pub base: (F, F),
}

/// Constants for the base field of Pallas
///
/// # Panics
///
/// Will panic if `PallasAffine::generator()` returns None.
pub fn fp_constants() -> Constants<Fp> {
let (endo_q, _endo_r) = endos::<PallasAffine>();
let base = PallasAffine::generator().to_coordinates().unwrap();
Constants {
poseidon: VestaAffine::sponge_params(),
endo: endo_q,
base,
}
}

/// Constants for the base field of Vesta
///
/// # Panics
///
/// Will panic if `VestaAffine::generator()` returns None.
pub fn fq_constants() -> Constants<Fq> {
let (endo_q, _endo_r) = endos::<VestaAffine>();
let base = VestaAffine::generator().to_coordinates().unwrap();
Constants {
poseidon: PallasAffine::sponge_params(),
endo: endo_q,
base,
}
}
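A usage sketch for the new module, assuming the crate is consumed as `circuit_construction`:

use circuit_construction::constants::{fp_constants, fq_constants, Constants};
use mina_curves::pasta::{Fp, Fq};

fn main() {
    // Base-field constants for each pasta curve: Poseidon parameters, the
    // endomorphism coefficient, and a generator's affine coordinates.
    let fp: Constants<Fp> = fp_constants();
    let fq: Constants<Fq> = fq_constants();
    assert_ne!(fp.endo, Fp::from(0u64));
    assert_ne!(fq.endo, Fq::from(0u64));
}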
33 changes: 33 additions & 0 deletions circuit-construction/src/lib.rs
@@ -0,0 +1,33 @@
#![doc = include_str!("../../README.md")]

/// Definition of possible constants in circuits
pub mod constants;
/// This contains the prover functions, ranging from curves definitions to prover index and proof generation
pub mod prover;
/// This is the actual writer with all of the available functions to set up a circuit and its corresponding constraint system
pub mod writer;

#[cfg(test)]
mod tests;

/// This contains the Kimchi dependencies being used
pub mod prologue {
pub use super::constants::{fp_constants, fq_constants, Constants};
pub use super::prover::{generate_prover_index, prove, CoordinateCurve};
pub use super::writer::{Cs, Var};
pub use ark_ec::{AffineRepr, CurveGroup};
pub use ark_ff::{FftField, PrimeField, UniformRand};
pub use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
pub use groupmap::GroupMap;
pub use kimchi::verifier::verify;
pub use mina_curves::pasta::{
Fp, Pallas as PallasAffine, Vesta as VestaAffine, VestaParameters,
};
pub use mina_poseidon::{
constants::*,
poseidon::{ArithmeticSponge, Sponge},
sponge::{DefaultFqSponge, DefaultFrSponge},
};
pub use poly_commitment::{commitment::CommitmentCurve, srs::SRS};
pub use std::sync::Arc;
}
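And a sketch of what the prologue gives a downstream crate (only names re-exported above are used):

use circuit_construction::prologue::*;

fn main() {
    // Curves, fields, and traits are all in scope via the prologue.
    let g: PallasAffine = PallasAffine::generator();
    // `to_coordinates` comes from `CommitmentCurve`; it is `None` only at
    // the point at infinity.
    assert!(g.to_coordinates().is_some());
    // The 0.4 affine-plus-affine-is-projective rule applies here too.
    let _doubled: PallasAffine = (g + g).into_affine();
}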