From 2858a224c878de47cfd40e66ce4c8963a44b5463 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 10 Oct 2024 11:50:41 -0500 Subject: [PATCH 001/105] remove zombie code: rand in InitPhase1 --- backend/groth16/bn254/mpcsetup/phase1.go | 9 --------- 1 file changed, 9 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index a912a473aa..e09e34b0ce 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -52,15 +52,6 @@ type Phase1 struct { func InitPhase1(power int) (phase1 Phase1) { N := int(math.Pow(2, float64(power))) - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - // First contribution use generators _, _, g1, g2 := curve.Generators() phase1.Parameters.G2.Beta.Set(&g2) From 3477517d49ad686696e3b8d8511fa677870bc11e Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 10 Oct 2024 12:13:12 -0500 Subject: [PATCH 002/105] docs comment genR --- backend/groth16/bn254/mpcsetup/utils.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index e3b47d1121..f455173232 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -156,6 +156,8 @@ func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { } // Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) +// it is to be used as a challenge for generating a proof of knowledge to x +// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) From 
2e425fc40bf766f68931393b0b073a2fb06e92fb Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 11 Oct 2024 14:29:30 -0500 Subject: [PATCH 003/105] revert bring init back --- backend/groth16/bn254/mpcsetup/phase1.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index e09e34b0ce..a912a473aa 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -52,6 +52,15 @@ type Phase1 struct { func InitPhase1(power int) (phase1 Phase1) { N := int(math.Pow(2, float64(power))) + // Generate key pairs + var tau, alpha, beta fr.Element + tau.SetOne() + alpha.SetOne() + beta.SetOne() + phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) + phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) + phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) + // First contribution use generators _, _, g1, g2 := curve.Generators() phase1.Parameters.G2.Beta.Set(&g2) From 2719edb918720627d29d27fca45189d478971352 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 11 Oct 2024 15:29:14 -0500 Subject: [PATCH 004/105] refactor: updateValue and verify --- backend/groth16/bn254/mpcsetup/utils.go | 121 +++++++++++++++++++++--- 1 file changed, 110 insertions(+), 11 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index f455173232..e4bd05fa0d 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -18,9 +18,11 @@ package mpcsetup import ( "bytes" + "crypto/rand" "math/big" "math/bits" "runtime" + "time" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" @@ -31,7 +33,7 @@ import ( type PublicKey struct { SG curve.G1Affine SXG curve.G1Affine - XR curve.G2Affine + XR curve.G2Affine // XR = X.R ∈ 𝔾₂ proof of knowledge } func newPublicKey(x 
fr.Element, challenge []byte, dst byte) PublicKey { @@ -69,7 +71,7 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form +// Returns [1, a, a², ..., aⁿ⁻¹ ] func powers(a fr.Element, n int) []fr.Element { result := make([]fr.Element, n) result[0] = fr.NewElement(1) @@ -79,7 +81,7 @@ func powers(a fr.Element, n int) []fr.Element { return result } -// Returns [aᵢAᵢ, ...] in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -90,7 +92,7 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -101,16 +103,22 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } +/* // Check e(a₁, a₂) = e(b₁, b₂) func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { panic("invalid point not in subgroup") } - var na2 curve.G2Affine - na2.Neg(&a2) + return sameRatioUnsafe(a1, b1, a2, b2) +}*/ + +// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
+func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { + var nd1 curve.G1Affine + nd1.Neg(&d1) res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) + []curve.G1Affine{n1, nd1}, + []curve.G2Affine{d2, n2}) if err != nil { panic(err) } @@ -129,7 +137,7 @@ func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 +// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁∈𝔾₁ func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { nc := runtime.NumCPU() n := len(A) @@ -142,7 +150,7 @@ func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 +// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁∈𝔾₂ func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { nc := runtime.NumCPU() n := len(A) @@ -155,7 +163,7 @@ func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { return } -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) +// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) // it is to be used as a challenge for generating a proof of knowledge to x // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { @@ -170,3 +178,94 @@ func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { } return spG2 } + +type RandomBeacon func(time.Time) []byte + +// func (rb RandomBeacon) GenerateChallenge(...) []byte {} + +type pair struct { + g1 curve.G1Affine + g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. +} + +// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero +func (p *pair) validUpdate() bool { + // if the contribution is 0 the product is doomed to be 0. 
+ // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail + return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) +} + +type valueUpdate struct { + contributionCommitment curve.G1Affine // x or [Xⱼ]₁ + contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ + updatedCommitment pair // [X₁..Xⱼ] +} + +// updateValue produces values associated with contribution to an existing value. +// if prevCommitment contains only a 𝔾₁ value, then so will updatedCommitment +func updateValue(prevCommitment pair, challenge []byte, dst byte) valueUpdate { + var x valueUpdate + contributionValue, err := rand.Int(rand.Reader, fr.Modulus()) + + eraseToxicWaste := func() { + if contributionValue == nil { + return + } + for i := range contributionValue.Bits() { // TODO check that this works + contributionValue.Bits()[i] = 0 + } + } + defer eraseToxicWaste() + + if err != nil { + panic(err) + } + + _, _, g1, _ := curve.Generators() + x.contributionCommitment.ScalarMultiplication(&g1, contributionValue) + x.updatedCommitment.g1.ScalarMultiplication(&prevCommitment.g1, contributionValue) + if prevCommitment.g2 != nil { // TODO make sure this is correct + x.updatedCommitment.g2 = new(curve.G2Affine).ScalarMultiplication(prevCommitment.g2, contributionValue) + } + + // proof of knowledge to commitment. Algorithm 3 from section 3.7 + pokBase := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // r + x.contributionPok.ScalarMultiplication(&pokBase, contributionValue) + + return x +} + +// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 +// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution +// and previous commitment makes the new commitment. +// prevCommitment is assumed to be valid. No subgroup check and the like. 
+func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) bool { + noG2 := prevCommitment.g2 == nil + if noG2 != (x.updatedCommitment.g2 == nil) { // no erasing or creating g2 values + return false + } + + if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !x.updatedCommitment.validUpdate() { + return false + } + + // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 + r := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // verification challenge in the form of a g2 base + _, _, g1, _ := curve.Generators() + if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π ?= x.r i.e. x/g1 =? π/r + return false + } + + // check that the updated/previous ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. + if !noG2 && !sameRatioUnsafe(x.updatedCommitment.g1, prevCommitment.g1, *x.updatedCommitment.g2, *prevCommitment.g2) { + return false + } + + // now verify that updated₁/previous₁ = x ( = x/g1 = π/r ) + // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values + if !sameRatioUnsafe(x.updatedCommitment.g1, prevCommitment.g1, x.contributionPok, r) { + return false + } + + return true +} From 7c93ec0e415cd8982ebf13db3c375767e02ef833 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 11 Oct 2024 15:43:56 -0500 Subject: [PATCH 005/105] feat new phase struct --- backend/groth16/bn254/mpcsetup/phase1.go | 3 +++ backend/groth16/bn254/mpcsetup/utils.go | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index a912a473aa..1095392b36 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -25,6 +25,9 @@ import ( "math/big" ) +type phase1 struct { +} + // Phase1 represents the 
Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index e4bd05fa0d..e45ea811a2 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -252,7 +252,7 @@ func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) bo // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() - if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π ?= x.r i.e. x/g1 =? π/r + if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r return false } From c35033691005ec06fd4f0f422a5a39117e566feb Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 11 Oct 2024 18:04:06 -0500 Subject: [PATCH 006/105] refactor phase1 --- backend/groth16/bn254/mpcsetup/phase1.go | 204 +++++++++++++++++++---- backend/groth16/bn254/mpcsetup/utils.go | 101 ++++++----- 2 files changed, 231 insertions(+), 74 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 1095392b36..afed0e31a6 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -19,13 +19,89 @@ package mpcsetup import ( "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "math" "math/big" + "runtime" + "sync" ) +// Phase1 represents the Phase1 of the MPC described in +// https://eprint.iacr.org/2017/1050.pdf +// +// Also known as "Powers of Tau" type phase1 struct { + Principal struct { // "main" contributions + Tau, Alpha, Beta valueUpdate + } + 
G1Derived struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} + } + G2Derived struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} + } + Challenge []byte // Hash of the transcript PRIOR to this participant +} + +func eraseBigInts(i ...*big.Int) { + for _, i := range i { + if i != nil { + for j := range i.Bits() { + i.Bits()[j] = 0 + } + } + } +} + +func eraseFrVectors(v ...[]fr.Element) { + for _, v := range v { + for i := range v { + v[i].SetZero() + } + } +} + +// Contribute contributes randomness to the phase1 object. This mutates phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *phase1) Contribute() { + N := len(p.G2Derived.Tau) + challenge := p.hash() + + // Generate main value updates + var tau, alpha, beta *big.Int + p.Principal.Tau, tau = updateValue(p.Principal.Tau.updatedCommitment, challenge, 1) + p.Principal.Alpha, alpha = updateValue(p.Principal.Alpha.updatedCommitment, challenge, 2) + p.Principal.Beta, beta = updateValue(p.Principal.Beta.updatedCommitment, challenge, 3) + + defer eraseBigInts(tau, alpha, beta) + + // Compute τ, ατ, and βτ + taus := powers(tau, 2*N-1) + alphaTau := make([]fr.Element, N) + betaTau := make([]fr.Element, N) + + defer eraseFrVectors(taus, alphaTau, betaTau) + + alphaTau[0].SetBigInt(alpha) + betaTau[0].SetBigInt(beta) + for i := 1; i < N; i++ { + alphaTau[i].Mul(&taus[i], &alphaTau[0]) + betaTau[i].Mul(&taus[i], &betaTau[0]) + } + + // Update using previous parameters + // TODO @gbotrel working with jacobian points here will help with perf. 
+ scaleG1InPlace(p.G1Derived.Tau, taus) + scaleG2InPlace(p.G2Derived.Tau, taus[0:N]) + scaleG1InPlace(p.G1Derived.AlphaTau, alphaTau) + scaleG1InPlace(p.G1Derived.BetaTau, betaTau) + + p.Challenge = challenge } // Phase1 represents the Phase1 of the MPC described in @@ -88,42 +164,6 @@ func InitPhase1(power int) (phase1 Phase1) { return } -// Contribute contributes randomness to the phase1 object. This mutates phase1. -func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() -} - func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { contribs := append([]*Phase1{c0, c1}, c...) 
for i := 0; i < len(contribs)-1; i++ { @@ -134,6 +174,97 @@ func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { return nil } +// Verify assumes previous is correct +func (p *phase1) Verify(previous *phase1) error { + + if err := p.Principal.Tau.verify(previous.Principal.Tau.updatedCommitment, p.Challenge, 1); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) + } + if err := p.Principal.Alpha.verify(previous.Principal.Alpha.updatedCommitment, p.Challenge, 2); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) + } + if err := p.Principal.Beta.verify(previous.Principal.Beta.updatedCommitment, p.Challenge, 3); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) + } + + if !areInSubGroupG1(p.G1Derived.Tau) || !areInSubGroupG1(p.G1Derived.BetaTau) || !areInSubGroupG1(p.G1Derived.AlphaTau) { + return errors.New("derived values 𝔾₁ subgroup check failed") + } + if !areInSubGroupG2(p.G2Derived.Tau) { + return errors.New("derived values 𝔾₂ subgroup check failed") + } + + _, _, g1, g2 := curve.Generators() + + // for 1 ≤ i ≤ 2N-3 we want to check τⁱ⁺¹/τⁱ = τ + // i.e. e(τⁱ⁺¹,[1]₂) = e(τⁱ,[τ]₂). 
Due to bi-linearity we can instead check + // e(∑rⁱ⁻¹τⁱ⁺¹,[1]₂) = e(∑rⁱ⁻¹τⁱ,[τ]₂), which is tantamount to the check + // ∑rⁱ⁻¹τⁱ⁺¹ / ∑rⁱ⁻¹τⁱ = τ + r := linearCombCoeffs(len(p.G1Derived.Tau) - 1) // the longest of all lengths + // will be reusing the coefficient TODO @Tabaie make sure that's okay + nc := runtime.NumCPU() + var ( + tauT1, tauS1, alphaTT, alphaTS, betaTT, betaTS curve.G1Affine + tauT2, tauS2 curve.G2Affine + wg sync.WaitGroup + ) + + mulExpG1 := func(v *curve.G1Affine, points []curve.G1Affine, nbTasks int) { + if _, err := v.MultiExp(points, r[:len(points)], ecc.MultiExpConfig{NbTasks: nbTasks}); err != nil { + panic(err) + } + wg.Done() + } + + mulExpG2 := func(v *curve.G2Affine, points []curve.G2Affine, nbTasks int) { + if _, err := v.MultiExp(points, r[:len(points)], ecc.MultiExpConfig{NbTasks: nbTasks}); err != nil { + panic(err) + } + wg.Done() + } + + if nc < 2 { + mulExpG1(&tauT1, truncate(p.G1Derived.Tau), nc) + mulExpG1(&tauS1, p.G1Derived.Tau[1:], nc) + } else { + // larger tasks than the others. 
better get them done together + wg.Add(2) + go mulExpG1(&tauT1, truncate(p.G1Derived.Tau), nc/2) // truncated: smaller powers + mulExpG1(&tauS1, p.G1Derived.Tau[1:], nc - nc/2) // shifted: larger powers + wg.Wait() + } + + if nc < 4 { + mulExpG1(&alphaTT, truncate(p.G1Derived.AlphaTau), nc) + mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc) + mulExpG1(&betaTT, truncate(p.G1Derived.BetaTau), nc) + mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc) + } else { + wg.Add(4) + go mulExpG1(&alphaTT, truncate(p.G1Derived.AlphaTau), nc/4) + go mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc/2 - nc/4) + go mulExpG1(&betaTT, truncate(p.G1Derived.BetaTau), nc/4) + mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc - nc/2 - nc/4) + wg.Wait() + } + + + + if err := tauT1.MultiExp.G1Derived.Tau[:len(p.G1Derived.Tau)-1], r, ecc.MultiExpConfig{NbTasks: nc/2}) + + tauT1, tauS1 := linearCombinationG1(r, p.G1Derived.Tau[1:]) // at this point we should already know that tau[0] = infty and tau[1] = τ. ReadFrom is in charge of ensuring that. + + + if !sameRatioUnsafe(tauS1, tauT1, *p.Principal.Tau.updatedCommitment.g2, g2) { + return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") + } + tauT2, tauS2 := linearCombinationG2(r, p.G2Derived.Tau[1:]) + if !sameRatioUnsafe(p.Principal.Tau.updatedCommitment.g1, g1, tauS2, tauT2) { + return errors.New("couldn't verify 𝔾₂ representations of the τⁱ") + } + +} + // verifyPhase1 checks that a contribution is based on a known previous Phase1 state. 
func verifyPhase1(current, contribution *Phase1) error { // Compute R for τ, α, β @@ -153,6 +284,7 @@ func verifyPhase1(current, contribution *Phase1) error { } // Check for valid updates using previous parameters + // if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { return errors.New("couldn't verify that [τ]₁ is based on previous contribution") } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index e45ea811a2..499fda08d1 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -19,6 +19,7 @@ package mpcsetup import ( "bytes" "crypto/rand" + "errors" "math/big" "math/bits" "runtime" @@ -71,12 +72,22 @@ func bitReverse[T any](a []T) { } } +func linearCombCoeffs(n int) []fr.Element { + a, err := rand.Int(rand.Reader, fr.Modulus()) + if err != nil { + panic(err) + } + return powers(a, n) +} + // Returns [1, a, a², ..., aⁿ⁻¹ ] -func powers(a fr.Element, n int) []fr.Element { +func powers(a *big.Int, n int) []fr.Element { + var aMont fr.Element + aMont.SetBigInt(a) result := make([]fr.Element, n) result[0] = fr.NewElement(1) for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) + result[i].Mul(&result[i-1], &aMont) } return result } @@ -137,29 +148,23 @@ func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁∈𝔾₁ -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() +// truncated = ∑ rᵢAᵢ, shifted = ∑ rᵢAᵢ₊₁∈𝔾₁ +func linearCombinationG1(r []fr.Element, A []curve.G1Affine, nbTasks int) curve.G1Affine { n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + r = r[:n-1] + var res curve.G1Affine + res.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + 
shifted.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁∈𝔾₂ -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { +// truncated = ∑ rᵢAᵢ, shifted = ∑ rᵢAᵢ₊₁∈𝔾₂ +func linearCombinationG2(r []fr.Element, A []curve.G2Affine) (truncated, shifted curve.G2Affine) { nc := runtime.NumCPU() n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + r = r[:n-1] + truncated.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + shifted.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) return } @@ -203,20 +208,11 @@ type valueUpdate struct { // updateValue produces values associated with contribution to an existing value. // if prevCommitment contains only a 𝔾₁ value, then so will updatedCommitment -func updateValue(prevCommitment pair, challenge []byte, dst byte) valueUpdate { +// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. 
+func updateValue(prevCommitment pair, challenge []byte, dst byte) (valueUpdate, *big.Int) { var x valueUpdate contributionValue, err := rand.Int(rand.Reader, fr.Modulus()) - eraseToxicWaste := func() { - if contributionValue == nil { - return - } - for i := range contributionValue.Bits() { // TODO check that this works - contributionValue.Bits()[i] = 0 - } - } - defer eraseToxicWaste() - if err != nil { panic(err) } @@ -232,40 +228,69 @@ func updateValue(prevCommitment pair, challenge []byte, dst byte) valueUpdate { pokBase := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // r x.contributionPok.ScalarMultiplication(&pokBase, contributionValue) - return x + return x, contributionValue } // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. // prevCommitment is assumed to be valid. No subgroup check and the like. -func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) bool { +func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) error { noG2 := prevCommitment.g2 == nil - if noG2 != (x.updatedCommitment.g2 == nil) { // no erasing or creating g2 values - return false + if noG2 != (x.updatedCommitment.g2 == nil) { + return errors.New("erasing or creating g2 values") } if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !x.updatedCommitment.validUpdate() { - return false + return errors.New("contribution values subgroup check failed") } // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. 
x/g1 =? π/r - return false + return errors.New("contribution proof of knowledge verification failed") } // check that the updated/previous ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. if !noG2 && !sameRatioUnsafe(x.updatedCommitment.g1, prevCommitment.g1, *x.updatedCommitment.g2, *prevCommitment.g2) { - return false + return errors.New("g2 update inconsistent") } // now verify that updated₁/previous₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values if !sameRatioUnsafe(x.updatedCommitment.g1, prevCommitment.g1, x.contributionPok, r) { - return false + return errors.New("g1 update inconsistent") } + return nil +} + +func toRefs[T any](s []T) []*T { + res := make([]*T, len(s)) + for i := range s { + res[i] = &s[i] + } + return res +} + +func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { + for i := range s { + if !s[i].IsInSubGroup() { + return false + } + } return true } + +func areInSubGroupG1(s []curve.G1Affine) bool { + return areInSubGroup(toRefs(s)) +} + +func areInSubGroupG2(s []curve.G2Affine) bool { + return areInSubGroup(toRefs(s)) +} + +func truncate[T any](s []T) []T { + return s[:len(s)-1] +} From fcde6650dd764c5161e0153f6b9864f55b8caf79 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 14 Oct 2024 16:55:43 -0500 Subject: [PATCH 007/105] feat complete phase1 verification --- backend/groth16/bn254/mpcsetup/phase1.go | 63 ++++++++++-------------- backend/groth16/bn254/mpcsetup/utils.go | 34 +++++++++++++ 2 files changed, 59 insertions(+), 38 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index afed0e31a6..34a157411f 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -202,11 +202,14 @@ func (p *phase1) Verify(previous *phase1) error { // ∑rⁱ⁻¹τⁱ⁺¹ / 
∑rⁱ⁻¹τⁱ = τ r := linearCombCoeffs(len(p.G1Derived.Tau) - 1) // the longest of all lengths // will be reusing the coefficient TODO @Tabaie make sure that's okay + + tauT1, tauS1 := linearCombinationsG1(r, p.G1Derived.Tau) + tauT2, tauS2 := linearCombinationsG2(r, p.G2Derived.Tau) + nc := runtime.NumCPU() var ( - tauT1, tauS1, alphaTT, alphaTS, betaTT, betaTS curve.G1Affine - tauT2, tauS2 curve.G2Affine - wg sync.WaitGroup + alphaTS, betaTS curve.G1Affine + wg sync.WaitGroup ) mulExpG1 := func(v *curve.G1Affine, points []curve.G1Affine, nbTasks int) { @@ -216,53 +219,37 @@ func (p *phase1) Verify(previous *phase1) error { wg.Done() } - mulExpG2 := func(v *curve.G2Affine, points []curve.G2Affine, nbTasks int) { - if _, err := v.MultiExp(points, r[:len(points)], ecc.MultiExpConfig{NbTasks: nbTasks}); err != nil { - panic(err) - } - wg.Done() - } - - if nc < 2 { - mulExpG1(&tauT1, truncate(p.G1Derived.Tau), nc) - mulExpG1(&tauS1, p.G1Derived.Tau[1:], nc) - } else { - // larger tasks than the others. 
better get them done together - wg.Add(2) - go mulExpG1(&tauT1, truncate(p.G1Derived.Tau), nc/2) // truncated: smaller powers - mulExpG1(&tauS1, p.G1Derived.Tau[1:], nc - nc/2) // shifted: larger powers + if nc >= 2 { + wg.Add(2) // small tasks over 𝔾₁ + go mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc/2) + mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc-nc/2) wg.Wait() - } - - if nc < 4 { - mulExpG1(&alphaTT, truncate(p.G1Derived.AlphaTau), nc) + } else { mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc) - mulExpG1(&betaTT, truncate(p.G1Derived.BetaTau), nc) mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc) - } else { - wg.Add(4) - go mulExpG1(&alphaTT, truncate(p.G1Derived.AlphaTau), nc/4) - go mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc/2 - nc/4) - go mulExpG1(&betaTT, truncate(p.G1Derived.BetaTau), nc/4) - mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc - nc/2 - nc/4) - wg.Wait() } - - - if err := tauT1.MultiExp.G1Derived.Tau[:len(p.G1Derived.Tau)-1], r, ecc.MultiExpConfig{NbTasks: nc/2}) - - tauT1, tauS1 := linearCombinationG1(r, p.G1Derived.Tau[1:]) // at this point we should already know that tau[0] = infty and tau[1] = τ. ReadFrom is in charge of ensuring that. - - if !sameRatioUnsafe(tauS1, tauT1, *p.Principal.Tau.updatedCommitment.g2, g2) { return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") } - tauT2, tauS2 := linearCombinationG2(r, p.G2Derived.Tau[1:]) + if !sameRatioUnsafe(p.Principal.Tau.updatedCommitment.g1, g1, tauS2, tauT2) { return errors.New("couldn't verify 𝔾₂ representations of the τⁱ") } + // for 1 ≤ i < N we want to check ατⁱ/τⁱ = α + // with a similar bi-linearity argument as above we can do this with a single pairing check + // Note that the check at i = 0 is part of the well-formedness requirement and is not checked here, + // but guaranteed by ReadFrom. 
+ + if !sameRatioUnsafe(alphaTS, tauS1, *p.Principal.Alpha.updatedCommitment.g2, g2) { + return errors.New("couldn't verify the ατⁱ") + } + if !sameRatioUnsafe(betaTS, tauS1, *p.Principal.Beta.updatedCommitment.g2, g2) { + return errors.New("couldn't verify the βτⁱ") + } + + return nil } // verifyPhase1 checks that a contribution is based on a known previous Phase1 state. diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 499fda08d1..6a446e3236 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -168,6 +168,40 @@ func linearCombinationG2(r []fr.Element, A []curve.G2Affine) (truncated, shifted return } +// linearCombinationsG1 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) +func linearCombinationsG1(rPowers []fr.Element, A []curve.G1Affine) (truncated, shifted curve.G1Affine) { + // the common section, 1 to N-2 + var common curve.G1Affine + common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}) // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] + + var c big.Int + rPowers[1].BigInt(&c) + truncated.ScalarMultiplication(&common, &c).Add(&truncated, &A[0]) // A[0] + r.A[1] + r².A[2] + ... + rᴺ⁻².A[N-2] + + rPowers[len(A)-1].BigInt(&c) + shifted.ScalarMultiplication(&A[len(A)-1], &c).Add(&shifted, &common) + + return +} + +// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) +func linearCombinationsG2(rPowers []fr.Element, A []curve.G2Affine) (truncated, shifted curve.G2Affine) { + // the common section, 1 to N-2 + var common curve.G2Affine + common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}) // A[1] + r.A[2] + ... 
+ rᴺ⁻³.A[N-2] + + var c big.Int + rPowers[1].BigInt(&c) + truncated.ScalarMultiplication(&common, &c).Add(&truncated, &A[0]) // A[0] + r.A[1] + r².A[2] + ... + rᴺ⁻².A[N-2] + + rPowers[len(A)-1].BigInt(&c) + shifted.ScalarMultiplication(&A[len(A)-1], &c).Add(&shifted, &common) + + return +} + // Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) // it is to be used as a challenge for generating a proof of knowledge to x // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) From 6fc1ffe2e25596cd19e0936fbfcb2611c310c7d7 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 14 Oct 2024 17:30:13 -0500 Subject: [PATCH 008/105] fix check without assuming alpha, beta in G2 --- backend/groth16/bn254/mpcsetup/marshal.go | 70 ++++---- backend/groth16/bn254/mpcsetup/phase1.go | 172 ++++--------------- backend/groth16/bn254/mpcsetup/setup_test.go | 16 +- backend/groth16/bn254/mpcsetup/utils.go | 27 +++ 4 files changed, 99 insertions(+), 186 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 08cb2ae3d1..2985022bbd 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -22,31 +22,31 @@ import ( ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) +func (p *Phase1) WriteTo(writer io.Writer) (int64, error) { + n, err := p.writeTo(writer) if err != nil { return n, err } - nBytes, err := writer.Write(phase1.Hash) + nBytes, err := writer.Write(p.Hash) return int64(nBytes) + n, err } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { +func (p *Phase1) writeTo(writer io.Writer) (int64, error) { toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - 
&phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, + &p.PublicKeys.Tau.SG, + &p.PublicKeys.Tau.SXG, + &p.PublicKeys.Tau.XR, + &p.PublicKeys.Alpha.SG, + &p.PublicKeys.Alpha.SXG, + &p.PublicKeys.Alpha.XR, + &p.PublicKeys.Beta.SG, + &p.PublicKeys.Beta.SXG, + &p.PublicKeys.Beta.XR, + p.Parameters.G1.Tau, + p.Parameters.G1.AlphaTau, + p.Parameters.G1.BetaTau, + p.Parameters.G2.Tau, + &p.Parameters.G2.Beta, } enc := curve.NewEncoder(writer) @@ -59,22 +59,22 @@ func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { +func (p *Phase1) ReadFrom(reader io.Reader) (int64, error) { toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, + &p.PublicKeys.Tau.SG, + &p.PublicKeys.Tau.SXG, + &p.PublicKeys.Tau.XR, + &p.PublicKeys.Alpha.SG, + &p.PublicKeys.Alpha.SXG, + &p.PublicKeys.Alpha.XR, + &p.PublicKeys.Beta.SG, + &p.PublicKeys.Beta.SXG, + &p.PublicKeys.Beta.XR, + &p.Parameters.G1.Tau, + &p.Parameters.G1.AlphaTau, + &p.Parameters.G1.BetaTau, + &p.Parameters.G2.Tau, + &p.Parameters.G2.Beta, } dec := curve.NewDecoder(reader) @@ -83,8 +83,8 @@ func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { return dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) + p.Hash = make([]byte, 32) + nBytes, err := reader.Read(p.Hash) return dec.BytesRead() + int64(nBytes), err } 
diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 34a157411f..1099d727c2 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -17,23 +17,21 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" "fmt" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "math" "math/big" - "runtime" - "sync" ) // Phase1 represents the Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" -type phase1 struct { +type Phase1 struct { Principal struct { // "main" contributions Tau, Alpha, Beta valueUpdate } @@ -48,27 +46,9 @@ type phase1 struct { Challenge []byte // Hash of the transcript PRIOR to this participant } -func eraseBigInts(i ...*big.Int) { - for _, i := range i { - if i != nil { - for j := range i.Bits() { - i.Bits()[j] = 0 - } - } - } -} - -func eraseFrVectors(v ...[]fr.Element) { - for _, v := range v { - for i := range v { - v[i].SetZero() - } - } -} - -// Contribute contributes randomness to the phase1 object. This mutates phase1. +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. // p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. 
-func (p *phase1) Contribute() { +func (p *Phase1) Contribute() { N := len(p.G2Derived.Tau) challenge := p.hash() @@ -104,33 +84,16 @@ func (p *phase1) Contribute() { p.Challenge = challenge } -// Phase1 represents the Phase1 of the MPC described in -// https://eprint.iacr.org/2017/1050.pdf -// -// Also known as "Powers of Tau" -type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } - } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash -} - // InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before // any randomness contribution is made (see Contribute()). func InitPhase1(power int) (phase1 Phase1) { N := int(math.Pow(2, float64(power))) + _, _, g1, g2 := curve.Generators() + + phase1.Challenge = []byte{0} + phase1.Principal.Alpha.setEmpty() + // Generate key pairs var tau, alpha, beta fr.Element tau.SetOne() @@ -167,7 +130,7 @@ func InitPhase1(power int) (phase1 Phase1) { func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { contribs := append([]*Phase1{c0, c1}, c...) 
for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { + if err := contribs[i].Verify(contribs[i+1]); err != nil { return err } } @@ -175,7 +138,14 @@ func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { } // Verify assumes previous is correct -func (p *phase1) Verify(previous *phase1) error { +func (p *Phase1) Verify(previous *Phase1) error { + + if prevHash := previous.hash(); !bytes.Equal(p.Challenge, previous.hash()) { // if chain-verifying contributions, challenge fields are optional as they can be computed as we go + if len(p.Challenge) != 0 { + return errors.New("the challenge does not match the previous phase's hash") + } + p.Challenge = prevHash + } if err := p.Principal.Tau.verify(previous.Principal.Tau.updatedCommitment, p.Challenge, 1); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) @@ -205,29 +175,8 @@ func (p *phase1) Verify(previous *phase1) error { tauT1, tauS1 := linearCombinationsG1(r, p.G1Derived.Tau) tauT2, tauS2 := linearCombinationsG2(r, p.G2Derived.Tau) - - nc := runtime.NumCPU() - var ( - alphaTS, betaTS curve.G1Affine - wg sync.WaitGroup - ) - - mulExpG1 := func(v *curve.G1Affine, points []curve.G1Affine, nbTasks int) { - if _, err := v.MultiExp(points, r[:len(points)], ecc.MultiExpConfig{NbTasks: nbTasks}); err != nil { - panic(err) - } - wg.Done() - } - - if nc >= 2 { - wg.Add(2) // small tasks over 𝔾₁ - go mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc/2) - mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc-nc/2) - wg.Wait() - } else { - mulExpG1(&alphaTS, p.G1Derived.AlphaTau[1:], nc) - mulExpG1(&betaTS, p.G1Derived.BetaTau[1:], nc) - } + alphaTT, alphaTS := linearCombinationsG1(r, p.G1Derived.AlphaTau) + betaTT, betaTS := linearCombinationsG1(r, p.G1Derived.BetaTau) if !sameRatioUnsafe(tauS1, tauT1, *p.Principal.Tau.updatedCommitment.g2, g2) { return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") @@ -237,89 +186,26 @@ func (p *phase1) 
Verify(previous *phase1) error { return errors.New("couldn't verify 𝔾₂ representations of the τⁱ") } - // for 1 ≤ i < N we want to check ατⁱ/τⁱ = α + // for 0 ≤ i < N we want to check the ατⁱ + // By well-formedness checked by ReadFrom, we assume that ατ⁰ = α + // For 0 < i < N we check that ατⁱ/ατⁱ⁻¹ = τ, since we have a representation of τ in 𝔾₂ // with a similar bi-linearity argument as above we can do this with a single pairing check - // Note that the check at i = 0 is part of the well-formedness requirement and is not checked here, - // but guaranteed by ReadFrom. - if !sameRatioUnsafe(alphaTS, tauS1, *p.Principal.Alpha.updatedCommitment.g2, g2) { + if !sameRatioUnsafe(alphaTS, alphaTT, *p.Principal.Tau.updatedCommitment.g2, g2) { return errors.New("couldn't verify the ατⁱ") } - if !sameRatioUnsafe(betaTS, tauS1, *p.Principal.Beta.updatedCommitment.g2, g2) { + if !sameRatioUnsafe(betaTS, betaTT, *p.Principal.Tau.updatedCommitment.g2, g2) { return errors.New("couldn't verify the βτⁱ") } return nil } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. 
-func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) - - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") - } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") - } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") - } - - // Check for valid updates using previous parameters - // - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") - } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") - } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") - } - if !sameRatio(contribution.PublicKeys.Beta.SG, 
contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") +func (p *Phase1) hash() []byte { + if len(p.Challenge) == 0 { + panic("challenge field missing") } - - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") - } - - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") - } - } - - return nil -} - -func (phase1 *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + p.writeTo(sha) return sha.Sum(nil) } diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 63b717cac4..797af8119b 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -168,17 +168,17 @@ func (circuit *Circuit) Define(api frontend.API) error { return nil } -func (phase1 *Phase1) clone() Phase1 { +func (p *Phase1) clone() Phase1 { r := Phase1{} - 
r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) + r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, p.Parameters.G1.Tau...) + r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, p.Parameters.G1.AlphaTau...) + r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, p.Parameters.G1.BetaTau...) - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta + r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, p.Parameters.G2.Tau...) + r.Parameters.G2.Beta = p.Parameters.G2.Beta - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) + r.PublicKeys = p.PublicKeys + r.Hash = append(r.Hash, p.Hash...) return r } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 6a446e3236..c1a334b739 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -300,6 +300,15 @@ func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) er return nil } +// setEmpty does not provide proofs, only sets the value to [1] +func (x *valueUpdate) setEmpty(g1Only bool) { + _, _, g1, g2 := curve.Generators() + x.updatedCommitment.g1.Set(&g1) + if !g1Only { + x.updatedCommitment.g2 = &g2 + } +} + func toRefs[T any](s []T) []*T { res := make([]*T, len(s)) for i := range s { @@ -328,3 +337,21 @@ func areInSubGroupG2(s []curve.G2Affine) bool { func truncate[T any](s []T) []T { return s[:len(s)-1] } + +func eraseBigInts(i ...*big.Int) { + for _, i := range i { + if i != nil { + for j := range i.Bits() { + i.Bits()[j] = 0 + } + } + } +} + +func eraseFrVectors(v ...[]fr.Element) { + for _, v := range v { + for i := range v { + v[i].SetZero() + } + } +} From 
4601ed7be8b5acdeff97deee173ff1bd380bca07 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 14 Oct 2024 17:34:51 -0500 Subject: [PATCH 009/105] refactor initPhase1 --- .../groth16/bn254/mpcsetup/marshal_test.go | 2 +- backend/groth16/bn254/mpcsetup/phase1.go | 49 +++++++------------ backend/groth16/bn254/mpcsetup/setup_test.go | 8 +-- 3 files changed, 22 insertions(+), 37 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal_test.go b/backend/groth16/bn254/mpcsetup/marshal_test.go index 386e3faf66..c8d7a6b004 100644 --- a/backend/groth16/bn254/mpcsetup/marshal_test.go +++ b/backend/groth16/bn254/mpcsetup/marshal_test.go @@ -34,7 +34,7 @@ func TestContributionSerialization(t *testing.T) { assert := require.New(t) // Phase 1 - srs1 := InitPhase1(9) + srs1 := NewPhase1(9) srs1.Contribute() assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 1099d727c2..7eddd73e78 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -84,46 +84,31 @@ func (p *Phase1) Contribute() { p.Challenge = challenge } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before +// NewPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before // any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { +func NewPhase1(power int) (phase1 Phase1) { N := int(math.Pow(2, float64(power))) _, _, g1, g2 := curve.Generators() phase1.Challenge = []byte{0} - phase1.Principal.Alpha.setEmpty() - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) + phase1.Principal.Alpha.setEmpty(true) + phase1.Principal.Beta.setEmpty(true) + phase1.Principal.Tau.setEmpty(false) + + phase1.G1Derived.Tau = make([]curve.G1Affine, 2*N-1) + phase1.G2Derived.Tau = make([]curve.G2Affine, N) + phase1.G1Derived.AlphaTau = make([]curve.G1Affine, N) + phase1.G1Derived.BetaTau = make([]curve.G1Affine, N) + for i := range phase1.G1Derived.Tau { + phase1.G1Derived.Tau[i].Set(&g1) + } + for i := range phase1.G2Derived.Tau { + phase1.G2Derived.Tau[i].Set(&g2) + phase1.G1Derived.AlphaTau[i].Set(&g1) + phase1.G1Derived.BetaTau[i].Set(&g1) } - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - return } diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 797af8119b..3689969c69 100644 --- 
a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -40,7 +40,7 @@ func TestSetupCircuit(t *testing.T) { assert := require.New(t) - srs1 := InitPhase1(power) + srs1 := NewPhase1(power) // Make and verify contributions for phase1 for i := 1; i < nContributionsPhase1; i++ { @@ -104,12 +104,12 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + _ = NewPhase1(power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + srs1 := NewPhase1(power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -120,7 +120,7 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) + srs1 := NewPhase1(power) srs1.Contribute() var myCircuit Circuit From 9b82466305117a832de6fb3ee88981fd9683d24d Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Tue, 15 Oct 2024 15:45:13 -0500 Subject: [PATCH 010/105] feat phase1 marshal --- backend/groth16/bn254/mpcsetup/marshal.go | 98 ++++++++++--------- .../groth16/bn254/mpcsetup/marshal_test.go | 3 +- backend/groth16/bn254/mpcsetup/phase1.go | 51 +++++----- backend/groth16/bn254/mpcsetup/utils.go | 24 +---- 4 files changed, 82 insertions(+), 94 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 2985022bbd..34c7b95a23 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -21,36 +21,48 @@ import ( "io" ) -// WriteTo implements io.WriterTo -func (p *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := p.writeTo(writer) - if err != nil { - return n, err +func appendToSlice[T any](s []interface{}, v []T) []interface{} { + for i := range v { + s = append(s, v[i]) } - nBytes, err := writer.Write(p.Hash) - return int64(nBytes) + n, err + return s } -func (p 
*Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &p.PublicKeys.Tau.SG, - &p.PublicKeys.Tau.SXG, - &p.PublicKeys.Tau.XR, - &p.PublicKeys.Alpha.SG, - &p.PublicKeys.Alpha.SXG, - &p.PublicKeys.Alpha.XR, - &p.PublicKeys.Beta.SG, - &p.PublicKeys.Beta.SXG, - &p.PublicKeys.Beta.XR, - p.Parameters.G1.Tau, - p.Parameters.G1.AlphaTau, - p.Parameters.G1.BetaTau, - p.Parameters.G2.Tau, - &p.Parameters.G2.Beta, +func (p *Phase1) toSlice() []interface{} { + N := len(p.G2Derived.Tau) + estimatedNbElems := 5*N + 5 + // size N 1 + // commitment, proof of knowledge, and 𝔾₁ representation for τ, α, and β 9 + // 𝔾₂ representation for τ and β 2 + // [τⁱ]₁ for 2 ≤ i ≤ 2N-2 2N-3 + // [τⁱ]₂ for 2 ≤ i ≤ N-1 N-2 + // [ατⁱ]₁ for 1 ≤ i ≤ N-1 N-1 + // [βτⁱ]₁ for 1 ≤ i ≤ N-1 N-1 + + toEncode := make([]interface{}, 1, estimatedNbElems) + + toEncode[0] = N + toEncode = p.Principal.Tau.appendRefsToSlice(toEncode) + toEncode = p.Principal.Alpha.appendRefsToSlice(toEncode) + toEncode = p.Principal.Beta.appendRefsToSlice(toEncode) + + toEncode = appendToSlice(toEncode, p.G1Derived.Tau[2:]) + toEncode = appendToSlice(toEncode, p.G2Derived.Tau[2:]) + toEncode = appendToSlice(toEncode, p.G1Derived.BetaTau[1:]) + toEncode = appendToSlice(toEncode, p.G1Derived.AlphaTau[1:]) + + if len(toEncode) != estimatedNbElems { + panic("incorrect length estimate") } + return toEncode +} + +// WriteTo implements io.WriterTo +func (p *Phase1) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.toSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } @@ -60,32 +72,21 @@ func (p *Phase1) writeTo(writer io.Writer) (int64, error) { // ReadFrom implements io.ReaderFrom func (p *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &p.PublicKeys.Tau.SG, - &p.PublicKeys.Tau.SXG, - &p.PublicKeys.Tau.XR, - &p.PublicKeys.Alpha.SG, - &p.PublicKeys.Alpha.SXG, - 
&p.PublicKeys.Alpha.XR, - &p.PublicKeys.Beta.SG, - &p.PublicKeys.Beta.SXG, - &p.PublicKeys.Beta.XR, - &p.Parameters.G1.Tau, - &p.Parameters.G1.AlphaTau, - &p.Parameters.G1.BetaTau, - &p.Parameters.G2.Tau, - &p.Parameters.G2.Beta, + var N uint64 + dec := curve.NewDecoder(reader) + if err := dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - dec := curve.NewDecoder(reader) - for _, v := range toEncode { + p.Initialize(N) + toDecode := p.toSlice() + + for _, v := range toDecode[1:] { // we've already decoded N if err := dec.Decode(v); err != nil { return dec.BytesRead(), err } } - p.Hash = make([]byte, 32) - nBytes, err := reader.Read(p.Hash) - return dec.BytesRead() + int64(nBytes), err + return dec.BytesRead(), nil } // WriteTo implements io.WriterTo @@ -179,3 +180,12 @@ func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { return dec.BytesRead(), nil } + +// appendRefsToSlice appends references to values in x to s +func (x *valueUpdate) appendRefsToSlice(s []interface{}) []interface{} { + s = append(s, &x.contributionCommitment, &x.contributionPok, &x.updatedCommitment.g1) + if x.updatedCommitment.g2 != nil { + return append(s, x.updatedCommitment.g2) + } + return s +} diff --git a/backend/groth16/bn254/mpcsetup/marshal_test.go b/backend/groth16/bn254/mpcsetup/marshal_test.go index c8d7a6b004..9b69bb8e91 100644 --- a/backend/groth16/bn254/mpcsetup/marshal_test.go +++ b/backend/groth16/bn254/mpcsetup/marshal_test.go @@ -34,7 +34,8 @@ func TestContributionSerialization(t *testing.T) { assert := require.New(t) // Phase 1 - srs1 := NewPhase1(9) + var srs1 Phase1 + srs1.Initialize(1 << 9) srs1.Contribute() assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 7eddd73e78..ba2c082902 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -23,7 +23,6 @@ import 
( "fmt" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" - "math" "math/big" ) @@ -37,8 +36,8 @@ type Phase1 struct { } G1Derived struct { Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} + AlphaTau []curve.G1Affine // {[ατ⁰]₁, [ατ¹]₁, [ατ²]₁, …, [ατⁿ⁻¹]₁} + BetaTau []curve.G1Affine // {[βτ⁰]₁, [βτ¹]₁, [βτ²]₁, …, [βτⁿ⁻¹]₁} } G2Derived struct { Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} @@ -84,29 +83,26 @@ func (p *Phase1) Contribute() { p.Challenge = challenge } -// NewPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). -func NewPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - +// Initialize an empty object of size N +func (p *Phase1) Initialize(N uint64) { _, _, g1, g2 := curve.Generators() - phase1.Challenge = []byte{0} - phase1.Principal.Alpha.setEmpty(true) - phase1.Principal.Beta.setEmpty(true) - phase1.Principal.Tau.setEmpty(false) + p.Challenge = []byte{0} + p.Principal.Alpha.setEmpty(true) + p.Principal.Beta.setEmpty(false) + p.Principal.Tau.setEmpty(false) - phase1.G1Derived.Tau = make([]curve.G1Affine, 2*N-1) - phase1.G2Derived.Tau = make([]curve.G2Affine, N) - phase1.G1Derived.AlphaTau = make([]curve.G1Affine, N) - phase1.G1Derived.BetaTau = make([]curve.G1Affine, N) - for i := range phase1.G1Derived.Tau { - phase1.G1Derived.Tau[i].Set(&g1) + p.G1Derived.Tau = make([]curve.G1Affine, 2*N-1) + p.G2Derived.Tau = make([]curve.G2Affine, N) + p.G1Derived.AlphaTau = make([]curve.G1Affine, N) + p.G1Derived.BetaTau = make([]curve.G1Affine, N) + for i := range p.G1Derived.Tau { + p.G1Derived.Tau[i].Set(&g1) } - for i := range phase1.G2Derived.Tau { - phase1.G2Derived.Tau[i].Set(&g2) - phase1.G1Derived.AlphaTau[i].Set(&g1) - 
phase1.G1Derived.BetaTau[i].Set(&g1) + for i := range p.G2Derived.Tau { + p.G2Derived.Tau[i].Set(&g2) + p.G1Derived.AlphaTau[i].Set(&g1) + p.G1Derived.BetaTau[i].Set(&g1) } return @@ -158,10 +154,10 @@ func (p *Phase1) Verify(previous *Phase1) error { r := linearCombCoeffs(len(p.G1Derived.Tau) - 1) // the longest of all lengths // will be reusing the coefficient TODO @Tabaie make sure that's okay - tauT1, tauS1 := linearCombinationsG1(r, p.G1Derived.Tau) - tauT2, tauS2 := linearCombinationsG2(r, p.G2Derived.Tau) - alphaTT, alphaTS := linearCombinationsG1(r, p.G1Derived.AlphaTau) - betaTT, betaTS := linearCombinationsG1(r, p.G1Derived.BetaTau) + tauT1, tauS1 := linearCombinationsG1(p.G1Derived.Tau[1:], r) + tauT2, tauS2 := linearCombinationsG2(p.G2Derived.Tau[1:], r) + alphaTT, alphaTS := linearCombinationsG1(p.G1Derived.AlphaTau, r) + betaTT, betaTS := linearCombinationsG1(p.G1Derived.BetaTau, r) if !sameRatioUnsafe(tauS1, tauT1, *p.Principal.Tau.updatedCommitment.g2, g2) { return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") @@ -191,6 +187,7 @@ func (p *Phase1) hash() []byte { panic("challenge field missing") } sha := sha256.New() - p.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index c1a334b739..f6ba644893 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -148,29 +148,9 @@ func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { return } -// truncated = ∑ rᵢAᵢ, shifted = ∑ rᵢAᵢ₊₁∈𝔾₁ -func linearCombinationG1(r []fr.Element, A []curve.G1Affine, nbTasks int) curve.G1Affine { - n := len(A) - r = r[:n-1] - var res curve.G1Affine - res.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - shifted.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return -} - -// truncated = ∑ rᵢAᵢ, shifted = ∑ rᵢAᵢ₊₁∈𝔾₂ -func linearCombinationG2(r []fr.Element, A []curve.G2Affine) 
(truncated, shifted curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r = r[:n-1] - truncated.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - shifted.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return -} - // linearCombinationsG1 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i // Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) -func linearCombinationsG1(rPowers []fr.Element, A []curve.G1Affine) (truncated, shifted curve.G1Affine) { +func linearCombinationsG1(A []curve.G1Affine, rPowers []fr.Element) (truncated, shifted curve.G1Affine) { // the common section, 1 to N-2 var common curve.G1Affine common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}) // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] @@ -187,7 +167,7 @@ func linearCombinationsG1(rPowers []fr.Element, A []curve.G1Affine) (truncated, // linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i // Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) -func linearCombinationsG2(rPowers []fr.Element, A []curve.G2Affine) (truncated, shifted curve.G2Affine) { +func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { // the common section, 1 to N-2 var common curve.G2Affine common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}) // A[1] + r.A[2] + ... 
+ rᴺ⁻³.A[N-2] From c352cd9611025c88e65b3c4459b417ef04aac80b Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 18 Oct 2024 17:16:42 -0500 Subject: [PATCH 011/105] refactor "Parameters" --- backend/groth16/bn254/mpcsetup/marshal.go | 135 ++++++++----- backend/groth16/bn254/mpcsetup/phase1.go | 192 ++++++++++++------- backend/groth16/bn254/mpcsetup/phase2.go | 1 + backend/groth16/bn254/mpcsetup/setup_test.go | 16 +- backend/groth16/bn254/mpcsetup/utils.go | 96 +++++----- 5 files changed, 265 insertions(+), 175 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 34c7b95a23..8f249bbda6 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -21,72 +21,58 @@ import ( "io" ) -func appendToSlice[T any](s []interface{}, v []T) []interface{} { +func appendRefs[T any](s []interface{}, v []T) []interface{} { for i := range v { - s = append(s, v[i]) + s = append(s, &v[i]) } return s } -func (p *Phase1) toSlice() []interface{} { - N := len(p.G2Derived.Tau) - estimatedNbElems := 5*N + 5 - // size N 1 - // commitment, proof of knowledge, and 𝔾₁ representation for τ, α, and β 9 - // 𝔾₂ representation for τ and β 2 - // [τⁱ]₁ for 2 ≤ i ≤ 2N-2 2N-3 - // [τⁱ]₂ for 2 ≤ i ≤ N-1 N-2 - // [ατⁱ]₁ for 1 ≤ i ≤ N-1 N-1 - // [βτⁱ]₁ for 1 ≤ i ≤ N-1 N-1 - - toEncode := make([]interface{}, 1, estimatedNbElems) - - toEncode[0] = N - toEncode = p.Principal.Tau.appendRefsToSlice(toEncode) - toEncode = p.Principal.Alpha.appendRefsToSlice(toEncode) - toEncode = p.Principal.Beta.appendRefsToSlice(toEncode) - - toEncode = appendToSlice(toEncode, p.G1Derived.Tau[2:]) - toEncode = appendToSlice(toEncode, p.G2Derived.Tau[2:]) - toEncode = appendToSlice(toEncode, p.G1Derived.BetaTau[1:]) - toEncode = appendToSlice(toEncode, p.G1Derived.AlphaTau[1:]) - - if len(toEncode) != estimatedNbElems { - panic("incorrect length estimate") +// proofRefsSlice 
produces a slice consisting of references to all proof sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (p *Phase1) proofRefsSlice() []interface{} { + return []interface{}{ + &p.proofs.Tau.contributionCommitment, + &p.proofs.Tau.contributionPok, + &p.proofs.Alpha.contributionCommitment, + &p.proofs.Alpha.contributionPok, + &p.proofs.Beta.contributionCommitment, + &p.proofs.Beta.contributionPok, } - - return toEncode } // WriteTo implements io.WriterTo -func (p *Phase1) WriteTo(writer io.Writer) (int64, error) { +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + + if n, err = p.parameters.WriteTo(writer); err != nil { + return + } enc := curve.NewEncoder(writer) - for _, v := range p.toSlice() { - if err := enc.Encode(v); err != nil { - return enc.BytesWritten(), err + for _, v := range p.proofRefsSlice() { + if err = enc.Encode(v); err != nil { + return n + enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + return n + enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (p *Phase1) ReadFrom(reader io.Reader) (int64, error) { - var N uint64 - dec := curve.NewDecoder(reader) - if err := dec.Decode(&N); err != nil { - return dec.BytesRead(), err - } +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { - p.Initialize(N) - toDecode := p.toSlice() + if n, err = p.parameters.ReadFrom(reader); err != nil { + return + } - for _, v := range toDecode[1:] { // we've already decoded N - if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + dec := curve.NewDecoder(reader) + for _, v := range p.proofRefsSlice() { // we've already decoded N + if err = dec.Decode(v); err != nil { + return n + dec.BytesRead(), err } } - return dec.BytesRead(), nil + return n + dec.BytesRead(), nil } // WriteTo implements io.WriterTo @@ 
-181,11 +167,56 @@ func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { return dec.BytesRead(), nil } -// appendRefsToSlice appends references to values in x to s -func (x *valueUpdate) appendRefsToSlice(s []interface{}) []interface{} { - s = append(s, &x.contributionCommitment, &x.contributionPok, &x.updatedCommitment.g1) - if x.updatedCommitment.g2 != nil { - return append(s, x.updatedCommitment.g2) +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []interface{} { + N := len(c.G2.Tau) + estimatedNbElems := 5*N - 1 + // size N 1 + // 𝔾₂ representation for β 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]interface{}, 1, estimatedNbElems) + refs[0] = N + + refs = appendRefs(refs, c.G1.Tau[1:]) + refs = appendRefs(refs, c.G2.Tau[1:]) + refs = appendRefs(refs, c.G1.BetaTau) + refs = appendRefs(refs, c.G1.AlphaTau) + + if len(refs) != estimatedNbElems { + panic("incorrect length estimate") } - return s + + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { + if err := enc.Encode(v); err != nil { + return enc.BytesWritten(), err + } + } + return enc.BytesWritten(), nil +} + +// ReadFrom implements io.ReaderFrom +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 + dec := curve.NewDecoder(reader) + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err + } + + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { + return dec.BytesRead(), err + } + } + return dec.BytesRead(), nil } diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index ba2c082902..4b8df877e7 
100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -1,4 +1,4 @@ -// Copyright 2020 ConsenSys Software Inc. +// Copyright 2020 Consensys Software Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -31,81 +31,126 @@ import ( // // Also known as "Powers of Tau" type Phase1 struct { - Principal struct { // "main" contributions + proofs struct { // "main" contributions Tau, Alpha, Beta valueUpdate } - G1Derived struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {[ατ⁰]₁, [ατ¹]₁, [ατ²]₁, …, [ατⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {[βτ⁰]₁, [βτ¹]₁, [βτ²]₁, …, [βτⁿ⁻¹]₁} - } - G2Derived struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - } - Challenge []byte // Hash of the transcript PRIOR to this participant + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } // Contribute contributes randomness to the Phase1 object. This mutates Phase1. // p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. 
func (p *Phase1) Contribute() { - N := len(p.G2Derived.Tau) - challenge := p.hash() + p.Challenge = p.hash() // Generate main value updates - var tau, alpha, beta *big.Int - p.Principal.Tau, tau = updateValue(p.Principal.Tau.updatedCommitment, challenge, 1) - p.Principal.Alpha, alpha = updateValue(p.Principal.Alpha.updatedCommitment, challenge, 2) - p.Principal.Beta, beta = updateValue(p.Principal.Beta.updatedCommitment, challenge, 3) + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + p.proofs.Tau, p.parameters.G1.Tau[1], tauContrib = updateValue(p.parameters.G1.Tau[1], p.Challenge, 1) + p.proofs.Alpha, p.parameters.G1.AlphaTau[0], alphaContrib = updateValue(p.parameters.G1.AlphaTau[0], p.Challenge, 2) + p.proofs.Beta, p.parameters.G1.BetaTau[0], betaContrib = updateValue(p.parameters.G1.BetaTau[0], p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib, true) +} - defer eraseBigInts(tau, alpha, beta) +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. 
+type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} - // Compute τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-2) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() +} - defer eraseFrVectors(taus, alphaTau, betaTau) +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + for i := range c.G1.Tau { + c.G1.Tau[i] = c.G1.Tau[0] + } + for i := range c.G1.AlphaTau { + c.G1.AlphaTau[i] = c.G1.AlphaTau[0] + c.G1.BetaTau[i] = c.G1.AlphaTau[0] + c.G2.Tau[i] = c.G2.Tau[0] + } + c.G2.Beta = c.G2.Tau[0] +} - alphaTau[0].SetBigInt(alpha) - betaTau[0].SetBigInt(beta) - for i := 1; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alphaTau[0]) - betaTau[i].Mul(&taus[i], &betaTau[0]) +// from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element, principalG1sPrecomputed bool) { + i0 := 0 + if principalG1sPrecomputed { + i0 = 1 } - // Update using previous parameters // TODO @gbotrel working with jacobian points here will help with perf. 
-	scaleG1InPlace(p.G1Derived.Tau, taus)
-	scaleG2InPlace(p.G2Derived.Tau, taus[0:N])
-	scaleG1InPlace(p.G1Derived.AlphaTau, alphaTau)
-	scaleG1InPlace(p.G1Derived.BetaTau, betaTau)
-
-	p.Challenge = challenge
-}

-// Initialize an empty object of size N
-func (p *Phase1) Initialize(N uint64) {
-	_, _, g1, g2 := curve.Generators()
+	tauUpdates := powers(tauUpdate, len(c.G1.Tau))
+	// saving exactly 3 scalar muls among millions. Not a significant gain but might as well.
+	scaleG1InPlace(c.G1.Tau[i0+1:], tauUpdates[i0+1:]) // first element remains 1. second element may have been precomputed.
+	scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:])
-	p.Challenge = []byte{0}
-	p.Principal.Alpha.setEmpty(true)
-	p.Principal.Beta.setEmpty(false)
-	p.Principal.Tau.setEmpty(false)
+	alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau))
+	alphaUpdates[0].Set(alphaUpdate)
+	for i := i0; i < len(alphaUpdates); i++ {
+		alphaUpdates[i].Mul(&tauUpdates[i], &alphaUpdates[0])
+	}
+	scaleG1InPlace(c.G1.AlphaTau[i0:], alphaUpdates[i0:]) // first element may have been precomputed
-	p.G1Derived.Tau = make([]curve.G1Affine, 2*N-1)
-	p.G2Derived.Tau = make([]curve.G2Affine, N)
-	p.G1Derived.AlphaTau = make([]curve.G1Affine, N)
-	p.G1Derived.BetaTau = make([]curve.G1Affine, N)
-	for i := range p.G1Derived.Tau {
-		p.G1Derived.Tau[i].Set(&g1)
+	betaUpdates := make([]fr.Element, len(c.G1.BetaTau))
+	betaUpdates[0].Set(betaUpdate)
+	for i := i0; i < len(betaUpdates); i++ {
+		betaUpdates[i].Mul(&tauUpdates[i], &betaUpdates[0])
+	}
-	for i := range p.G2Derived.Tau {
-		p.G2Derived.Tau[i].Set(&g2)
-		p.G1Derived.AlphaTau[i].Set(&g1)
-		p.G1Derived.BetaTau[i].Set(&g1)
+	scaleG1InPlace(c.G1.BetaTau[i0:], betaUpdates[i0:])
+
+	var betaUpdateI big.Int
+	betaUpdate.BigInt(&betaUpdateI)
+	c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI)
+}
+
+// Seal performs the final contribution and outputs the final parameters.
+// No randomization is performed at this step.
+// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object. +func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + var ( + bb bytes.Buffer + err error + ) + bb.Write(p.hash()) + bb.Write(beaconChallenge) + + newContribs := make([]fr.Element, 3) + // cryptographically unlikely for this to be run more than once + for newContribs[0].IsZero() || newContribs[1].IsZero() || newContribs[2].IsZero() { + if newContribs, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), 3); err != nil { + panic(err) + } + bb.WriteByte('=') // padding just so that the hash is different next time } - return + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2], false) + + return p.parameters } func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { @@ -128,20 +173,29 @@ func (p *Phase1) Verify(previous *Phase1) error { p.Challenge = prevHash } - if err := p.Principal.Tau.verify(previous.Principal.Tau.updatedCommitment, p.Challenge, 1); err != nil { + if err := p.proofs.Tau.verify( + pair{previous.parameters.G1.Tau[1], &previous.parameters.G2.Tau[1]}, + pair{p.parameters.G1.Tau[1], &p.parameters.G2.Tau[1]}, + p.Challenge, 1); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := p.Principal.Alpha.verify(previous.Principal.Alpha.updatedCommitment, p.Challenge, 2); err != nil { + if err := p.proofs.Alpha.verify( + pair{previous.parameters.G1.AlphaTau[0], nil}, + pair{p.parameters.G1.AlphaTau[0], nil}, + p.Challenge, 2); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := p.Principal.Beta.verify(previous.Principal.Beta.updatedCommitment, p.Challenge, 3); err != nil { + if err := p.proofs.Beta.verify( + pair{previous.parameters.G1.BetaTau[0], 
&previous.parameters.G2.Beta}, + pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, + p.Challenge, 3); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } - if !areInSubGroupG1(p.G1Derived.Tau) || !areInSubGroupG1(p.G1Derived.BetaTau) || !areInSubGroupG1(p.G1Derived.AlphaTau) { + if !areInSubGroupG1(p.parameters.G1.Tau[2:]) || !areInSubGroupG1(p.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(p.parameters.G1.AlphaTau[1:]) { return errors.New("derived values 𝔾₁ subgroup check failed") } - if !areInSubGroupG2(p.G2Derived.Tau) { + if !areInSubGroupG2(p.parameters.G2.Tau[2:]) { return errors.New("derived values 𝔾₂ subgroup check failed") } @@ -151,19 +205,19 @@ func (p *Phase1) Verify(previous *Phase1) error { // i.e. e(τⁱ⁺¹,[1]₂) = e(τⁱ,[τ]₂). Due to bi-linearity we can instead check // e(∑rⁱ⁻¹τⁱ⁺¹,[1]₂) = e(∑rⁱ⁻¹τⁱ,[τ]₂), which is tantamount to the check // ∑rⁱ⁻¹τⁱ⁺¹ / ∑rⁱ⁻¹τⁱ = τ - r := linearCombCoeffs(len(p.G1Derived.Tau) - 1) // the longest of all lengths + r := linearCombCoeffs(len(p.parameters.G1.Tau) - 1) // the longest of all lengths // will be reusing the coefficient TODO @Tabaie make sure that's okay - tauT1, tauS1 := linearCombinationsG1(p.G1Derived.Tau[1:], r) - tauT2, tauS2 := linearCombinationsG2(p.G2Derived.Tau[1:], r) - alphaTT, alphaTS := linearCombinationsG1(p.G1Derived.AlphaTau, r) - betaTT, betaTS := linearCombinationsG1(p.G1Derived.BetaTau, r) + tauT1, tauS1 := linearCombinationsG1(p.parameters.G1.Tau[1:], r) + tauT2, tauS2 := linearCombinationsG2(p.parameters.G2.Tau[1:], r) + alphaTT, alphaTS := linearCombinationsG1(p.parameters.G1.AlphaTau, r) + betaTT, betaTS := linearCombinationsG1(p.parameters.G1.BetaTau, r) - if !sameRatioUnsafe(tauS1, tauT1, *p.Principal.Tau.updatedCommitment.g2, g2) { + if !sameRatioUnsafe(tauS1, tauT1, p.parameters.G2.Tau[1], g2) { return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") } - if !sameRatioUnsafe(p.Principal.Tau.updatedCommitment.g1, g1, tauS2, tauT2) { + if 
!sameRatioUnsafe(p.parameters.G1.Tau[1], g1, tauS2, tauT2) { return errors.New("couldn't verify 𝔾₂ representations of the τⁱ") } @@ -172,13 +226,15 @@ func (p *Phase1) Verify(previous *Phase1) error { // For 0 < i < N we check that ατⁱ/ατⁱ⁻¹ = τ, since we have a representation of τ in 𝔾₂ // with a similar bi-linearity argument as above we can do this with a single pairing check - if !sameRatioUnsafe(alphaTS, alphaTT, *p.Principal.Tau.updatedCommitment.g2, g2) { + if !sameRatioUnsafe(alphaTS, alphaTT, p.parameters.G2.Tau[1], g2) { return errors.New("couldn't verify the ατⁱ") } - if !sameRatioUnsafe(betaTS, betaTT, *p.Principal.Tau.updatedCommitment.g2, g2) { + if !sameRatioUnsafe(betaTS, betaTT, p.parameters.G2.Tau[1], g2) { return errors.New("couldn't verify the βτⁱ") } + // TODO @Tabaie combine all pairing checks except the second one + return nil } diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 3fcafb30da..058489e28f 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -51,6 +51,7 @@ type Phase2 struct { } func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { + srs := srs1.Parameters size := len(srs.G1.AlphaTau) if size < r1cs.GetNbConstraints() { diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 3689969c69..cb71d9ea6a 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -40,12 +40,13 @@ func TestSetupCircuit(t *testing.T) { assert := require.New(t) - srs1 := NewPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) // Make and verify contributions for phase1 for i := 1; i < nContributionsPhase1; i++ { // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. + // add its contribution and send back to coordinator. 
prev := srs1.clone() srs1.Contribute() @@ -66,7 +67,7 @@ func TestSetupCircuit(t *testing.T) { // Make and verify contributions for phase1 for i := 1; i < nContributionsPhase2; i++ { // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. + // add its contribution and send back to coordinator. prev := srs2.clone() srs2.Contribute() @@ -103,13 +104,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = NewPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := NewPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -120,7 +123,8 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := NewPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) srs1.Contribute() var myCircuit Circuit diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index f6ba644893..a868b43f68 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -18,7 +18,6 @@ package mpcsetup import ( "bytes" - "crypto/rand" "errors" "math/big" "math/bits" @@ -73,27 +72,42 @@ func bitReverse[T any](a []T) { } func linearCombCoeffs(n int) []fr.Element { - a, err := rand.Int(rand.Reader, fr.Modulus()) - if err != nil { + var a fr.Element + if _, err := a.SetRandom(); err != nil { panic(err) } - return powers(a, n) + return powers(&a, n) } -// Returns [1, a, a², ..., aⁿ⁻¹ ] -func powers(a *big.Int, n int) []fr.Element { +func powersI(a *big.Int, n int) []fr.Element { var aMont fr.Element aMont.SetBigInt(a) + return powers(&aMont, n) +} + +// Returns [1, a, a², ..., aⁿ⁻¹ ] +func powers(a *fr.Element, n int) []fr.Element { + result := make([]fr.Element, n) - result[0] = 
fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &aMont) + if n >= 1 { + result[0] = fr.NewElement(1) + } + if n >= 2 { + result[1].Set(a) + } + for i := 2; i < n; i++ { + result[i].Mul(&result[i-1], a) } return result } // Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -104,7 +118,12 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { } // Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -217,69 +236,66 @@ func (p *pair) validUpdate() bool { type valueUpdate struct { contributionCommitment curve.G1Affine // x or [Xⱼ]₁ contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ - updatedCommitment pair // [X₁..Xⱼ] + //updatedCommitment pair // [X₁..Xⱼ] } // updateValue produces values associated with contribution to an existing value. // if prevCommitment contains only a 𝔾₁ value, then so will updatedCommitment // the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. 
-func updateValue(prevCommitment pair, challenge []byte, dst byte) (valueUpdate, *big.Int) { - var x valueUpdate - contributionValue, err := rand.Int(rand.Reader, fr.Modulus()) - - if err != nil { +func updateValue(prev curve.G1Affine, challenge []byte, dst byte) (proof valueUpdate, updated curve.G1Affine, contributionValue fr.Element) { + if _, err := contributionValue.SetRandom(); err != nil { panic(err) } + var contributionValueI big.Int + contributionValue.BigInt(&contributionValueI) _, _, g1, _ := curve.Generators() - x.contributionCommitment.ScalarMultiplication(&g1, contributionValue) - x.updatedCommitment.g1.ScalarMultiplication(&prevCommitment.g1, contributionValue) - if prevCommitment.g2 != nil { // TODO make sure this is correct - x.updatedCommitment.g2 = new(curve.G2Affine).ScalarMultiplication(prevCommitment.g2, contributionValue) - } + proof.contributionCommitment.ScalarMultiplication(&g1, &contributionValueI) + updated.ScalarMultiplication(&prev, &contributionValueI) // proof of knowledge to commitment. Algorithm 3 from section 3.7 - pokBase := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // r - x.contributionPok.ScalarMultiplication(&pokBase, contributionValue) + pokBase := genR(proof.contributionCommitment, updated, challenge, dst) // r + proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) - return x, contributionValue + return } // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. // prevCommitment is assumed to be valid. No subgroup check and the like. 
-func (x *valueUpdate) verify(prevCommitment pair, challenge []byte, dst byte) error { - noG2 := prevCommitment.g2 == nil - if noG2 != (x.updatedCommitment.g2 == nil) { +func (x *valueUpdate) verify(prev, updated pair, challenge []byte, dst byte) error { + noG2 := prev.g2 == nil + if noG2 != (updated.g2 == nil) { return errors.New("erasing or creating g2 values") } - if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !x.updatedCommitment.validUpdate() { + if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !updated.validUpdate() { return errors.New("contribution values subgroup check failed") } // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, x.updatedCommitment.g1, challenge, dst) // verification challenge in the form of a g2 base + r := genR(x.contributionCommitment, updated.g1, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r return errors.New("contribution proof of knowledge verification failed") } // check that the updated/previous ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatioUnsafe(x.updatedCommitment.g1, prevCommitment.g1, *x.updatedCommitment.g2, *prevCommitment.g2) { + if !noG2 && !sameRatioUnsafe(updated.g1, prev.g1, *updated.g2, *prev.g2) { return errors.New("g2 update inconsistent") } // now verify that updated₁/previous₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatioUnsafe(x.updatedCommitment.g1, prevCommitment.g1, x.contributionPok, r) { + if !sameRatioUnsafe(updated.g1, prev.g1, x.contributionPok, r) { return errors.New("g1 update inconsistent") } return nil } +/* // setEmpty does not provide proofs, only sets the value to [1] func (x *valueUpdate) setEmpty(g1Only bool) { _, _, g1, g2 := curve.Generators() @@ -287,7 +303,7 @@ func (x *valueUpdate) setEmpty(g1Only bool) { if !g1Only { x.updatedCommitment.g2 = &g2 } -} +}*/ func toRefs[T any](s []T) []*T { res := make([]*T, len(s)) @@ -317,21 +333,3 @@ func areInSubGroupG2(s []curve.G2Affine) bool { func truncate[T any](s []T) []T { return s[:len(s)-1] } - -func eraseBigInts(i ...*big.Int) { - for _, i := range i { - if i != nil { - for j := range i.Bits() { - i.Bits()[j] = 0 - } - } - } -} - -func eraseFrVectors(v ...[]fr.Element) { - for _, v := range v { - for i := range v { - v[i].SetZero() - } - } -} From 90f4ed78a18b02506cf9421a507c1c8d594ef7df Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 23 Oct 2024 16:11:56 -0500 Subject: [PATCH 012/105] chore some comments and name changes --- backend/groth16/bn254/mpcsetup/lagrange.go | 4 +- backend/groth16/bn254/mpcsetup/marshal.go | 4 +- backend/groth16/bn254/mpcsetup/phase2.go | 66 ++++++++++++-------- backend/groth16/bn254/mpcsetup/setup.go | 2 +- backend/groth16/bn254/mpcsetup/setup_test.go | 2 +- 5 files changed, 46 insertions(+), 32 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/lagrange.go b/backend/groth16/bn254/mpcsetup/lagrange.go index 886e489248..54bb692ed1 
100644 --- a/backend/groth16/bn254/mpcsetup/lagrange.go +++ b/backend/groth16/bn254/mpcsetup/lagrange.go @@ -86,7 +86,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -114,7 +114,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 8f249bbda6..b82c8a44ac 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -92,7 +92,7 @@ func (c *Phase2) writeTo(writer io.Writer) (int64, error) { &c.PublicKey.SXG, &c.PublicKey.XR, &c.Parameters.G1.Delta, - c.Parameters.G1.L, + c.Parameters.G1.PKK, c.Parameters.G1.Z, &c.Parameters.G2.Delta, } @@ -114,7 +114,7 @@ func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { &c.PublicKey.SXG, &c.PublicKey.XR, &c.Parameters.G1.Delta, - &c.Parameters.G1.L, + &c.Parameters.G1.PKK, &c.Parameters.G1.Z, &c.Parameters.G2.Delta, } diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 058489e28f..65f12402d2 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -19,6 +19,7 @@ package mpcsetup import ( "crypto/sha256" "errors" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" "math/big" curve "github.com/consensys/gnark-crypto/ecc/bn254" @@ -29,36 +30,47 @@ import ( type Phase2Evaluations struct { G1 struct { - A, B, VKK []curve.G1Affine + A 
[]curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + PKK, Z []curve.G1Affine // Z is the domain vanishing polynomial } G2 struct { Delta curve.G2Affine } + CommitmentKeys pedersen.ProvingKey } - PublicKey PublicKey + Sigmas []valueUpdate // commitment key secrets + PublicKey PublicKey // commitment to delta Hash []byte } +// Init is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +func (p *Phase2) Init(commons SrsCommons) { + +} + func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters + srs := srs1.parameters size := len(srs.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} + var c2 Phase2 accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() @@ -101,26 +113,28 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... 
+ coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... internal, secret, public := r1cs.GetNbVariables() nWires := internal + secret + public var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. 
+ // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -154,9 +168,9 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { bitReverse(c2.Parameters.G1.Z) c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - // Evaluate L + // Evaluate PKK nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) + c2.Parameters.G1.PKK = make([]curve.G1Affine, nPrivate) evals.G1.VKK = make([]curve.G1Affine, public) offset := public for i := 0; i < nWires; i++ { @@ -166,7 +180,7 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { if i < public { evals.G1.VKK[i].Set(&tmp) } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) + c2.Parameters.G1.PKK[i-offset].Set(&tmp) } } // Set δ public key @@ -201,9 +215,9 @@ func (c *Phase2) Contribute() { c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) + // Update PKK using δ⁻¹ + for i := 0; i < len(c.Parameters.G1.PKK); i++ { + c.Parameters.G1.PKK[i].ScalarMultiplication(&c.Parameters.G1.PKK[i], &deltaInvBI) } // 4. 
Hash contribution @@ -237,14 +251,14 @@ func verifyPhase2(current, contribution *Phase2) error { return errors.New("couldn't verify that [δ]₂ is based on previous contribution") } - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) + // Check for valid updates of PKK and Z using + L, prevL := merge(contribution.Parameters.G1.PKK, current.Parameters.G1.PKK) if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") + return errors.New("couldn't verify valid updates of PKK using δ⁻¹") } Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") + return errors.New("couldn't verify valid updates of PKK using δ⁻¹") } // Check hash of the contribution diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index 4946e9f597..0f7ff26d99 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -33,7 +33,7 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G1.Z = srs2.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L + pk.G1.K = srs2.Parameters.G1.PKK pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index cb71d9ea6a..b4b013c46b 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -190,7 +190,7 @@ func (p *Phase1) clone() Phase1 { func (phase2 *Phase2) clone() Phase2 { r := Phase2{} r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) 
+ r.Parameters.G1.PKK = append(r.Parameters.G1.PKK, phase2.Parameters.G1.PKK...) r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) r.Parameters.G2.Delta = phase2.Parameters.G2.Delta r.PublicKey = phase2.PublicKey From 7afabbfb1fab2b476774aa8a72f3f2c1c1b82402 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 24 Oct 2024 13:09:31 -0500 Subject: [PATCH 013/105] feat phase2 init --- backend/groth16/bn254/mpcsetup/phase2.go | 68 ++++++++++++++++-------- backend/groth16/bn254/setup.go | 26 ++++----- backend/groth16/internal/utils.go | 58 ++++++++++++++++++++ 3 files changed, 114 insertions(+), 38 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 65f12402d2..6d739dbe45 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -20,7 +20,9 @@ import ( "crypto/sha256" "errors" "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" + "github.com/consensys/gnark/backend/groth16/internal" "math/big" + "slices" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" @@ -32,7 +34,7 @@ type Phase2Evaluations struct { G1 struct { A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ - VKK []curve.G1Affine // VKK are the coefficients of the public witness + VKK []curve.G1Affine // VKK are the coefficients of the public witness (and commitments) } G2 struct { B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ @@ -42,17 +44,19 @@ type Phase2Evaluations struct { type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - PKK, Z []curve.G1Affine // Z is the domain vanishing polynomial + Delta curve.G1Affine + Z []curve.G1Affine // Z is the 
domain vanishing polynomial + PKK []curve.G1Affine // PKK are the coefficients of the private witness } G2 struct { Delta curve.G2Affine + Sigma curve.G2Affine } - CommitmentKeys pedersen.ProvingKey + CommitmentKeys []pedersen.ProvingKey } - Sigmas []valueUpdate // commitment key secrets - PublicKey PublicKey // commitment to delta - Hash []byte + Sigmas []valueUpdate // commitment key secrets + Delta valueUpdate // updates to delta + Hash []byte } // Init is to be run by the coordinator @@ -118,8 +122,8 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... - internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance @@ -157,6 +161,7 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { _, _, g1, g2 := curve.Generators() c2.Parameters.G1.Delta = g1 c2.Parameters.G2.Delta = g2 + c2.Parameters.G2.Sigma = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] @@ -168,35 +173,56 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { bitReverse(c2.Parameters.G1.Z) c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + c2.Sigmas = make([]valueUpdate, len(commitments)) + c2.Parameters.CommitmentKeys = make([]pedersen.ProvingKey, len(commitments)) + for j := range commitments { + c2.Parameters.CommitmentKeys[i].Basis = make([]curve.G1Affine, 0, 
len(commitments[j].PrivateCommitted)) + } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) + // Evaluate PKK - nPrivate := internal + secret - c2.Parameters.G1.PKK = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public + + c2.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 for i := 0; i < nWires; i++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C var tmp curve.G1Affine tmp.Add(&bA[i], &aB[i]) tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) + commitmentIndex := committedIterator.IndexIfNext(i) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == i + if commitmentIndex != -1 { + c2.Parameters.CommitmentKeys[commitmentIndex].Basis = append(c2.Parameters.CommitmentKeys[commitmentIndex].Basis, tmp) + } else if i < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) } else { - c2.Parameters.G1.PKK[i-offset].Set(&tmp) + c2.Parameters.G1.PKK = append(c2.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) + + for i := range commitments { + c2.Parameters.CommitmentKeys[i].BasisExpSigma = slices.Clone(c2.Parameters.CommitmentKeys[i].Basis) + } // Hash initial contribution - c2.Hash = c2.hash() + c2.Hash = c2.hash() // TODO remove return c2, evals } func (c *Phase2) Contribute() { // Sample toxic δ var delta, deltaInv fr.Element + var sigma fr.Element var deltaBI, deltaInvBI big.Int + + updateValue() + delta.SetRandom() deltaInv.Inverse(&delta) diff --git a/backend/groth16/bn254/setup.go b/backend/groth16/bn254/setup.go index 
13ddcd61d3..6ea996b146 100644 --- a/backend/groth16/bn254/setup.go +++ b/backend/groth16/bn254/setup.go @@ -144,7 +144,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -156,37 +156,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? 
- if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/backend/groth16/internal/utils.go b/backend/groth16/internal/utils.go index 6062ef57ce..67648b2104 100644 --- a/backend/groth16/internal/utils.go +++ b/backend/groth16/internal/utils.go @@ -1,5 +1,10 @@ package internal +import ( + "math" + "slices" +) + func ConcatAll(slices ...[]int) []int { // copyright note: written by GitHub Copilot totalLen := 0 for _, s := range slices { @@ -20,3 +25,56 @@ func NbElements(slices [][]int) int { // copyright note: written by GitHub Copil } return totalLen } + +// NewMergeIterator assumes that all slices in s are sorted +func NewMergeIterator(s [][]int) *MergeIterator { + res := &MergeIterator{slices: slices.Clone(s)} + res.findLeast() + return res +} + +// MergeIterator iterates through a merging of multiple sorted slices +type MergeIterator struct { + slices [][]int + leastIndex int +} + +func (i *MergeIterator) findLeast() { + value := math.MaxInt + i.leastIndex = -1 + for j := range i.slices { + if len(i.slices[j]) == 0 { + continue + } + if v := i.slices[j][0]; v < value { + value = v + i.leastIndex = j + } + } + return +} + +// Peek returns the next smallest value and the index of the slice it came from +// If the iterator is empty, Peek returns (math.MaxInt, -1) +func (i *MergeIterator) Peek() (value, index int) { + return i.slices[i.leastIndex][0], i.leastIndex +} + +// Next returns the next smallest value and the index of the slice it came from, and 
advances the iterator +// If the iterator is empty, Next returns (math.MaxInt, -1) +func (i *MergeIterator) Next() (value, index int) { + value, index = i.Peek() + i.findLeast() + i.slices[i.leastIndex] = i.slices[i.leastIndex][1:] + return +} + +// IndexIfNext returns the index of the slice and advances the iterator if the next value is value, otherwise returns -1 +// If the iterator is empty, IndexIfNext returns -1 +func (i *MergeIterator) IndexIfNext(value int) int { + if v, index := i.Peek(); v == value { + i.Next() + return index + } + return -1 +} From b4bbb2ddbca558adc27402b2b03b5bf8f3a0744f Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 9 Dec 2024 18:02:28 -0600 Subject: [PATCH 014/105] checkpoint --- backend/groth16/bn254/mpcsetup/marshal.go | 6 +- backend/groth16/bn254/mpcsetup/phase1.go | 79 +++++--- backend/groth16/bn254/mpcsetup/phase2.go | 200 ++++++++++++------- backend/groth16/bn254/mpcsetup/setup_test.go | 2 +- backend/groth16/bn254/mpcsetup/utils.go | 57 +++--- 5 files changed, 206 insertions(+), 138 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index b82c8a44ac..b3bd05063c 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -81,7 +81,7 @@ func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) + nBytes, err := writer.Write(phase2.Challenge) return int64(nBytes) + n, err } @@ -125,8 +125,8 @@ func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) + c.Challenge = make([]byte, 32) + n, err := reader.Read(c.Challenge) return int64(n) + dec.BytesRead(), err } diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 4b8df877e7..3a6dddc5c1 100644 --- 
a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -21,9 +21,11 @@ import ( "crypto/sha256" "errors" "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "math/big" + "runtime" ) // Phase1 represents the Phase1 of the MPC described in @@ -47,9 +49,9 @@ func (p *Phase1) Contribute() { var ( tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, p.parameters.G1.Tau[1], tauContrib = updateValue(p.parameters.G1.Tau[1], p.Challenge, 1) - p.proofs.Alpha, p.parameters.G1.AlphaTau[0], alphaContrib = updateValue(p.parameters.G1.AlphaTau[0], p.Challenge, 2) - p.proofs.Beta, p.parameters.G1.BetaTau[0], betaContrib = updateValue(p.parameters.G1.BetaTau[0], p.Challenge, 3) + p.proofs.Tau, tauContrib = updateValue(&p.parameters.G1.Tau[1], p.Challenge, 1) + p.proofs.Alpha, alphaContrib = updateValue(&p.parameters.G1.AlphaTau[0], p.Challenge, 2) + p.proofs.Beta, betaContrib = updateValue(&p.parameters.G1.BetaTau[0], p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib, true) } @@ -164,38 +166,55 @@ func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { } // Verify assumes previous is correct -func (p *Phase1) Verify(previous *Phase1) error { +func (p *Phase1) Verify(next *Phase1) error { - if prevHash := previous.hash(); !bytes.Equal(p.Challenge, previous.hash()) { // if chain-verifying contributions, challenge fields are optional as they can be computed as we go - if len(p.Challenge) != 0 { + if prevHash := p.hash(); !bytes.Equal(next.Challenge, p.hash()) { // if chain-verifying contributions, challenge fields are optional as they can be computed as we go + if len(next.Challenge) != 0 { return errors.New("the challenge does not match the previous phase's hash") } - p.Challenge = prevHash + next.Challenge = prevHash } - if err := p.proofs.Tau.verify( - pair{previous.parameters.G1.Tau[1], 
&previous.parameters.G2.Tau[1]}, + // TODO compare sizes + + r := linearCombCoeffs(len(next.parameters.G1.Tau) - 1) // the longest of all lengths + // will be reusing the coefficients TODO @Tabaie make sure that's okay + + N := len(next.parameters.G2.Tau) + var taus, alphaTaus, betaTaus curve.G1Affine + if _, err := taus.MultiExp(next.parameters.G1.Tau[1:N], r, ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { // τ¹ + r.τ² + … + rᴺ⁻².τⁿ⁻¹ + return err + } + if _, err := alphaTaus.MultiExp(next.parameters.G1.AlphaTau[1:], r, ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { // ατ¹ + r.ατ² + … + rᴺ⁻².ατⁿ⁻¹ + return err + } + if _, err := betaTaus.MultiExp(next.parameters.G1.BetaTau[1:], r, ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { // βτ¹ + r.βτ² + … + rᴺ⁻².βτⁿ⁻¹ + return err + } + + if err := next.proofs.Tau.verify( pair{p.parameters.G1.Tau[1], &p.parameters.G2.Tau[1]}, - p.Challenge, 1); err != nil { + pair{next.parameters.G1.Tau[1], &next.parameters.G2.Tau[1]}, + next.Challenge, 1); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := p.proofs.Alpha.verify( - pair{previous.parameters.G1.AlphaTau[0], nil}, - pair{p.parameters.G1.AlphaTau[0], nil}, - p.Challenge, 2); err != nil { + if err := next.proofs.Alpha.verify( // TODO Get ACTUAL updated tau + pair{taus, nil}, + pair{alphaTaus, nil}, + next.Challenge, 2); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := p.proofs.Beta.verify( - pair{previous.parameters.G1.BetaTau[0], &previous.parameters.G2.Beta}, - pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, - p.Challenge, 3); err != nil { + if err := next.proofs.Beta.verify( + pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, // TODO @Tabaie combine the verification of all βτⁱ + pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, + next.Challenge, 3); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", 
err) } - if !areInSubGroupG1(p.parameters.G1.Tau[2:]) || !areInSubGroupG1(p.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(p.parameters.G1.AlphaTau[1:]) { + if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { return errors.New("derived values 𝔾₁ subgroup check failed") } - if !areInSubGroupG2(p.parameters.G2.Tau[2:]) { + if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { return errors.New("derived values 𝔾₂ subgroup check failed") } @@ -205,19 +224,17 @@ func (p *Phase1) Verify(previous *Phase1) error { // i.e. e(τⁱ⁺¹,[1]₂) = e(τⁱ,[τ]₂). Due to bi-linearity we can instead check // e(∑rⁱ⁻¹τⁱ⁺¹,[1]₂) = e(∑rⁱ⁻¹τⁱ,[τ]₂), which is tantamount to the check // ∑rⁱ⁻¹τⁱ⁺¹ / ∑rⁱ⁻¹τⁱ = τ - r := linearCombCoeffs(len(p.parameters.G1.Tau) - 1) // the longest of all lengths - // will be reusing the coefficient TODO @Tabaie make sure that's okay - tauT1, tauS1 := linearCombinationsG1(p.parameters.G1.Tau[1:], r) - tauT2, tauS2 := linearCombinationsG2(p.parameters.G2.Tau[1:], r) - alphaTT, alphaTS := linearCombinationsG1(p.parameters.G1.AlphaTau, r) - betaTT, betaTS := linearCombinationsG1(p.parameters.G1.BetaTau, r) + tauT1, tauS1 := linearCombinationsG1(next.parameters.G1.Tau[1:], r) + tauT2, tauS2 := linearCombinationsG2(next.parameters.G2.Tau[1:], r) + alphaTT, alphaTS := linearCombinationsG1(next.parameters.G1.AlphaTau, r) + betaTT, betaTS := linearCombinationsG1(next.parameters.G1.BetaTau, r) - if !sameRatioUnsafe(tauS1, tauT1, p.parameters.G2.Tau[1], g2) { + if !sameRatioUnsafe(tauS1, tauT1, next.parameters.G2.Tau[1], g2) { return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") } - if !sameRatioUnsafe(p.parameters.G1.Tau[1], g1, tauS2, tauT2) { + if !sameRatioUnsafe(next.parameters.G1.Tau[1], g1, tauS2, tauT2) { return errors.New("couldn't verify 𝔾₂ representations of the τⁱ") } @@ -226,10 +243,12 @@ func (p *Phase1) Verify(previous *Phase1) error { // For 0 < i < N 
we check that ατⁱ/ατⁱ⁻¹ = τ, since we have a representation of τ in 𝔾₂ // with a similar bi-linearity argument as above we can do this with a single pairing check - if !sameRatioUnsafe(alphaTS, alphaTT, p.parameters.G2.Tau[1], g2) { + // TODO eliminate these by combining with update checking + + if !sameRatioUnsafe(alphaTS, alphaTT, next.parameters.G2.Tau[1], g2) { return errors.New("couldn't verify the ατⁱ") } - if !sameRatioUnsafe(betaTS, betaTT, p.parameters.G2.Tau[1], g2) { + if !sameRatioUnsafe(betaTS, betaTT, next.parameters.G2.Tau[1], g2) { return errors.New("couldn't verify the βτⁱ") } diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 6d739dbe45..6c6c15f7a3 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -17,9 +17,10 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" + "fmt" "github.com/consensys/gnark/backend/groth16/internal" "math/big" "slices" @@ -32,9 +33,10 @@ import ( type Phase2Evaluations struct { G1 struct { - A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ - B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ - VKK []curve.G1Affine // VKK are the coefficients of the public witness (and commitments) + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ @@ -44,19 +46,111 @@ type Phase2Evaluations struct { type Phase2 struct { Parameters 
struct { G1 struct { - Delta curve.G1Affine - Z []curve.G1Affine // Z is the domain vanishing polynomial - PKK []curve.G1Affine // PKK are the coefficients of the private witness + Delta curve.G1Affine + Z []curve.G1Affine // Z are multiples of the domain vanishing polynomial + PKK []curve.G1Affine // PKK are the coefficients of the private witness + SigmaCKK [][]curve.G1Affine // Commitment bases } G2 struct { Delta curve.G2Affine - Sigma curve.G2Affine + Sigma []curve.G2Affine } - CommitmentKeys []pedersen.ProvingKey } - Sigmas []valueUpdate // commitment key secrets - Delta valueUpdate // updates to delta - Hash []byte + + // Proofs of update correctness + Sigmas []valueUpdate + Delta valueUpdate + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (c *Phase2) Verify(next *Phase2) error { + if challenge := c.hash(); len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous phase's hash") + } + + if err := next.Delta.verify( + pair{c.Parameters.G1.Delta, &c.Parameters.G2.Delta}, + pair{next.Parameters.G1.Delta, &next.Parameters.G2.Delta}, + next.Challenge, 1); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + for i := range c.Sigmas { + if err := next.Sigmas[i].verify( + pair{c.Parameters.G1.SigmaCKK[i][0], &c.Parameters.G2.Sigma[i]}, + pair{next.Parameters.G1.SigmaCKK[i][0], &next.Parameters.G2.Sigma[i]}, + next.Challenge, byte(2+i)); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) + } + if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i][1:]) { + return errors.New("commitment proving key subgroup check failed") + } + } + + if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { + return errors.New("derived values 𝔾₁ subgroup check failed") + } + + r := linearCombCoeffs(len(next.Parameters.G1.Z)) + + for i := range c.Sigmas { + prevComb, nextComb 
:= linearCombination(c.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], r) + if !sameRatioUnsafe(nextComb, prevComb, next.Parameters.G2.Sigma[i], c.Parameters.G2.Sigma[i]) { + return fmt.Errorf("failed to verify contribution to σ[%d]", i) + } + } + + linearCombination() +} + +func (c *Phase2) Contribute() { + // Sample toxic δ + var delta, deltaInv fr.Element + var deltaBI, deltaInvBI big.Int + + c.Challenge = c.hash() + + if len(c.Parameters.G1.SigmaCKK) > 255 { + panic("too many commitments") // DST collision + } + for i := range c.Parameters.G1.SigmaCKK { + var ( + sigmaContribution fr.Element + sigmaContributionI big.Int + ) + + pk := c.Parameters.G1.SigmaCKK[i] + c.Sigmas[i], sigmaContribution = updateValue(&pk[0], c.Challenge, byte(2+i)) + sigmaContribution.BigInt(&sigmaContributionI) + for j := 1; j < len(pk); j++ { + pk[j].ScalarMultiplication(&pk[j], &sigmaContributionI) + } + c.Parameters.G2.Sigma[i].ScalarMultiplication(&c.Parameters.G2.Sigma[i], &sigmaContributionI) + } + + c.Delta, delta = updateValue(&c.Parameters.G1.Delta, c.Challenge, 1) + + deltaInv.Inverse(&delta) + delta.BigInt(&deltaBI) + deltaInv.BigInt(&deltaInvBI) + + // Update [δ]₂ + c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) + + c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) + c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) + + // Update Z using δ⁻¹ + for i := 0; i < len(c.Parameters.G1.Z); i++ { + c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + } + + // Update PKK using δ⁻¹ + for i := 0; i < len(c.Parameters.G1.PKK); i++ { + c.Parameters.G1.PKK[i].ScalarMultiplication(&c.Parameters.G1.PKK[i], &deltaInvBI) + } } // Init is to be run by the coordinator @@ -161,24 +255,29 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { _, _, g1, g2 := curve.Generators() c2.Parameters.G1.Delta = g1 c2.Parameters.G2.Delta = g2 - 
c2.Parameters.G2.Sigma = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] n := len(srs.G1.AlphaTau) c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { + for i := 0; i < n-1; i++ { // TODO @Tabaie why is the last element always 0? c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) } bitReverse(c2.Parameters.G1.Z) c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + + evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) c2.Sigmas = make([]valueUpdate, len(commitments)) - c2.Parameters.CommitmentKeys = make([]pedersen.ProvingKey, len(commitments)) + c2.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + c2.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) + for j := range commitments { - c2.Parameters.CommitmentKeys[i].Basis = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + evals.G1.CKK[i] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + c2.Parameters.G2.Sigma[j] = g2 } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) // Evaluate PKK @@ -187,16 +286,16 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) nbCommitmentsSeen := 0 - for i := 0; i < nWires; i++ { + for j := 0; j < nWires; j++ { // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - commitmentIndex := committedIterator.IndexIfNext(i) - isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == i + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < 
len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j if commitmentIndex != -1 { - c2.Parameters.CommitmentKeys[commitmentIndex].Basis = append(c2.Parameters.CommitmentKeys[commitmentIndex].Basis, tmp) - } else if i < nbPublic || isCommitment { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { evals.G1.VKK = append(evals.G1.VKK, tmp) } else { c2.Parameters.G1.PKK = append(c2.Parameters.G1.PKK, tmp) @@ -206,50 +305,15 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } } - for i := range commitments { - c2.Parameters.CommitmentKeys[i].BasisExpSigma = slices.Clone(c2.Parameters.CommitmentKeys[i].Basis) + for j := range commitments { + c2.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } // Hash initial contribution - c2.Hash = c2.hash() // TODO remove + c2.Challenge = c2.hash() // TODO remove return c2, evals } -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var sigma fr.Element - var deltaBI, deltaInvBI big.Int - - updateValue() - - delta.SetRandom() - deltaInv.Inverse(&delta) - - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) - - // Set δ public key - c.PublicKey = newPublicKey(delta, c.Hash, 1) - - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) - - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) - } - - // Update PKK using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.PKK); i++ { - c.Parameters.G1.PKK[i].ScalarMultiplication(&c.Parameters.G1.PKK[i], &deltaInvBI) - } - - // 4. Hash contribution - c.Hash = c.hash() -} - func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { contribs := append([]*Phase2{c0, c1}, c...) 
for i := 0; i < len(contribs)-1; i++ { @@ -262,7 +326,7 @@ func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { func verifyPhase2(current, contribution *Phase2) error { // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) + deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Challenge[:], 1) // Check for knowledge of δ if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { @@ -278,23 +342,15 @@ func verifyPhase2(current, contribution *Phase2) error { } // Check for valid updates of PKK and Z using - L, prevL := merge(contribution.Parameters.G1.PKK, current.Parameters.G1.PKK) + L, prevL := linearCombination(contribution.Parameters.G1.PKK, current.Parameters.G1.PKK) if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { return errors.New("couldn't verify valid updates of PKK using δ⁻¹") } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) + Z, prevZ := linearCombination(contribution.Parameters.G1.Z, current.Parameters.G1.Z) if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { return errors.New("couldn't verify valid updates of PKK using δ⁻¹") } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") - } - } - return nil } diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index b4b013c46b..1aeebc91eb 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -194,7 +194,7 @@ func (phase2 *Phase2) clone() Phase2 { r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) 
r.Parameters.G2.Delta = phase2.Parameters.G2.Delta r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) + r.Challenge = append(r.Challenge, phase2.Challenge...) return r } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index a868b43f68..d18cf14945 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -156,14 +156,14 @@ func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { } // returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { +func linearCombination(A, B []curve.G1Affine, r []fr.Element) (a, b curve.G1Affine) { nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() + if _, err := a.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { + panic(err) + } + if _, err := b.MultiExp(B, r[:len(B)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { + panic(err) } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) return } @@ -172,8 +172,9 @@ func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { func linearCombinationsG1(A []curve.G1Affine, rPowers []fr.Element) (truncated, shifted curve.G1Affine) { // the common section, 1 to N-2 var common curve.G1Affine - common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}) // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] - + if _, err := common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] + panic(err) + } var c big.Int rPowers[1].BigInt(&c) truncated.ScalarMultiplication(&common, &c).Add(&truncated, &A[0]) // A[0] + r.A[1] + r².A[2] + ... 
+ rᴺ⁻².A[N-2] @@ -189,8 +190,9 @@ func linearCombinationsG1(A []curve.G1Affine, rPowers []fr.Element) (truncated, func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { // the common section, 1 to N-2 var common curve.G2Affine - common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}) // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] - + if _, err := common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] + panic(err) + } var c big.Int rPowers[1].BigInt(&c) truncated.ScalarMultiplication(&common, &c).Add(&truncated, &A[0]) // A[0] + r.A[1] + r².A[2] + ... + rᴺ⁻².A[N-2] @@ -242,7 +244,7 @@ type valueUpdate struct { // updateValue produces values associated with contribution to an existing value. // if prevCommitment contains only a 𝔾₁ value, then so will updatedCommitment // the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func updateValue(prev curve.G1Affine, challenge []byte, dst byte) (proof valueUpdate, updated curve.G1Affine, contributionValue fr.Element) { +func updateValue(value *curve.G1Affine, challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { if _, err := contributionValue.SetRandom(); err != nil { panic(err) } @@ -251,10 +253,10 @@ func updateValue(prev curve.G1Affine, challenge []byte, dst byte) (proof valueUp _, _, g1, _ := curve.Generators() proof.contributionCommitment.ScalarMultiplication(&g1, &contributionValueI) - updated.ScalarMultiplication(&prev, &contributionValueI) + value.ScalarMultiplication(value, &contributionValueI) // proof of knowledge to commitment. 
Algorithm 3 from section 3.7 - pokBase := genR(proof.contributionCommitment, updated, challenge, dst) // r + pokBase := genR(proof.contributionCommitment, *value, challenge, dst) // r proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) return @@ -264,47 +266,38 @@ func updateValue(prev curve.G1Affine, challenge []byte, dst byte) (proof valueUp // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. // prevCommitment is assumed to be valid. No subgroup check and the like. -func (x *valueUpdate) verify(prev, updated pair, challenge []byte, dst byte) error { - noG2 := prev.g2 == nil - if noG2 != (updated.g2 == nil) { +// challengePoint is normally equal to [denom] +func (x *valueUpdate) verify(denom, num pair, challengePoint curve.G1Affine, challenge []byte, dst byte) error { + noG2 := denom.g2 == nil + if noG2 != (num.g2 == nil) { return errors.New("erasing or creating g2 values") } - if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !updated.validUpdate() { + if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { return errors.New("contribution values subgroup check failed") } // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, updated.g1, challenge, dst) // verification challenge in the form of a g2 base + r := genR(x.contributionCommitment, challengePoint, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r return errors.New("contribution proof of knowledge verification failed") } - // check that the updated/previous ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatioUnsafe(updated.g1, prev.g1, *updated.g2, *prev.g2) { + // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. + if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { return errors.New("g2 update inconsistent") } - // now verify that updated₁/previous₁ = x ( = x/g1 = π/r ) + // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatioUnsafe(updated.g1, prev.g1, x.contributionPok, r) { + if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { return errors.New("g1 update inconsistent") } return nil } -/* -// setEmpty does not provide proofs, only sets the value to [1] -func (x *valueUpdate) setEmpty(g1Only bool) { - _, _, g1, g2 := curve.Generators() - x.updatedCommitment.g1.Set(&g1) - if !g1Only { - x.updatedCommitment.g2 = &g2 - } -}*/ - func toRefs[T any](s []T) []*T { res := make([]*T, len(s)) for i := range s { From 282484a2c43c5120d592e2579ed82a0c5c5d3a29 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 11 Dec 2024 16:26:08 -0600 Subject: [PATCH 015/105] feat phase2 verification --- backend/groth16/bn254/mpcsetup/phase1.go | 24 +++----- backend/groth16/bn254/mpcsetup/phase2.go | 78 +++++++++++++++--------- backend/groth16/bn254/mpcsetup/utils.go | 29 ++++----- 3 files changed, 71 insertions(+), 60 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 90700abff3..717d60069e 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -47,14 +47,15 @@ func (p *Phase1) Contribute() { // SrsCommons are the circuit-independent components of the Groth16 SRS, // computed by the first phase. 
+// in all that follows, N is the domain size type SrsCommons struct { G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} } G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} Beta curve.G2Affine // [β]₂ } } @@ -181,22 +182,13 @@ func (p *Phase1) Verify(next *Phase1) error { return err } - if err := next.proofs.Tau.verify( - pair{p.parameters.G1.Tau[1], &p.parameters.G2.Tau[1]}, - pair{next.parameters.G1.Tau[1], &next.parameters.G2.Tau[1]}, - next.Challenge, 1); err != nil { + if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], &p.parameters.G2.Tau[1]}, pair{next.parameters.G1.Tau[1], &next.parameters.G2.Tau[1]}, next.Challenge, 1); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify( // TODO Get ACTUAL updated tau - pair{taus, nil}, - pair{alphaTaus, nil}, - next.Challenge, 2); err != nil { + if err := next.proofs.Alpha.verify(pair{taus, nil}, pair{alphaTaus, nil}, next.Challenge, 2); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := next.proofs.Beta.verify( - pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, // TODO @Tabaie combine the verification of all βτⁱ - pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, - next.Challenge, 3); err != nil { + if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, next.Challenge, 3); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } diff 
--git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 2f5d271dee..6f1758b5f9 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -20,7 +20,7 @@ import ( cs "github.com/consensys/gnark/constraint/bn254" ) -type Phase2Evaluations struct { +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ @@ -36,13 +36,13 @@ type Phase2 struct { Parameters struct { G1 struct { Delta curve.G1Affine - Z []curve.G1Affine // Z are multiples of the domain vanishing polynomial - PKK []curve.G1Affine // PKK are the coefficients of the private witness - SigmaCKK [][]curve.G1Affine // Commitment bases + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine - Sigma []curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment } } @@ -55,43 +55,53 @@ type Phase2 struct { } func (c *Phase2) Verify(next *Phase2) error { - if challenge := c.hash(); len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + challenge := c.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the previous phase's hash") } + next.Challenge = challenge - if err := next.Delta.verify( - pair{c.Parameters.G1.Delta, &c.Parameters.G2.Delta}, - pair{next.Parameters.G1.Delta, &next.Parameters.G2.Delta}, - next.Challenge, 1); err != nil { - return fmt.Errorf("failed to verify contribution to δ: %w", err) + if len(next.Parameters.G1.Z) != len(c.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(c.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(c.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(c.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") } - for i := range c.Sigmas { - if err := next.Sigmas[i].verify( - pair{c.Parameters.G1.SigmaCKK[i][0], &c.Parameters.G2.Sigma[i]}, - pair{next.Parameters.G1.SigmaCKK[i][0], &next.Parameters.G2.Sigma[i]}, - next.Challenge, byte(2+i)); err != nil { - return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) - } - if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i][1:]) { + r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) // TODO @Tabaie If all contributions are being verified in one go, we could reuse r + + verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { + g1Num 
:= linearCombination(g1Numerator, r) + g1Denom := linearCombination(g1Denominator, r) + + return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Denominator}, challenge, dst) + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range c.Sigmas { // match the first commitment basis elem against the contribution commitment + if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { return errors.New("commitment proving key subgroup check failed") } + + if err := verifyContribution(&c.Sigmas[i], c.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &c.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) + } } + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { return errors.New("derived values 𝔾₁ subgroup check failed") } - r := linearCombCoeffs(len(next.Parameters.G1.Z)) - - for i := range c.Sigmas { - prevComb, nextComb := linearCombination(c.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], r) - if !sameRatioUnsafe(nextComb, prevComb, next.Parameters.G2.Sigma[i], c.Parameters.G2.Sigma[i]) { - return fmt.Errorf("failed to verify contribution to σ[%d]", i) - } + denom := cloneAppend([]curve.G1Affine{c.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) + num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, c.Parameters.G1.Z, c.Parameters.G1.PKK) + if err := verifyContribution(&c.Delta, denom, num, &c.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) } - linearCombination() + return nil } func (c *Phase2) Contribute() { @@ -315,7 +325,7 @@ func VerifyPhase2(c0, c1 
*Phase2, c ...*Phase2) error { func verifyPhase2(current, contribution *Phase2) error { // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Challenge[:], 1) + deltaR := genR(contribution.PublicKey.SG, current.Challenge[:], 1) // Check for knowledge of δ if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { @@ -348,3 +358,15 @@ func (c *Phase2) hash() []byte { c.writeTo(sha) return sha.Sum(nil) } + +func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { + l := 0 + for _, s := range s { + l += len(s) + } + res := make([]curve.G1Affine, 0, l) + for _, s := range s { + res = append(res, s...) + } + return res +} diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 49057f637a..038482a366 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -41,7 +41,7 @@ func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { pk.SXG.ScalarMultiplication(&pk.SG, &xBi) // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) + R := genR(pk.SG, challenge, dst) // compute x*spG2 pk.XR.ScalarMultiplication(&R, &xBi) @@ -74,7 +74,7 @@ func powersI(a *big.Int, n int) []fr.Element { return powers(&aMont, n) } -// Returns [1, a, a², ..., aⁿ⁻¹ ] +// Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, n int) []fr.Element { result := make([]fr.Element, n) @@ -144,20 +144,19 @@ func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { return res } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func linearCombination(A, B []curve.G1Affine, r []fr.Element) (a, b curve.G1Affine) { +// returns ∑ rᵢAᵢ +func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { nc := runtime.NumCPU() - if _, err := a.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { + var res 
curve.G1Affine + if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { panic(err) } - if _, err := b.MultiExp(B, r[:len(B)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { - panic(err) - } - return + return res } // linearCombinationsG1 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i // Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) +// the results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ func linearCombinationsG1(A []curve.G1Affine, rPowers []fr.Element) (truncated, shifted curve.G1Affine) { // the common section, 1 to N-2 var common curve.G1Affine @@ -176,6 +175,7 @@ func linearCombinationsG1(A []curve.G1Affine, rPowers []fr.Element) (truncated, // linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i // Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) +// the results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { // the common section, 1 to N-2 var common curve.G2Affine @@ -195,11 +195,10 @@ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, // Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) // it is to be used as a challenge for generating a proof of knowledge to x // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { +func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) if err != nil { @@ -227,7 +226,6 @@ func (p *pair) validUpdate() bool { type valueUpdate struct { contributionCommitment curve.G1Affine // x or [Xⱼ]₁ contributionPok curve.G2Affine // π ≔ x.r ∈ 
𝔾₂ - //updatedCommitment pair // [X₁..Xⱼ] } // updateValue produces values associated with contribution to an existing value. @@ -245,7 +243,7 @@ func updateValue(value *curve.G1Affine, challenge []byte, dst byte) (proof value value.ScalarMultiplication(value, &contributionValueI) // proof of knowledge to commitment. Algorithm 3 from section 3.7 - pokBase := genR(proof.contributionCommitment, *value, challenge, dst) // r + pokBase := genR(proof.contributionCommitment, challenge, dst) // r proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) return @@ -255,8 +253,7 @@ func updateValue(value *curve.G1Affine, challenge []byte, dst byte) (proof value // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. // prevCommitment is assumed to be valid. No subgroup check and the like. -// challengePoint is normally equal to [denom] -func (x *valueUpdate) verify(denom, num pair, challengePoint curve.G1Affine, challenge []byte, dst byte) error { +func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { noG2 := denom.g2 == nil if noG2 != (num.g2 == nil) { return errors.New("erasing or creating g2 values") @@ -267,7 +264,7 @@ func (x *valueUpdate) verify(denom, num pair, challengePoint curve.G1Affine, cha } // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, challengePoint, challenge, dst) // verification challenge in the form of a g2 base + r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? 
π/r return errors.New("contribution proof of knowledge verification failed") From b37e663fbd70aa5ff96aadb77805f7fbb941e6cc Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 12 Dec 2024 17:18:18 -0600 Subject: [PATCH 016/105] docs lemma --- backend/groth16/bn254/mpcsetup/marshal.go | 77 ++++++----- backend/groth16/bn254/mpcsetup/phase1.go | 154 ++++++++++++++++------ 2 files changed, 154 insertions(+), 77 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 964b45a371..3daf9fa35c 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -17,51 +17,42 @@ func appendRefs[T any](s []interface{}, v []T) []interface{} { return s } -// proofRefsSlice produces a slice consisting of references to all proof sub-elements -// prepended by the size parameter, to be used in WriteTo and ReadFrom functions -func (p *Phase1) proofRefsSlice() []interface{} { - return []interface{}{ - &p.proofs.Tau.contributionCommitment, - &p.proofs.Tau.contributionPok, - &p.proofs.Alpha.contributionCommitment, - &p.proofs.Alpha.contributionPok, - &p.proofs.Beta.contributionCommitment, - &p.proofs.Beta.contributionPok, - } -} - // WriteTo implements io.WriterTo // It does not write the Challenge from the previous contribution func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { - - if n, err = p.parameters.WriteTo(writer); err != nil { - return - } - - enc := curve.NewEncoder(writer) - for _, v := range p.proofRefsSlice() { - if err = enc.Encode(v); err != nil { - return n + enc.BytesWritten(), err + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return } } - return n + enc.BytesWritten(), nil + return } // ReadFrom implements io.ReaderFrom // It does not read the Challenge from the previous 
contribution func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { - - if n, err = p.parameters.ReadFrom(reader); err != nil { - return - } - - dec := curve.NewDecoder(reader) - for _, v := range p.proofRefsSlice() { // we've already decoded N - if err = dec.Decode(v); err != nil { - return n + dec.BytesRead(), err + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return } } - return n + dec.BytesRead(), nil + return } // WriteTo implements io.WriterTo @@ -162,7 +153,7 @@ func (c *SrsCommons) refsSlice() []interface{} { N := len(c.G2.Tau) estimatedNbElems := 5*N - 1 // size N 1 - // 𝔾₂ representation for β 1 + // [β]₂ 1 // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N @@ -209,3 +200,21 @@ func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { } return dec.BytesRead(), nil } + +func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { + enc := curve.NewEncoder(writer) + if err = enc.Encode(&x.contributionCommitment); err != nil { + return enc.BytesWritten(), err + } + err = enc.Encode(&x.contributionPok) + return enc.BytesWritten(), err +} + +func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { + dec := curve.NewDecoder(reader) + if err = dec.Decode(&x.contributionCommitment); err != nil { + return dec.BytesRead(), err + } + err = dec.Decode(&x.contributionPok) + return dec.BytesRead(), err +} diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 717d60069e..1e1c527e4c 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -10,14 +10,27 @@ import ( "crypto/sha256" "errors" "fmt" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "math/big" - 
"runtime" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. +// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" @@ -45,21 +58,6 @@ func (p *Phase1) Contribute() { p.parameters.update(&tauContrib, &alphaContrib, &betaContrib, true) } -// SrsCommons are the circuit-independent components of the Groth16 SRS, -// computed by the first phase. -// in all that follows, N is the domain size -type SrsCommons struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } -} - // setZero instantiates the parameters, and sets all contributions to zero func (c *SrsCommons) setZero(N uint64) { c.G1.Tau = make([]curve.G1Affine, 2*N-2) @@ -122,7 +120,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element, prin // that it produces the same values. // The inner workings of the random beacon are out of scope. // WARNING: Seal modifies p, just as Contribute does. -// The result will be an INVALID Phase1 object. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { var ( bb bytes.Buffer @@ -158,37 +156,31 @@ func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { // Verify assumes previous is correct func (p *Phase1) Verify(next *Phase1) error { - if prevHash := p.hash(); !bytes.Equal(next.Challenge, p.hash()) { // if chain-verifying contributions, challenge fields are optional as they can be computed as we go - if len(next.Challenge) != 0 { - return errors.New("the challenge does not match the previous phase's hash") - } - next.Challenge = prevHash + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous phase's hash") } + next.Challenge = challenge - // TODO compare sizes - - r := linearCombCoeffs(len(next.parameters.G1.Tau) - 1) // the longest of all lengths - // will be reusing the coefficients TODO @Tabaie make sure that's okay - + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p N := len(next.parameters.G2.Tau) - var taus, alphaTaus, betaTaus curve.G1Affine - if _, err := taus.MultiExp(next.parameters.G1.Tau[1:N], r, ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { // τ¹ + r.τ² + … + rᴺ⁻².τⁿ⁻¹ - return err - } - if _, err := alphaTaus.MultiExp(next.parameters.G1.AlphaTau[1:], r, ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { // ατ¹ + r.ατ² + … + rᴺ⁻².ατⁿ⁻¹ - return err - } - if _, err := betaTaus.MultiExp(next.parameters.G1.BetaTau[1:], r, ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { // βτ¹ + r.βτ² + … + rᴺ⁻².βτⁿ⁻¹ - return err + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], &p.parameters.G2.Tau[1]}, pair{next.parameters.G1.Tau[1], &next.parameters.G2.Tau[1]}, next.Challenge, 1); err != nil { + r := linearCombCoeffs(len(next.parameters.G1.Tau) + len(next.parameters.G1.AlphaTau) + len(next.parameters.G1.BetaTau) - 1) // the longest of all lengths + // will be reusing the coefficients + + // verify updates to τ, α, β + if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify(pair{taus, nil}, pair{alphaTaus, nil}, next.Challenge, 2); err != nil { + if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{p.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, next.Challenge, 3); err != nil { + if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], 
&next.parameters.G2.Beta}, challenge, 3); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } @@ -201,6 +193,77 @@ func (p *Phase1) Verify(next *Phase1) error { _, _, g1, g2 := curve.Generators() + // lemma: let R be an integral domain and + // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ + // G = ∑ gᵢⱼ ZⁱTʲ G' = ∑ g'ᵢⱼ ZⁱTʲ + // polynomials in R[X,Y,Z,T]. + // if F/F' = G/G' + // then F/F' = G/G' ∈ FracR + // + // view our polynomials in FracR[X,Y,Z,T] + // By multiplying out the polynomials we get + // FG' = F'G ⇒ ∑ fᵢⱼg'ₖₗ XᶦYʲZᵏTˡ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏTˡ + // pick i0 ,j0 , k0, l0 where f'ᵢ₀ⱼ₀, g'ₖ₀ₗ₀ ≠ 0 + // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀ₗ₀/g'ₖ₀ₗ₀ + // now for any i,j: fᵢⱼg'ₖ₀ₗ₀ = f'ᵢⱼgₖ₀ₗ₀ ⇒ + // fᵢⱼ = x f'ᵢⱼ + // likewise for any i,j: fᵢ₀ⱼ₀g'ᵢⱼ = f'ᵢ₀ⱼ₀gᵢⱼ ⇒ + // gᵢⱼ = x g'ᵢⱼ + + // now we use this to check that: + // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ + // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ + // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ + // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ + + // + // we already know that a₀ = 1, a₁ = τ, + // c₀ = α, d₀ = β, b₀ = 1, + // construct the polynomials + // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² + // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² + // G ≔ b + + // we want to establish G1.AlphaTau[i] = [ατⁱ]₁, + // already known for i = 0 from the contribution checks + // let [cᵢ]₁ = G1.AlphaTau[i] + // let C1 ≔ c₀ + rc₁ + ... + rᴺ⁻²cₙ₋₂ + // C2 ≔ c₁ + rc₂ + ... + rᴺ⁻²cₙ₋₁ + // then if indeed cᵢ = ατⁱ, we get + // C1/C2 = 1/τ + // conversely, from C1/C2 = 1/τ we get + // c₁ + rc₂ + ... + rᴺ⁻²cₙ₋₁ = τc₀ + rτc₁ + ... + rᴺ⁻²τcₙ₋₂ + // which by the Schwartz-Zippel lemma and a simple induction + // implies the desired result with overwhelming probability. + + // The same argument works for G1.BetaTau[i] + + // we also want to establish Gⱼ.Tau[i] = [τⁱ]ⱼ + // let [aᵢ]₁ = G1.Tau[i] and [bᵢ]₂ = G2.Tau[i] + // let A1 ≔ a₀ + ra₁ + ... 
+ r²ᴺ⁻³a₂ₙ₋₃ + // A2 ≔ a₁ + ra₂ + ... + r²ᴺ⁻³a₂ₙ₋₂ + // B1 ≔ b₀ + sb₁ + ... + sᴺ⁻²bₙ₋₂ + // B2 ≔ b₁ + sb₂ + ... + sᴺ⁻²bₙ₋₁ + // for random r,s + // if the values are correct clearly we get A1/A2 = B1/B2 + // + // if A1/A2 = B1/B2, by the bivariate Schwartz-Zippel we get + // (a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³)(b₁ + b₂Y + ... + bₙ₋₁Yᴺ⁻²) = + // (a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³)(b₀ + b₁Y + ... + bₙ₋₂Yᴺ⁻²) + // furthermore by previous checks we already know that + // a₀=1, a₁= τ + // Assume by induction that for all i < m ≤ N-1: bᵢ = τⁱ + // Then modulo (X, Yᵐ) we get + // τ + τ²Y + ... + τᵐ⁻¹Yᵐ⁻² + bₘYᵐ⁻¹ = + // τ (1 + τ²Y + ... + τᵐ⁻¹Yᵐ⁻¹) + // which gives bₘ = τᵐ + // We then get A1/A2 = 1/τ which by the previous lemma gives + // aᵢ = τⁱ + + // now to combine all the above + + // verify monomials + // for 1 ≤ i ≤ 2N-3 we want to check τⁱ⁺¹/τⁱ = τ // i.e. e(τⁱ⁺¹,[1]₂) = e(τⁱ,[τ]₂). Due to bi-linearity we can instead check // e(∑rⁱ⁻¹τⁱ⁺¹,[1]₂) = e(∑rⁱ⁻¹τⁱ,[τ]₂), which is tantamount to the check @@ -208,8 +271,6 @@ func (p *Phase1) Verify(next *Phase1) error { tauT1, tauS1 := linearCombinationsG1(next.parameters.G1.Tau[1:], r) tauT2, tauS2 := linearCombinationsG2(next.parameters.G2.Tau[1:], r) - alphaTT, alphaTS := linearCombinationsG1(next.parameters.G1.AlphaTau, r) - betaTT, betaTS := linearCombinationsG1(next.parameters.G1.BetaTau, r) if !sameRatioUnsafe(tauS1, tauT1, next.parameters.G2.Tau[1], g2) { return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") @@ -219,6 +280,9 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("couldn't verify 𝔾₂ representations of the τⁱ") } + alphaTT, alphaTS := linearCombinationsG1(next.parameters.G1.AlphaTau, r) + betaTT, betaTS := linearCombinationsG1(next.parameters.G1.BetaTau, r) + // for 0 ≤ i < N we want to check the ατⁱ // By well-formedness checked by ReadFrom, we assume that ατ⁰ = α // For 0 < i < N we check that ατⁱ/ατⁱ⁻¹ = τ, since we have a representation of τ in 𝔾₂ @@ -235,6 +299,10 @@ func (p *Phase1) 
Verify(next *Phase1) error { // TODO @Tabaie combine all pairing checks except the second one + taus := linearCombination(next.parameters.G1.Tau[:N], r) // 1 + r.τ¹ + r.τ² + … + rᴺ⁻¹.τᴺ⁻¹ + alphaTaus := linearCombination(next.parameters.G1.AlphaTau, r) // α + r.ατ¹ + r.ατ² + … + rᴺ⁻¹.ατᴺ⁻¹ + betaTaus := linearCombination(next.parameters.G1.BetaTau, r) // β + r.τ¹ + r.βτ² + … + rᴺ⁻¹.βτᴺ⁻¹ + return nil } From 50cd7e910caf2dbb6c0fe826c81b345fb8f69506 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 12 Dec 2024 17:47:57 -0600 Subject: [PATCH 017/105] docs new ratio check method --- backend/groth16/bn254/mpcsetup/phase1.go | 79 +++++-------------- backend/groth16/bn254/mpcsetup/utils.go | 98 +++++++++++++----------- 2 files changed, 73 insertions(+), 104 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 1e1c527e4c..faf6b21dd0 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -195,20 +195,20 @@ func (p *Phase1) Verify(next *Phase1) error { // lemma: let R be an integral domain and // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ - // G = ∑ gᵢⱼ ZⁱTʲ G' = ∑ g'ᵢⱼ ZⁱTʲ - // polynomials in R[X,Y,Z,T]. + // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ + // polynomials in R[X,Y,Z]. 
// if F/F' = G/G' // then F/F' = G/G' ∈ FracR // - // view our polynomials in FracR[X,Y,Z,T] + // view our polynomials in FracR[X,Y,Z] // By multiplying out the polynomials we get - // FG' = F'G ⇒ ∑ fᵢⱼg'ₖₗ XᶦYʲZᵏTˡ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏTˡ - // pick i0 ,j0 , k0, l0 where f'ᵢ₀ⱼ₀, g'ₖ₀ₗ₀ ≠ 0 - // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀ₗ₀/g'ₖ₀ₗ₀ - // now for any i,j: fᵢⱼg'ₖ₀ₗ₀ = f'ᵢⱼgₖ₀ₗ₀ ⇒ + // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ + // pick i0 ,j0 , k0 where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 + // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ + // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ // fᵢⱼ = x f'ᵢⱼ - // likewise for any i,j: fᵢ₀ⱼ₀g'ᵢⱼ = f'ᵢ₀ⱼ₀gᵢⱼ ⇒ - // gᵢⱼ = x g'ᵢⱼ + // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ + // gᵢ = x g'ᵢ // now we use this to check that: // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ @@ -216,58 +216,21 @@ func (p *Phase1) Verify(next *Phase1) error { // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ - // - // we already know that a₀ = 1, a₁ = τ, - // c₀ = α, d₀ = β, b₀ = 1, // construct the polynomials // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² - // G ≔ b - - // we want to establish G1.AlphaTau[i] = [ατⁱ]₁, - // already known for i = 0 from the contribution checks - // let [cᵢ]₁ = G1.AlphaTau[i] - // let C1 ≔ c₀ + rc₁ + ... + rᴺ⁻²cₙ₋₂ - // C2 ≔ c₁ + rc₂ + ... + rᴺ⁻²cₙ₋₁ - // then if indeed cᵢ = ατⁱ, we get - // C1/C2 = 1/τ - // conversely, from C1/C2 = 1/τ we get - // c₁ + rc₂ + ... + rᴺ⁻²cₙ₋₁ = τc₀ + rτc₁ + ... + rᴺ⁻²τcₙ₋₂ - // which by the Schwartz-Zippel lemma and a simple induction - // implies the desired result with overwhelming probability. - - // The same argument works for G1.BetaTau[i] - - // we also want to establish Gⱼ.Tau[i] = [τⁱ]ⱼ - // let [aᵢ]₁ = G1.Tau[i] and [bᵢ]₂ = G2.Tau[i] - // let A1 ≔ a₀ + ra₁ + ... + r²ᴺ⁻³a₂ₙ₋₃ - // A2 ≔ a₁ + ra₂ + ... + r²ᴺ⁻³a₂ₙ₋₂ - // B1 ≔ b₀ + sb₁ + ... 
+ sᴺ⁻²bₙ₋₂ - // B2 ≔ b₁ + sb₂ + ... + sᴺ⁻²bₙ₋₁ - // for random r,s - // if the values are correct clearly we get A1/A2 = B1/B2 - // - // if A1/A2 = B1/B2, by the bivariate Schwartz-Zippel we get - // (a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³)(b₁ + b₂Y + ... + bₙ₋₁Yᴺ⁻²) = - // (a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³)(b₀ + b₁Y + ... + bₙ₋₂Yᴺ⁻²) - // furthermore by previous checks we already know that - // a₀=1, a₁= τ - // Assume by induction that for all i < m ≤ N-1: bᵢ = τⁱ - // Then modulo (X, Yᵐ) we get - // τ + τ²Y + ... + τᵐ⁻¹Yᵐ⁻² + bₘYᵐ⁻¹ = - // τ (1 + τ²Y + ... + τᵐ⁻¹Yᵐ⁻¹) - // which gives bₘ = τᵐ - // We then get A1/A2 = 1/τ which by the previous lemma gives - // aᵢ = τⁱ - - // now to combine all the above - - // verify monomials - - // for 1 ≤ i ≤ 2N-3 we want to check τⁱ⁺¹/τⁱ = τ - // i.e. e(τⁱ⁺¹,[1]₂) = e(τⁱ,[τ]₂). Due to bi-linearity we can instead check - // e(∑rⁱ⁻¹τⁱ⁺¹,[1]₂) = e(∑rⁱ⁻¹τⁱ,[τ]₂), which is tantamount to the check - // ∑rⁱ⁻¹τⁱ⁺¹ / ∑rⁱ⁻¹τⁱ = τ + // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² + // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² + + // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: + // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ + // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ + + // from previous checks we already know: + // 1. a₀ = 1 + // 2. b₀ = 1 + // 3. c₀ = α + // 4. 
d₀ = β tauT1, tauS1 := linearCombinationsG1(next.parameters.G1.Tau[1:], r) tauT2, tauS2 := linearCombinationsG2(next.parameters.G2.Tau[1:], r) diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 038482a366..ac7a20f884 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -25,29 +25,6 @@ type PublicKey struct { XR curve.G2Affine // XR = X.R ∈ 𝔾₂ proof of knowledge } -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -61,17 +38,7 @@ func bitReverse[T any](a []T) { } func linearCombCoeffs(n int) []fr.Element { - var a fr.Element - if _, err := a.SetRandom(); err != nil { - panic(err) - } - return powers(&a, n) -} - -func powersI(a *big.Int, n int) []fr.Element { - var aMont fr.Element - aMont.SetBigInt(a) - return powers(&aMont, n) + return bivariateRandomMonomials(n) } // Returns [1, a, a², ..., aᴺ⁻¹ ] @@ -79,7 +46,7 @@ func powers(a *fr.Element, n int) []fr.Element { result := make([]fr.Element, n) if n >= 1 { - result[0] = fr.NewElement(1) + result[0].SetOne() } if n >= 2 { result[1].Set(a) @@ -122,15 +89,6 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -/* -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid 
point not in subgroup") - } - return sameRatioUnsafe(a1, b1, a2, b2) -}*/ - // Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { var nd1 curve.G1Affine @@ -309,6 +267,54 @@ func areInSubGroupG2(s []curve.G2Affine) bool { return areInSubGroup(toRefs(s)) } -func truncate[T any](s []T) []T { - return s[:len(s)-1] +// bivariateRandomMonomials returns 1, x, ..., xˣᴰ⁰; y, xy, ..., xˣᴰ¹y; ... +// all concatenated in the same slice +func bivariateRandomMonomials(xD ...int) []fr.Element { + if len(xD) == 0 { + return nil + } + totalSize := xD[0] + for i := 1; i < len(xD); i++ { + totalSize += xD[i] + if xD[i] > xD[0] { + panic("implementation detail: first max degree must be the largest") + } + } + + res := make([]fr.Element, totalSize) + if _, err := res[1].SetRandom(); err != nil { + panic(err) + } + setPowers(res[:xD[0]]) + + if len(xD) == 1 { + return res + } + + y := make([]fr.Element, len(xD)) + if _, err := y[1].SetRandom(); err != nil { + panic(err) + } + setPowers(y) + + totalSize = xD[0] + for d := 1; d < len(xD); d++ { + for i := range res[:xD[d]] { + res[totalSize+i].Mul(&res[i], &y[d]) + } + totalSize += xD[d] + } + + return res +} + +// sets x[i] = x[1]ⁱ +func setPowers(x []fr.Element) { + if len(x) == 0 { + return + } + x[0].SetOne() + for i := 2; i < len(x); i++ { + x[i].Mul(&x[i-1], &x[1]) + } } From 527dd22504efb83745b6e545c4c98ab7e3039ec3 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 12 Dec 2024 18:54:05 -0600 Subject: [PATCH 018/105] feat newLinearCombinationsG1 --- backend/groth16/bn254/mpcsetup/phase1.go | 13 ++- backend/groth16/bn254/mpcsetup/utils.go | 103 ++++++++++++++++------- 2 files changed, 84 insertions(+), 32 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index faf6b21dd0..590fefccaa 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ 
b/backend/groth16/bn254/mpcsetup/phase1.go @@ -232,7 +232,18 @@ func (p *Phase1) Verify(next *Phase1) error { // 3. c₀ = α // 4. d₀ = β - tauT1, tauS1 := linearCombinationsG1(next.parameters.G1.Tau[1:], r) + ends := partialSums(len(next.parameters.G1.Tau), len(next.parameters.G1.AlphaTau), len(next.parameters.G1.BetaTau)) + + coeffs := bivariateRandomMonomials(ends...) + + g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) + g1s = append(g1s, next.parameters.G1.Tau...) + g1s = append(g1s, next.parameters.G1.AlphaTau...) + g1s = append(g1s, next.parameters.G1.BetaTau...) + + g1Num, g1Denom := linearCombinationsG1(g1s, coeffs, ends) + + tauT1, tauS1 := linearCombinationsG1(next.parameters.G1.Tau[1:], r, ends) tauT2, tauS2 := linearCombinationsG2(next.parameters.G2.Tau[1:], r) if !sameRatioUnsafe(tauS1, tauT1, next.parameters.G2.Tau[1], g2) { diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index ac7a20f884..86428dc260 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -112,21 +112,54 @@ func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { return res } -// linearCombinationsG1 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i -// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) -// the results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ -func linearCombinationsG1(A []curve.G1Affine, rPowers []fr.Element) (truncated, shifted curve.G1Affine) { - // the common section, 1 to N-2 - var common curve.G1Affine - if _, err := common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] +// linearCombinationsG1 returns +// +// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] +// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] +// .... 
(truncated) +// +// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] +// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] +// .... (shifted) +// +// It assumes without checking that powers[i+1] = powers[i]*powers[1] unless i or i+1 is a partial sum of sizes +// the slices powers and A will be modified +func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { + if ends[len(ends)-1] != len(A) || len(A) != len(powers) { + panic("lengths mismatch") + } + + largeCoeffs := make([]fr.Element, len(ends)) + for i := range ends { + largeCoeffs[i].Neg(&powers[ends[i]-1]) + powers[ends[i]-1].SetZero() + } + + msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} + + if _, err := shifted.MultiExp(A, powers, msmCfg); err != nil { panic(err) } - var c big.Int - rPowers[1].BigInt(&c) - truncated.ScalarMultiplication(&common, &c).Add(&truncated, &A[0]) // A[0] + r.A[1] + r².A[2] + ... + rᴺ⁻².A[N-2] - rPowers[len(A)-1].BigInt(&c) - shifted.ScalarMultiplication(&A[len(A)-1], &c).Add(&shifted, &common) + prevEnd := 0 + for i := range ends { + if ends[i] <= prevEnd { + panic("non-increasing ends") + } + + powers[2*i] = powers[prevEnd] + powers[2*i+1] = largeCoeffs[i] + + A[2*i] = A[prevEnd] + A[2*i+1] = A[ends[i]-1] + + prevEnd = ends[i] + } + // TODO @Tabaie O(1) MSM worth it? + if _, err := truncated.MultiExp(A[:2*len(ends)], powers[:2*len(ends)], msmCfg); err != nil { + panic(err) + } + truncated.Add(&truncated, &shifted) return } @@ -267,42 +300,38 @@ func areInSubGroupG2(s []curve.G2Affine) bool { return areInSubGroup(toRefs(s)) } -// bivariateRandomMonomials returns 1, x, ..., xˣᴰ⁰; y, xy, ..., xˣᴰ¹y; ... +// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
// all concatenated in the same slice -func bivariateRandomMonomials(xD ...int) []fr.Element { - if len(xD) == 0 { +func bivariateRandomMonomials(ends ...int) []fr.Element { + if len(ends) == 0 { return nil } - totalSize := xD[0] - for i := 1; i < len(xD); i++ { - totalSize += xD[i] - if xD[i] > xD[0] { - panic("implementation detail: first max degree must be the largest") - } - } - res := make([]fr.Element, totalSize) + res := make([]fr.Element, ends[]) if _, err := res[1].SetRandom(); err != nil { panic(err) } - setPowers(res[:xD[0]]) + setPowers(res[:ends[0]]) - if len(xD) == 1 { + if len(ends) == 1 { return res } - y := make([]fr.Element, len(xD)) + y := make([]fr.Element, len(ends)) if _, err := y[1].SetRandom(); err != nil { panic(err) } setPowers(y) - totalSize = xD[0] - for d := 1; d < len(xD); d++ { - for i := range res[:xD[d]] { - res[totalSize+i].Mul(&res[i], &y[d]) + for d := 1; d < len(ends); d++ { + xdeg := ends[d] - ends[d-1] + if xdeg > ends[0] { + panic("impl detail: first maximum degree for x must be the greatest") + } + + for i := range xdeg { + res[ends[d-1]+i].Mul(&res[i], &y[d]) } - totalSize += xD[d] } return res @@ -318,3 +347,15 @@ func setPowers(x []fr.Element) { x[i].Mul(&x[i-1], &x[1]) } } + +func partialSums(s ...int) []int { + if len(s) == 0 { + return nil + } + sums := make([]int, len(s)) + sums[0] = s[0] + for i := 1; i < len(s); i++ { + sums[i] = sums[i-1] + s[i] + } + return sums +} From 18411e59bdb4c3d497a86d21bbd912a64ebdb752 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 12 Dec 2024 19:00:17 -0600 Subject: [PATCH 019/105] clean phase1 ver --- backend/groth16/bn254/mpcsetup/phase1.go | 44 +++--------------------- 1 file changed, 4 insertions(+), 40 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 590fefccaa..f16bd9755f 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ 
b/backend/groth16/bn254/mpcsetup/phase1.go @@ -170,9 +170,6 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("domain size mismatch") } - r := linearCombCoeffs(len(next.parameters.G1.Tau) + len(next.parameters.G1.AlphaTau) + len(next.parameters.G1.BetaTau) - 1) // the longest of all lengths - // will be reusing the coefficients - // verify updates to τ, α, β if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) @@ -191,8 +188,6 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } - _, _, g1, g2 := curve.Generators() - // lemma: let R be an integral domain and // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ @@ -234,48 +229,17 @@ func (p *Phase1) Verify(next *Phase1) error { ends := partialSums(len(next.parameters.G1.Tau), len(next.parameters.G1.AlphaTau), len(next.parameters.G1.BetaTau)) - coeffs := bivariateRandomMonomials(ends...) - g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) g1s = append(g1s, next.parameters.G1.Tau...) g1s = append(g1s, next.parameters.G1.AlphaTau...) g1s = append(g1s, next.parameters.G1.BetaTau...) 
- g1Num, g1Denom := linearCombinationsG1(g1s, coeffs, ends) - - tauT1, tauS1 := linearCombinationsG1(next.parameters.G1.Tau[1:], r, ends) - tauT2, tauS2 := linearCombinationsG2(next.parameters.G2.Tau[1:], r) - - if !sameRatioUnsafe(tauS1, tauT1, next.parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify 𝔾₁ representations of the τⁱ") - } - - if !sameRatioUnsafe(next.parameters.G1.Tau[1], g1, tauS2, tauT2) { - return errors.New("couldn't verify 𝔾₂ representations of the τⁱ") - } - - alphaTT, alphaTS := linearCombinationsG1(next.parameters.G1.AlphaTau, r) - betaTT, betaTS := linearCombinationsG1(next.parameters.G1.BetaTau, r) - - // for 0 ≤ i < N we want to check the ατⁱ - // By well-formedness checked by ReadFrom, we assume that ατ⁰ = α - // For 0 < i < N we check that ατⁱ/ατⁱ⁻¹ = τ, since we have a representation of τ in 𝔾₂ - // with a similar bi-linearity argument as above we can do this with a single pairing check - - // TODO eliminate these by combining with update checking + g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) + g2Num, g2Denom := linearCombinationsG2(next.parameters.G2.Tau, linearCombCoeffs(len(next.parameters.G2.Tau))) - if !sameRatioUnsafe(alphaTS, alphaTT, next.parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify the ατⁱ") + if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + return errors.New("value update check failed") } - if !sameRatioUnsafe(betaTS, betaTT, next.parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify the βτⁱ") - } - - // TODO @Tabaie combine all pairing checks except the second one - - taus := linearCombination(next.parameters.G1.Tau[:N], r) // 1 + r.τ¹ + r.τ² + … + rᴺ⁻¹.τᴺ⁻¹ - alphaTaus := linearCombination(next.parameters.G1.AlphaTau, r) // α + r.ατ¹ + r.ατ² + … + rᴺ⁻¹.ατᴺ⁻¹ - betaTaus := linearCombination(next.parameters.G1.BetaTau, r) // β + r.τ¹ + r.βτ² + … + rᴺ⁻¹.βτᴺ⁻¹ return nil } From 849f9d57a654f5a252964c4a078f498049c1cc9c Mon Sep 17 00:00:00 
2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 12 Dec 2024 19:13:50 -0600 Subject: [PATCH 020/105] docs and fixes --- backend/groth16/bn254/mpcsetup/phase1.go | 11 ++++++----- backend/groth16/bn254/mpcsetup/utils.go | 13 ++++++++++--- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index f16bd9755f..c50a68e909 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -188,17 +188,17 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } - // lemma: let R be an integral domain and + // lemma: let K be a field and // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ - // polynomials in R[X,Y,Z]. + // polynomials in K[X,Y,Z]. // if F/F' = G/G' - // then F/F' = G/G' ∈ FracR + // then F/F' = G/G' ∈ K // - // view our polynomials in FracR[X,Y,Z] + // view our polynomials in K[X,Y,Z] // By multiplying out the polynomials we get // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ - // pick i0 ,j0 , k0 where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 + // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ // fᵢⱼ = x f'ᵢⱼ @@ -226,6 +226,7 @@ func (p *Phase1) Verify(next *Phase1) error { // 2. b₀ = 1 // 3. c₀ = α // 4. d₀ = β + // and so the desired results follow ends := partialSums(len(next.parameters.G1.Tau), len(next.parameters.G1.AlphaTau), len(next.parameters.G1.BetaTau)) diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 86428dc260..14cd4d5ca9 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -122,7 +122,7 @@ func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { // + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] // .... 
(shifted) // -// It assumes without checking that powers[i+1] = powers[i]*powers[1] unless i or i+1 is a partial sum of sizes +// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes // the slices powers and A will be modified func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { if ends[len(ends)-1] != len(A) || len(A) != len(powers) { @@ -141,6 +141,11 @@ func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) ( panic(err) } + // compute truncated as + // r.shifted + // + powers[0].A[0] + powers[ends[0].A[ends[0]] + ... + // - powers[ends[0]-1].A[ends[0]-1] - powers[ends[1]-1].A[ends[1]-1] - ... + r := powers[1] prevEnd := 0 for i := range ends { if ends[i] <= prevEnd { @@ -155,11 +160,13 @@ func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) ( prevEnd = ends[i] } + powers[len(ends)*2] = r + A[len(ends)*2] = shifted + // TODO @Tabaie O(1) MSM worth it? 
- if _, err := truncated.MultiExp(A[:2*len(ends)], powers[:2*len(ends)], msmCfg); err != nil { + if _, err := truncated.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { panic(err) } - truncated.Add(&truncated, &shifted) return } From 34b3967bae50f6e9d37b1ac4032eb6182234071b Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 12 Dec 2024 19:36:51 -0600 Subject: [PATCH 021/105] feat key extraction - commitments --- backend/groth16/bn254/mpcsetup/phase2.go | 90 ++++++++---------------- backend/groth16/bn254/mpcsetup/setup.go | 28 ++++++-- 2 files changed, 49 insertions(+), 69 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 6f1758b5f9..2927acbdb8 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -11,13 +11,13 @@ import ( "errors" "fmt" "github.com/consensys/gnark/backend/groth16/internal" + cs "github.com/consensys/gnark/constraint/bn254" "math/big" "slices" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark/constraint" - cs "github.com/consensys/gnark/constraint/bn254" ) type Phase2Evaluations struct { // TODO @Tabaie rename @@ -155,20 +155,13 @@ func (c *Phase2) Contribute() { // Init is to be run by the coordinator // It involves no coin tosses. 
A verifier should // simply rerun all the steps -func (p *Phase2) Init(commons SrsCommons) { - -} - -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { +func (p *Phase2) Init(r1cs *cs.R1CS, commons SrsCommons) Phase2Evaluations { - srs := srs1.parameters - size := len(srs.G1.AlphaTau) + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - var c2 Phase2 - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -210,10 +203,10 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() nWires := nbInternal + nbSecret + nbPublic @@ -252,36 +245,36 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) for i := 0; i < n-1; i++ { // TODO @Tabaie why is the last element always 0? - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) - c2.Sigmas = make([]valueUpdate, len(commitments)) - c2.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) - c2.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) + p.Sigmas = make([]valueUpdate, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) for j := range commitments { evals.G1.CKK[i] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) - c2.Parameters.G2.Sigma[j] = g2 + p.Parameters.G2.Sigma[j] = g2 } nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) // Evaluate PKK - c2.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) evals.G1.VKK 
= make([]curve.G1Affine, 0, nbPublic+len(commitments)) committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) nbCommitmentsSeen := 0 @@ -297,7 +290,7 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } else if j < nbPublic || isCommitment { evals.G1.VKK = append(evals.G1.VKK, tmp) } else { - c2.Parameters.G1.PKK = append(c2.Parameters.G1.PKK, tmp) + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) } if isCommitment { nbCommitmentsSeen++ @@ -305,54 +298,27 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } for j := range commitments { - c2.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Hash initial contribution - c2.Challenge = c2.hash() // TODO remove - return c2, evals + p.Challenge = nil + + return evals } +// VerifyPhase2 +// c0 must be initialized with the Init method func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { + // CIRITCAL TODO: Should run the "beacon" step afterwards contribs := append([]*Phase2{c0, c1}, c...) 
for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { + if err := contribs[i].Verify(contribs[i+1]); err != nil { return err } } return nil } -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, current.Challenge[:], 1) - - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") - } - - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on previous contribution") - } - - // Check for valid updates of PKK and Z using - L, prevL := linearCombination(contribution.Parameters.G1.PKK, current.Parameters.G1.PKK) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of PKK using δ⁻¹") - } - Z, prevZ := linearCombination(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of PKK using δ⁻¹") - } - - return nil -} - func (c *Phase2) hash() []byte { sha := sha256.New() c.writeTo(sha) diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index a1e79dfedb..470df05263 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -8,22 +8,26 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bn254" 
"github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" groth16 "github.com/consensys/gnark/backend/groth16/bn254" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +func (srs2 *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + // TODO @Tabaie beacon contribution + _, _, _, g2 := curve.Generators() // Initialize PK pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) pk.G1.Z = srs2.Parameters.G1.Z bitReverse(pk.G1.Z) pk.G1.K = srs2.Parameters.G1.PKK - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) + pk.G2.Beta.Set(&commons.G2.Beta) pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) // Filter out infinity points @@ -69,14 +73,24 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) + vk.G2.Beta.Set(&commons.G2.Beta) vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 + vk.CommitmentKeys[i].GSigmaNeg.Neg(&srs2.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = srs2.Parameters.G1.SigmaCKK[i] + } + // sets e, 
-[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) From 81fdfb681261bc67d966d07223fa95cfc890216c Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 13 Dec 2024 16:57:20 -0600 Subject: [PATCH 022/105] reface p *Phase2 --- backend/groth16/bn254/mpcsetup/marshal.go | 42 +++++++-------- backend/groth16/bn254/mpcsetup/phase2.go | 56 ++++++++++---------- backend/groth16/bn254/mpcsetup/setup.go | 25 +++++---- backend/groth16/bn254/mpcsetup/setup_test.go | 14 ++--- backend/groth16/bn254/mpcsetup/utils.go | 6 --- 5 files changed, 72 insertions(+), 71 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 3daf9fa35c..bb975bbcbf 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -56,25 +56,25 @@ func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { } // WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + n, err := p.writeTo(writer) if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Challenge) + nBytes, err := writer.Write(p.Challenge) return int64(nBytes) + n, err } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { +func (p *Phase2) writeTo(writer io.Writer) (int64, error) { enc := curve.NewEncoder(writer) toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.PKK, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, + &p.PublicKey.SG, + &p.PublicKey.SXG, + &p.PublicKey.XR, + &p.Parameters.G1.Delta, + p.Parameters.G1.PKK, + p.Parameters.G1.Z, + &p.Parameters.G2.Delta, } for _, v := range toEncode { @@ -87,16 +87,16 @@ func (c *Phase2) writeTo(writer io.Writer) (int64, error) { } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader 
io.Reader) (int64, error) { +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { dec := curve.NewDecoder(reader) toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.PKK, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, + &p.PublicKey.SG, + &p.PublicKey.SXG, + &p.PublicKey.XR, + &p.Parameters.G1.Delta, + &p.Parameters.G1.PKK, + &p.Parameters.G1.Z, + &p.Parameters.G2.Delta, } for _, v := range toEncode { @@ -105,8 +105,8 @@ func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { } } - c.Challenge = make([]byte, 32) - n, err := reader.Read(c.Challenge) + p.Challenge = make([]byte, 32) + n, err := reader.Read(p.Challenge) return int64(n) + dec.BytesRead(), err } diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 2927acbdb8..769c52c8e3 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -54,17 +54,17 @@ type Phase2 struct { Challenge []byte } -func (c *Phase2) Verify(next *Phase2) error { - challenge := c.hash() +func (p *Phase2) Verify(next *Phase2) error { + challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the previous phase's hash") } next.Challenge = challenge - if len(next.Parameters.G1.Z) != len(c.Parameters.G1.Z) || - len(next.Parameters.G1.PKK) != len(c.Parameters.G1.PKK) || - len(next.Parameters.G1.SigmaCKK) != len(c.Parameters.G1.SigmaCKK) || - len(next.Parameters.G2.Sigma) != len(c.Parameters.G2.Sigma) { + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { return errors.New("contribution size mismatch") } @@ -79,12 +79,12 @@ func (c *Phase2) Verify(next *Phase2) error { // 
verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] - for i := range c.Sigmas { // match the first commitment basis elem against the contribution commitment + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { return errors.New("commitment proving key subgroup check failed") } - if err := verifyContribution(&c.Sigmas[i], c.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &c.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + if err := verifyContribution(&p.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } @@ -95,60 +95,60 @@ func (c *Phase2) Verify(next *Phase2) error { return errors.New("derived values 𝔾₁ subgroup check failed") } - denom := cloneAppend([]curve.G1Affine{c.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) - num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, c.Parameters.G1.Z, c.Parameters.G1.PKK) - if err := verifyContribution(&c.Delta, denom, num, &c.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) + num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) + if err := verifyContribution(&p.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } return nil } -func (c *Phase2) Contribute() { +func (p *Phase2) Contribute() { // Sample toxic δ var delta, deltaInv fr.Element var deltaBI, deltaInvBI big.Int - c.Challenge = c.hash() + p.Challenge = 
p.hash() - if len(c.Parameters.G1.SigmaCKK) > 255 { + if len(p.Parameters.G1.SigmaCKK) > 255 { panic("too many commitments") // DST collision } - for i := range c.Parameters.G1.SigmaCKK { + for i := range p.Parameters.G1.SigmaCKK { var ( sigmaContribution fr.Element sigmaContributionI big.Int ) - pk := c.Parameters.G1.SigmaCKK[i] - c.Sigmas[i], sigmaContribution = updateValue(&pk[0], c.Challenge, byte(2+i)) + pk := p.Parameters.G1.SigmaCKK[i] + p.Sigmas[i], sigmaContribution = updateValue(&pk[0], p.Challenge, byte(2+i)) sigmaContribution.BigInt(&sigmaContributionI) for j := 1; j < len(pk); j++ { pk[j].ScalarMultiplication(&pk[j], &sigmaContributionI) } - c.Parameters.G2.Sigma[i].ScalarMultiplication(&c.Parameters.G2.Sigma[i], &sigmaContributionI) + p.Parameters.G2.Sigma[i].ScalarMultiplication(&p.Parameters.G2.Sigma[i], &sigmaContributionI) } - c.Delta, delta = updateValue(&c.Parameters.G1.Delta, c.Challenge, 1) + p.Delta, delta = updateValue(&p.Parameters.G1.Delta, p.Challenge, 1) deltaInv.Inverse(&delta) delta.BigInt(&deltaBI) deltaInv.BigInt(&deltaInvBI) // Update [δ]₂ - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) + p.Parameters.G2.Delta.ScalarMultiplication(&p.Parameters.G2.Delta, &deltaBI) - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) + p.Parameters.G1.Delta.ScalarMultiplication(&p.Parameters.G1.Delta, &deltaBI) + p.Parameters.G2.Delta.ScalarMultiplication(&p.Parameters.G2.Delta, &deltaBI) // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for i := 0; i < len(p.Parameters.G1.Z); i++ { + p.Parameters.G1.Z[i].ScalarMultiplication(&p.Parameters.G1.Z[i], &deltaInvBI) } // Update PKK using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.PKK); i++ { - c.Parameters.G1.PKK[i].ScalarMultiplication(&c.Parameters.G1.PKK[i], 
&deltaInvBI) + for i := 0; i < len(p.Parameters.G1.PKK); i++ { + p.Parameters.G1.PKK[i].ScalarMultiplication(&p.Parameters.G1.PKK[i], &deltaInvBI) } } @@ -319,9 +319,9 @@ func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { return nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + p.writeTo(sha) return sha.Sum(nil) } diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index 470df05263..24273e01f4 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -12,7 +12,14 @@ import ( groth16 "github.com/consensys/gnark/backend/groth16/bn254" ) -func (srs2 *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { // TODO @Tabaie beacon contribution @@ -22,13 +29,13 @@ func (srs2 *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, nConstra pk.Domain = *fft.NewDomain(uint64(nConstraints)) pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) pk.G1.Beta.Set(&commons.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.PKK + pk.G1.K = p.Parameters.G1.PKK pk.G2.Beta.Set(&commons.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -75,9 +82,9 @@ func (srs2 *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, nConstra // Initialize VK vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) vk.G1.Beta.Set(&commons.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) vk.G2.Beta.Set(&commons.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK @@ -85,10 +92,10 @@ func (srs2 *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, nConstra pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) for i := range vk.CommitmentKeys { vk.CommitmentKeys[i].G = g2 - vk.CommitmentKeys[i].GSigmaNeg.Neg(&srs2.Parameters.G2.Sigma[i]) + vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] - pk.CommitmentKeys[i].BasisExpSigma = srs2.Parameters.G1.SigmaCKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] } // sets e, -[δ]2, -[γ]2 diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 742b2539b7..a1b5ad1e60 100644 --- 
a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -176,14 +176,14 @@ func (p *Phase1) clone() Phase1 { return r } -func (phase2 *Phase2) clone() Phase2 { +func (p *Phase2) clone() Phase2 { r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.PKK = append(r.Parameters.G1.PKK, phase2.Parameters.G1.PKK...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Challenge = append(r.Challenge, phase2.Challenge...) + r.Parameters.G1.Delta = p.Parameters.G1.Delta + r.Parameters.G1.PKK = append(r.Parameters.G1.PKK, p.Parameters.G1.PKK...) + r.Parameters.G1.Z = append(r.Parameters.G1.Z, p.Parameters.G1.Z...) + r.Parameters.G2.Delta = p.Parameters.G2.Delta + r.PublicKey = p.PublicKey + r.Challenge = append(r.Challenge, p.Challenge...) return r } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 14cd4d5ca9..a429eed3bc 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -19,12 +19,6 @@ import ( "github.com/consensys/gnark/internal/utils" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine // XR = X.R ∈ 𝔾₂ proof of knowledge -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) From ebaff8743d06adaad36e575df15d5f610aad5dce Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 13 Dec 2024 18:11:47 -0600 Subject: [PATCH 023/105] feat phase2 final contribution --- backend/groth16/bn254/mpcsetup/phase1.go | 44 ++++--------- backend/groth16/bn254/mpcsetup/phase2.go | 80 +++++++++++++----------- backend/groth16/bn254/mpcsetup/setup.go | 8 ++- backend/groth16/bn254/mpcsetup/utils.go | 47 ++++++++++---- 4 files changed, 97 insertions(+), 82 deletions(-) diff --git 
a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index c50a68e909..91585aedb6 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -51,11 +51,11 @@ func (p *Phase1) Contribute() { var ( tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, tauContrib = updateValue(&p.parameters.G1.Tau[1], p.Challenge, 1) - p.proofs.Alpha, alphaContrib = updateValue(&p.parameters.G1.AlphaTau[0], p.Challenge, 2) - p.proofs.Beta, betaContrib = updateValue(&p.parameters.G1.BetaTau[0], p.Challenge, 3) + p.proofs.Tau, tauContrib = updateValue(p.parameters.G1.Tau[1], p.Challenge, 1) + p.proofs.Alpha, alphaContrib = updateValue(p.parameters.G1.AlphaTau[0], p.Challenge, 2) + p.proofs.Beta, betaContrib = updateValue(p.parameters.G1.BetaTau[0], p.Challenge, 3) - p.parameters.update(&tauContrib, &alphaContrib, &betaContrib, true) + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } // setZero instantiates the parameters, and sets all contributions to zero @@ -82,32 +82,28 @@ func (c *SrsCommons) setOne(N uint64) { } // from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications -func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element, principalG1sPrecomputed bool) { - i0 := 0 - if principalG1sPrecomputed { - i0 = 1 - } +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // TODO @gbotrel working with jacobian points here will help with perf. tauUpdates := powers(tauUpdate, len(c.G1.Tau)) // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. - scaleG1InPlace(c.G1.Tau[i0+1:], tauUpdates[i0+1:]) // first element remains 1. second element may have been precomputed. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) alphaUpdates[0].Set(alphaUpdate) - for i := i0; i < len(alphaUpdates); i++ { + for i := range alphaUpdates { alphaUpdates[i].Mul(&tauUpdates[i], &alphaUpdates[1]) } - scaleG1InPlace(c.G1.AlphaTau[i0:], alphaUpdates[i0:]) // first element may have been precomputed + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) betaUpdates[0].Set(betaUpdate) - for i := i0; i < len(betaUpdates); i++ { + for i := range betaUpdates { alphaUpdates[i].Mul(&tauUpdates[i], &betaUpdates[1]) } - scaleG1InPlace(c.G1.BetaTau[i0:], betaUpdates[i0:]) + scaleG1InPlace(c.G1.BetaTau, betaUpdates) var betaUpdateI big.Int betaUpdate.SetBigInt(&betaUpdateI) @@ -122,24 +118,8 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element, prin // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { - var ( - bb bytes.Buffer - err error - ) - bb.Write(p.hash()) - bb.Write(beaconChallenge) - - newContribs := make([]fr.Element, 3) - // cryptographically unlikely for this to be run more than once - for newContribs[0].IsZero() || newContribs[1].IsZero() || newContribs[2].IsZero() { - if newContribs, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), 3); err != nil { - panic(err) - } - bb.WriteByte('=') // padding just so that the hash is different next time - } - - p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2], false) - + newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) return p.parameters } diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 769c52c8e3..ac54387241 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -104,52 +104,60 @@ func (p *Phase2) Verify(next *Phase2) error { return nil } -func (p *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - - p.Challenge = p.hash() - - if len(p.Parameters.G1.SigmaCKK) > 255 { - panic("too many commitments") // DST collision +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } } - for i := range p.Parameters.G1.SigmaCKK { - var ( - sigmaContribution fr.Element - sigmaContributionI big.Int - ) - - pk := p.Parameters.G1.SigmaCKK[i] - p.Sigmas[i], sigmaContribution = updateValue(&pk[0], p.Challenge, byte(2+i)) - sigmaContribution.BigInt(&sigmaContributionI) - for j := 1; j < len(pk); j++ { - 
pk[j].ScalarMultiplication(&pk[j], &sigmaContributionI) + + for i := range sigma { + sigma[i].BigInt(&I) + for j := range sigma { + scale(&p.Parameters.G1.SigmaCKK[i][j]) } - p.Parameters.G2.Sigma[i].ScalarMultiplication(&p.Parameters.G2.Sigma[i], &sigmaContributionI) + point := &p.Parameters.G2.Sigma[i] + point.ScalarMultiplicationBase(&I) } - p.Delta, delta = updateValue(&p.Parameters.G1.Delta, p.Challenge, 1) + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) - deltaInv.Inverse(&delta) - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + delta.Inverse(delta) + delta.BigInt(&I) + for i := range p.Parameters.G1.Z { + scale(&p.Parameters.G1.Z[i]) + } + for i := range p.Parameters.G1.PKK { + scale(&p.Parameters.G1.PKK[i]) + } +} - // Update [δ]₂ - p.Parameters.G2.Delta.ScalarMultiplication(&p.Parameters.G2.Delta, &deltaBI) +func (p *Phase2) Contribute() { + p.Challenge = p.hash() - p.Parameters.G1.Delta.ScalarMultiplication(&p.Parameters.G1.Delta, &deltaBI) - p.Parameters.G2.Delta.ScalarMultiplication(&p.Parameters.G2.Delta, &deltaBI) + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta, delta = updateValue(p.Parameters.G1.Delta, p.Challenge, 1) - // Update Z using δ⁻¹ - for i := 0; i < len(p.Parameters.G1.Z); i++ { - p.Parameters.G1.Z[i].ScalarMultiplication(&p.Parameters.G1.Z[i], &deltaInvBI) + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision } - - // Update PKK using δ⁻¹ - for i := 0; i < len(p.Parameters.G1.PKK); i++ { - p.Parameters.G1.PKK[i].ScalarMultiplication(&p.Parameters.G1.PKK[i], &deltaInvBI) + for i := range sigma { + p.Sigmas[i], sigma[i] = updateValue(p.Parameters.G1.SigmaCKK[i][0], p.Challenge, byte(2+i)) } + + p.update(&delta, sigma) } // Init is to be run by the coordinator diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index 
24273e01f4..f13be1c20c 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -19,14 +19,16 @@ import ( // The inner workings of the random beacon are out of scope. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. -func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, nbConstraints int, beaconChallenge []byte) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { - // TODO @Tabaie beacon contribution + // final contributions + contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) _, _, _, g2 := curve.Generators() // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) + pk.Domain = *fft.NewDomain(uint64(nbConstraints)) pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) pk.G1.Beta.Set(&commons.G1.BetaTau[0]) pk.G1.Delta.Set(&p.Parameters.G1.Delta) diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index a429eed3bc..f24743a60c 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -8,15 +8,13 @@ package mpcsetup import ( "bytes" "errors" - "math/big" - "math/bits" - "runtime" - "time" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "runtime" ) func bitReverse[T any](a []T) { @@ -199,10 +197,6 @@ func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { return spG2 } -type RandomBeacon func(time.Time) []byte - -// func (rb RandomBeacon) GenerateChallenge(...) 
[]byte {} - type pair struct { g1 curve.G1Affine g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. @@ -223,7 +217,7 @@ type valueUpdate struct { // updateValue produces values associated with contribution to an existing value. // if prevCommitment contains only a 𝔾₁ value, then so will updatedCommitment // the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func updateValue(value *curve.G1Affine, challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { +func updateValue(value curve.G1Affine, challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { if _, err := contributionValue.SetRandom(); err != nil { panic(err) } @@ -232,7 +226,7 @@ func updateValue(value *curve.G1Affine, challenge []byte, dst byte) (proof value _, _, g1, _ := curve.Generators() proof.contributionCommitment.ScalarMultiplication(&g1, &contributionValueI) - value.ScalarMultiplication(value, &contributionValueI) + value.ScalarMultiplication(&value, &contributionValueI) // proof of knowledge to commitment. 
Algorithm 3 from section 3.7 pokBase := genR(proof.contributionCommitment, challenge, dst) // r @@ -360,3 +354,34 @@ func partialSums(s ...int) []int { } return sums } + +func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { + var ( + bb bytes.Buffer + err error + ) + bb.Grow(len(hash) + len(beaconChallenge)) + bb.Write(hash) + bb.Write(beaconChallenge) + + res := make([]fr.Element, 1) + + allNonZero := func() bool { + for i := range res { + if res[i].IsZero() { + return false + } + } + return true + } + + // cryptographically unlikely for this to be run more than once + for !allNonZero() { + if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { + panic(err) + } + bb.WriteByte('=') // padding just so that the hash is different next time + } + + return res +} From c2f3b8dabb64cfe27f439f91020028ac3287dcb2 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Sun, 15 Dec 2024 19:47:37 -0600 Subject: [PATCH 024/105] feat marshal --- backend/groth16/bn254/mpcsetup/marshal.go | 162 ++++++++++++++-------- backend/groth16/bn254/mpcsetup/utils.go | 2 +- 2 files changed, 108 insertions(+), 56 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index bb975bbcbf..b2f878fda9 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -6,11 +6,12 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bn254" "io" ) -func appendRefs[T any](s []interface{}, v []T) []interface{} { +func appendRefs[T any](s []any, v []T) []any { for i := range v { s = append(s, &v[i]) } @@ -55,110 +56,161 @@ func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { return } -// WriteTo implements io.WriterTo -func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := p.writeTo(writer) - if err != nil { - return n, 
err +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") } - nBytes, err := writer.Write(p.Challenge) - return int64(nBytes) + n, err -} -func (p *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &p.PublicKey.SG, - &p.PublicKey.SXG, - &p.PublicKey.XR, - &p.Parameters.G1.Delta, - p.Parameters.G1.PKK, - p.Parameters.G1.Z, - &p.Parameters.G2.Delta, + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = appendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = appendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters + enc := curve.NewEncoder(writer) + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &p.PublicKey.SG, - &p.PublicKey.SXG, - &p.PublicKey.XR, - &p.Parameters.G1.Delta, - &p.Parameters.G1.PKK, - &p.Parameters.G1.Z, - 
&p.Parameters.G2.Delta, + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes - for _, v := range toEncode { + p.Sigmas = make([]valueUpdate, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) + + dec := curve.NewDecoder(reader) + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } + n += dec.BytesRead() + + dn, err := p.Delta.ReadFrom(reader) + n += dn + if err != nil { + return n, err + } - p.Challenge = make([]byte, 32) - n, err := reader.Read(p.Challenge) - return int64(n) + dec.BytesRead(), err + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + return n, nil +} + +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 2 + refs := make([]any, 2, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs = appendRefs(refs, c.G1.A) + refs = appendRefs(refs, c.G1.B) + refs = appendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") + } + + return refs } // WriteTo implements io.WriterTo func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, - } - for _, v := range toEncode { + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - 
&c.G1.A, - &c.G1.B, - &c.G2.B, + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - return dec.BytesRead(), nil + return n + dec.BytesRead(), nil } // refsSlice produces a slice consisting of references to all sub-elements // prepended by the size parameter, to be used in WriteTo and ReadFrom functions -func (c *SrsCommons) refsSlice() []interface{} { - N := len(c.G2.Tau) - estimatedNbElems := 5*N - 1 +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 // size N 1 // [β]₂ 1 // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N - refs := make([]interface{}, 1, estimatedNbElems) + refs := make([]any, 1, expectedLen) refs[0] = N refs = appendRefs(refs, c.G1.Tau[1:]) @@ -166,7 +218,7 @@ func (c *SrsCommons) refsSlice() []interface{} { refs = appendRefs(refs, c.G1.BetaTau) refs = appendRefs(refs, c.G1.AlphaTau) - if len(refs) != estimatedNbElems { + if uint64(len(refs)) != expectedLen { panic("incorrect length estimate") } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index f24743a60c..d99abed52a 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -302,7 +302,7 @@ func bivariateRandomMonomials(ends ...int) []fr.Element { return nil } - res := make([]fr.Element, ends[]) + res := make([]fr.Element, ends[len(ends)-1]) if _, err := res[1].SetRandom(); err != nil { panic(err) } From 5708047d0f0cd0b9f29033ed1e57b647bf7c142d Mon Sep 17 
00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Sun, 15 Dec 2024 21:22:56 -0600 Subject: [PATCH 025/105] test integration full - fails --- .../groth16/bn254/mpcsetup/marshal_test.go | 13 +- backend/groth16/bn254/mpcsetup/phase1.go | 37 ++++- backend/groth16/bn254/mpcsetup/phase2.go | 33 +++-- backend/groth16/bn254/mpcsetup/setup.go | 4 +- backend/groth16/bn254/mpcsetup/setup_test.go | 130 ++++++++++-------- 5 files changed, 128 insertions(+), 89 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal_test.go b/backend/groth16/bn254/mpcsetup/marshal_test.go index 51651f8aca..adbfc3fe0e 100644 --- a/backend/groth16/bn254/mpcsetup/marshal_test.go +++ b/backend/groth16/bn254/mpcsetup/marshal_test.go @@ -5,17 +5,7 @@ package mpcsetup -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bn254" - cs "github.com/consensys/gnark/constraint/bn254" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - +/* TODO bring this back func TestContributionSerialization(t *testing.T) { if testing.Short() { t.Skip("skipping test in short mode.") @@ -41,3 +31,4 @@ func TestContributionSerialization(t *testing.T) { assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) } +*/ diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 91585aedb6..56e2457a9b 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -10,6 +10,7 @@ import ( "crypto/sha256" "errors" "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "math/big" @@ -123,14 +124,19 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { return p.parameters } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := 
append([]*Phase1{c0, c1}, c...) - for i := 0; i < len(contribs)-1; i++ { - if err := contribs[i].Verify(contribs[i+1]); err != nil { - return err +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return SrsCommons{}, err } + prev = c[i] } - return nil + return prev.Seal(beaconChallenge), nil } // Verify assumes previous is correct @@ -234,3 +240,22 @@ func (p *Phase1) hash() []byte { sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index ac54387241..c13e6f9d56 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -10,6 +10,7 @@ import ( "crypto/sha256" "errors" "fmt" + "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" cs "github.com/consensys/gnark/constraint/bn254" "math/big" @@ -20,6 +21,8 @@ import ( "github.com/consensys/gnark/constraint" ) +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations type Phase2Evaluations struct { // TODO @Tabaie rename G1 
struct { A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ @@ -160,10 +163,10 @@ func (p *Phase2) Contribute() { p.update(&delta, sigma) } -// Init is to be run by the coordinator +// Initialize is to be run by the coordinator // It involves no coin tosses. A verifier should // simply rerun all the steps -func (p *Phase2) Init(r1cs *cs.R1CS, commons SrsCommons) Phase2Evaluations { +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { @@ -314,22 +317,28 @@ func (p *Phase2) Init(r1cs *cs.R1CS, commons SrsCommons) Phase2Evaluations { return evals } -// VerifyPhase2 -// c0 must be initialized with the Init method -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - // CIRITCAL TODO: Should run the "beacon" step afterwards - contribs := append([]*Phase2{c0, c1}, c...) - for i := 0; i < len(contribs)-1; i++ { - if err := contribs[i].Verify(contribs[i+1]); err != nil { - return err +// VerifyPhase2 for circuit described by r1cs +// using parameters from commons +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return &pk, &vk, nil } func (p *Phase2) hash() []byte { sha := sha256.New() - p.writeTo(sha) + p.WriteTo(sha) return sha.Sum(nil) } diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index f13be1c20c..ec0002f578 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ 
b/backend/groth16/bn254/mpcsetup/setup.go @@ -19,7 +19,7 @@ import ( // The inner workings of the random beacon are out of scope. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. -func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, nbConstraints int, beaconChallenge []byte) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { // final contributions contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) @@ -28,7 +28,7 @@ func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, nbConstrain _, _, _, g2 := curve.Generators() // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nbConstraints)) + pk.Domain = *fft.NewDomain(uint64(len(evals.G1.A))) pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) pk.G1.Beta.Set(&commons.G1.BetaTau[0]) pk.G1.Delta.Set(&p.Parameters.G1.Delta) diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index a1b5ad1e60..7396acea71 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -6,9 +6,12 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" cs "github.com/consensys/gnark/constraint/bn254" + "io" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,51 +23,85 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" ) +// TestSetupCircuit a full integration test of the MPC setup func TestSetupCircuit(t *testing.T) { const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 + nbContributionsPhase1 = 3 + nbContributionsPhase2 = 3 ) assert := require.New(t) - var srs1 Phase1 - 
srs1.Initialize(1 << power) + // Compile the circuit + var circuit Circuit + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &circuit) + assert.NoError(err) - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add its contribution and send back to coordinator. - prev := srs1.clone() + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) + var ( + bb bytes.Buffer // simulating network communications + serialized [max(nbContributionsPhase1, nbContributionsPhase2)][]byte + phase1 [nbContributionsPhase1]*Phase1 + p1 Phase1 + phase2 [nbContributionsPhase2]*Phase2 + p2 Phase2 + ) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err = v.WriteTo(&bb) + assert.NoError(err) + return bb.Bytes() + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + // Make contributions for serialized + for i := range phase1 { + if i == 0 { // no "predecessor" to the first contribution + p1.Initialize(domainSize) + } + + p1.Contribute() + serialized[i] = serialize(&p1) + } + + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } + + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) 
+ { + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead + } - var evals Phase2Evaluations r1cs := ccs.(*cs.R1CS) // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add its contribution and send back to coordinator. - prev := srs2.clone() + for i := range phase2 { + if i == 0 { + p2.Initialize(r1cs, &srsCommons) + } + p2.Contribute() + serialized[i] = serialize(&p2) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + pk, vk, err := VerifyPhase2(r1cs, &srsCommons, []byte("testing phase2"), phase2[:]...) + assert.NoError(err) // Build the witness var preImage, hash fr.Element @@ -81,13 +118,14 @@ func TestSetupCircuit(t *testing.T) { assert.NoError(err) // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + proof, err := groth16.Prove(ccs, pk, witness) assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) + err = groth16.Verify(proof, vk, pubWitness) assert.NoError(err) } +/* func BenchmarkPhase1(b *testing.B) { const power = 14 @@ -140,7 +178,7 @@ func BenchmarkPhase2(b *testing.B) { }) } - +*/ // Circuit defines a pre-image knowledge proof // mimc(secret preImage) = public hash type Circuit struct { @@ -158,32 +196,8 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil -} - -func (p *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, p.Parameters.G1.Tau...) 
- r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, p.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, p.Parameters.G1.BetaTau...) - - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, p.Parameters.G2.Tau...) - r.Parameters.G2.Beta = p.Parameters.G2.Beta - - r.PublicKeys = p.PublicKeys - r.Hash = append(r.Hash, p.Hash...) - - return r -} - -func (p *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = p.Parameters.G1.Delta - r.Parameters.G1.PKK = append(r.Parameters.G1.PKK, p.Parameters.G1.PKK...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, p.Parameters.G1.Z...) - r.Parameters.G2.Delta = p.Parameters.G2.Delta - r.PublicKey = p.PublicKey - r.Challenge = append(r.Challenge, p.Challenge...) + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) - return r + return err } From 143c2b249de95b4063cfabeabd571ee3a62ddec3 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 12:06:04 -0600 Subject: [PATCH 026/105] test value update/pok --- backend/groth16/bn254/mpcsetup/marshal.go | 4 +- backend/groth16/bn254/mpcsetup/phase1.go | 5 +- backend/groth16/bn254/mpcsetup/setup_test.go | 1 + backend/groth16/bn254/mpcsetup/unit_test.go | 51 ++++++++++++++++++++ backend/groth16/bn254/mpcsetup/utils.go | 1 - 5 files changed, 55 insertions(+), 7 deletions(-) create mode 100644 backend/groth16/bn254/mpcsetup/unit_test.go diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index b2f878fda9..e8cd155f69 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -210,9 +210,9 @@ func (c *SrsCommons) refsSlice() []any { // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N - refs := make([]any, 1, expectedLen) + refs := make([]any, 2, expectedLen) refs[0] = N - + refs[1] = &c.G2.Beta refs = 
appendRefs(refs, c.G1.Tau[1:]) refs = appendRefs(refs, c.G2.Tau[1:]) refs = appendRefs(refs, c.G1.BetaTau) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 56e2457a9b..fdcbf57ff2 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -61,7 +61,7 @@ func (p *Phase1) Contribute() { // setZero instantiates the parameters, and sets all contributions to zero func (c *SrsCommons) setZero(N uint64) { - c.G1.Tau = make([]curve.G1Affine, 2*N-2) + c.G1.Tau = make([]curve.G1Affine, 2*N-1) c.G2.Tau = make([]curve.G2Affine, N) c.G1.AlphaTau = make([]curve.G1Affine, N) c.G1.BetaTau = make([]curve.G1Affine, N) @@ -232,9 +232,6 @@ func (p *Phase1) Verify(next *Phase1) error { } func (p *Phase1) hash() []byte { - if len(p.Challenge) == 0 { - panic("challenge field missing") - } sha := sha256.New() p.WriteTo(sha) sha.Write(p.Challenge) diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 7396acea71..2c4a3a2d81 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -78,6 +78,7 @@ func TestSetupCircuit(t *testing.T) { // Verify contributions for phase 1 and generate non-circuit-specific parameters srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) 
+ assert.NoError(err) { var commonsRead SrsCommons deserialize(&commonsRead, serialize(&srsCommons)) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go new file mode 100644 index 0000000000..bdfce05ae6 --- /dev/null +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -0,0 +1,51 @@ +package mpcsetup + +import ( + "bytes" + curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/stretchr/testify/require" + "math/big" + "testing" +) + +// small tests for sub-functionalities of the mpc setup +// this file is not autogenerated, and not generified for other curves + +func TestContributionPok(t *testing.T) { + const ( + pokChallenge = "challenge" + pokDst = 1 + ) + x0, err := curve.HashToG1([]byte("contribution test"), nil) + require.NoError(t, err) + proof, d := updateValue(x0, []byte(pokChallenge), pokDst) + var ( + x1 curve.G1Affine + dI big.Int + ) + d.BigInt(&dI) + x1.ScalarMultiplication(&x0, &dI) + + // verify proof - no G2 + require.NoError(t, proof.verify(pair{x0, nil}, pair{x1, nil}, []byte(pokChallenge), pokDst)) + + // verify proof - with G2 + y0, err := curve.RandomOnG2() + require.NoError(t, err) + var y1 curve.G2Affine + y1.ScalarMultiplication(&y0, &dI) + + require.NoError(t, proof.verify(pair{x0, &y0}, pair{x1, &y1}, []byte(pokChallenge), pokDst)) + + // read/write round-trip + var bb bytes.Buffer + n0, err := proof.WriteTo(&bb) + require.NoError(t, err) + var proofBack valueUpdate + n1, err := proofBack.ReadFrom(&bb) + require.NoError(t, err) + require.Equal(t, n0, n1) + + require.NoError(t, proofBack.verify(pair{x0, nil}, pair{x1, nil}, []byte(pokChallenge), pokDst)) + require.NoError(t, proofBack.verify(pair{x0, &y0}, pair{x1, &y1}, []byte(pokChallenge), pokDst)) +} diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index d99abed52a..e8d66b3479 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ 
-215,7 +215,6 @@ type valueUpdate struct { } // updateValue produces values associated with contribution to an existing value. -// if prevCommitment contains only a 𝔾₁ value, then so will updatedCommitment // the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. func updateValue(value curve.G1Affine, challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { if _, err := contributionValue.SetRandom(); err != nil { From 2afebf1edd82a783abb3a0ebfe007ef92793c1a7 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 12:23:29 -0600 Subject: [PATCH 027/105] test setup no contributors --- backend/groth16/bn254/mpcsetup/phase1.go | 2 + backend/groth16/bn254/mpcsetup/phase2.go | 2 +- backend/groth16/bn254/mpcsetup/setup.go | 12 +++- backend/groth16/bn254/mpcsetup/setup_test.go | 70 +++++++++++--------- backend/groth16/bn254/mpcsetup/unit_test.go | 23 +++++++ 5 files changed, 75 insertions(+), 34 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index fdcbf57ff2..0c5a358eac 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -52,6 +52,8 @@ func (p *Phase1) Contribute() { var ( tauContrib, alphaContrib, betaContrib fr.Element ) + fmt.Println("initial tau", p.parameters.G1.Tau[1].String()) + fmt.Println("challenge", p.Challenge) p.proofs.Tau, tauContrib = updateValue(p.parameters.G1.Tau[1], p.Challenge, 1) p.proofs.Alpha, alphaContrib = updateValue(p.parameters.G1.AlphaTau[0], p.Challenge, 2) p.proofs.Beta, betaContrib = updateValue(p.parameters.G1.BetaTau[0], p.Challenge, 3) diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index c13e6f9d56..dbd4260e4f 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -333,7 +333,7 @@ func VerifyPhase2(r1cs *cs.R1CS, 
commons *SrsCommons, beaconChallenge []byte, c } pk, vk := prev.Seal(commons, &evals, beaconChallenge) - return &pk, &vk, nil + return pk, vk, nil } func (p *Phase2) hash() []byte { diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index ec0002f578..75f464d3d3 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -9,7 +9,8 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" - groth16 "github.com/consensys/gnark/backend/groth16/bn254" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bn254" ) // Seal performs the final contribution and outputs the proving and verifying keys. @@ -19,7 +20,7 @@ import ( // The inner workings of the random beacon are out of scope. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
-func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { // final contributions contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) @@ -27,6 +28,11 @@ func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChall _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK pk.Domain = *fft.NewDomain(uint64(len(evals.G1.A))) pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) @@ -105,5 +111,5 @@ func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChall panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 2c4a3a2d81..7fc7894240 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -12,6 +12,7 @@ import ( "github.com/consensys/gnark-crypto/ecc/bn254/fr" cs "github.com/consensys/gnark/constraint/bn254" "io" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -23,8 +24,8 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" ) -// TestSetupCircuit a full integration test of the MPC setup -func TestSetupCircuit(t *testing.T) { +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { const ( nbContributionsPhase1 = 3 nbContributionsPhase2 = 3 @@ -33,9 +34,7 @@ func TestSetupCircuit(t *testing.T) { assert := require.New(t) // Compile the circuit - var circuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &circuit) - assert.NoError(err) + ccs := getTestCircuit(t) domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) @@ -50,7 +49,7 @@ func TestSetupCircuit(t 
*testing.T) { serialize := func(v io.WriterTo) []byte { bb.Reset() - _, err = v.WriteTo(&bb) + _, err := v.WriteTo(&bb) assert.NoError(err) return bb.Bytes() } @@ -85,12 +84,10 @@ func TestSetupCircuit(t *testing.T) { srsCommons = commonsRead } - r1cs := ccs.(*cs.R1CS) - // Prepare for phase-2 for i := range phase2 { if i == 0 { - p2.Initialize(r1cs, &srsCommons) + p2.Initialize(ccs, &srsCommons) } p2.Contribute() serialized[i] = serialize(&p2) @@ -101,29 +98,10 @@ func TestSetupCircuit(t *testing.T) { deserialize(phase2[i], serialized[i]) } - pk, vk, err := VerifyPhase2(r1cs, &srsCommons, []byte("testing phase2"), phase2[:]...) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) assert.NoError(err) - // Build the witness - var preImage, hash fr.Element - { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) - } - - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) - - pubWitness, err := witness.Public() - assert.NoError(err) - - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, pk, witness) - assert.NoError(err) - - err = groth16.Verify(proof, vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } /* @@ -202,3 +180,35 @@ func (circuit *Circuit) Define(api frontend.API) error { return err } + +func getTestCircuit(t *testing.T) *cs.R1CS { + return sync.OnceValue(func() *cs.R1CS { + var circuit Circuit + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &circuit) + require.NoError(t, err) + return ccs.(*cs.R1CS) + })() +} + +func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + // Build the witness + var preImage, hash fr.Element + { + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) + } + + witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) + 
require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + require.NoError(t, err) + + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) +} diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index bdfce05ae6..feaf8fbc20 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -2,6 +2,7 @@ package mpcsetup import ( "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/stretchr/testify/require" "math/big" @@ -49,3 +50,25 @@ func TestContributionPok(t *testing.T) { require.NoError(t, proofBack.verify(pair{x0, nil}, pair{x1, nil}, []byte(pokChallenge), pokDst)) require.NoError(t, proofBack.verify(pair{x0, &y0}, pair{x1, &y1}, []byte(pokChallenge), pokDst)) } + +// TestSetupBeaconOnly tests the setup/key extraction +// as well as the random beacon contribution +// without any untrusted contributors +func TestSetupBeaconOnly(t *testing.T) { + + // Compile the circuit + ccs := getTestCircuit(t) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) + + var ( + p1 Phase1 + p2 Phase2 + ) + p1.Initialize(domainSize) + commons := p1.Seal([]byte("beacon 1")) + + evals := p2.Initialize(ccs, &commons) + pk, vk := p2.Seal(&commons, &evals, []byte("beacon 2")) + + proveVerifyCircuit(t, pk, vk) +} From cd1d7a3ef00c1aa7709f4e568be3af5e3dad25d3 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 12:29:00 -0600 Subject: [PATCH 028/105] fix setOne --- backend/groth16/bn254/mpcsetup/phase1.go | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 0c5a358eac..5fb618414e 100644 --- 
a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -73,15 +73,23 @@ func (c *SrsCommons) setZero(N uint64) { // setOne instantiates the parameters, and sets all contributions to one func (c *SrsCommons) setOne(N uint64) { c.setZero(N) - for i := range c.G1.Tau { - c.G1.Tau[i] = c.G1.Tau[0] + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) + } } - for i := range c.G1.AlphaTau { - c.G1.AlphaTau[i] = c.G1.AlphaTau[0] - c.G1.BetaTau[i] = c.G1.AlphaTau[0] - c.G2.Tau[i] = c.G2.Tau[0] + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) + } } - c.G2.Beta = c.G2.Tau[0] + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } // from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications From db574a4e4758c210600f8cef960f82440dc3e98a Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 12:47:51 -0600 Subject: [PATCH 029/105] fix mergeIterator --- backend/groth16/bn254/mpcsetup/phase2.go | 2 +- backend/groth16/internal/utils.go | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index dbd4260e4f..644a8bac22 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -277,7 +277,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) for j := range commitments { - evals.G1.CKK[i] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) p.Parameters.G2.Sigma[j] = g2 } diff --git a/backend/groth16/internal/utils.go b/backend/groth16/internal/utils.go index 
67648b2104..be4bcf34fa 100644 --- a/backend/groth16/internal/utils.go +++ b/backend/groth16/internal/utils.go @@ -57,6 +57,9 @@ func (i *MergeIterator) findLeast() { // Peek returns the next smallest value and the index of the slice it came from // If the iterator is empty, Peek returns (math.MaxInt, -1) func (i *MergeIterator) Peek() (value, index int) { + if i.leastIndex == -1 { + return math.MaxInt, -1 + } return i.slices[i.leastIndex][0], i.leastIndex } @@ -64,8 +67,8 @@ func (i *MergeIterator) Peek() (value, index int) { // If the iterator is empty, Next returns (math.MaxInt, -1) func (i *MergeIterator) Next() (value, index int) { value, index = i.Peek() - i.findLeast() i.slices[i.leastIndex] = i.slices[i.leastIndex][1:] + i.findLeast() return } From bd89ca5b074c85fd044c7b2ba7ca91c8a9b399f3 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 13:08:41 -0600 Subject: [PATCH 030/105] fix commitmentInfo in vk --- backend/groth16/bn254/mpcsetup/marshal.go | 5 +++-- backend/groth16/bn254/mpcsetup/phase2.go | 3 +++ backend/groth16/bn254/mpcsetup/setup.go | 1 + backend/groth16/bn254/mpcsetup/unit_test.go | 22 +++++++++++++++++++++ 4 files changed, 29 insertions(+), 2 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index e8cd155f69..3511a1a878 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -150,10 +150,11 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { func (c *Phase2Evaluations) refsSlice() []any { N := uint64(len(c.G1.A)) - expectedLen := 3*N + 2 - refs := make([]any, 2, expectedLen) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) refs[0] = &c.G1.CKK refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted refs = appendRefs(refs, c.G1.A) refs = appendRefs(refs, c.G1.B) refs = appendRefs(refs, c.G2.B) diff --git a/backend/groth16/bn254/mpcsetup/phase2.go 
b/backend/groth16/bn254/mpcsetup/phase2.go index 644a8bac22..12f638d0ba 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -33,6 +33,7 @@ type Phase2Evaluations struct { // TODO @Tabaie rename G2 struct { B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { @@ -222,6 +223,8 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index 75f464d3d3..bb0f3449a0 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -105,6 +105,7 @@ func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChall pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index feaf8fbc20..cc48b25f70 100644 --- 
a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -4,6 +4,8 @@ import ( "bytes" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bn254" "github.com/stretchr/testify/require" "math/big" "testing" @@ -70,5 +72,25 @@ func TestSetupBeaconOnly(t *testing.T) { evals := p2.Initialize(ccs, &commons) pk, vk := p2.Seal(&commons, &evals, []byte("beacon 2")) + _pk := pk.(*groth16Impl.ProvingKey) + //_vk := vk.(*groth16Impl.VerifyingKey) + + rpk, rvk, err := groth16.Setup(ccs) + require.NoError(t, err) + _rpk := rpk.(*groth16Impl.ProvingKey) + + // assert everything is of the same size + require.Equal(t, len(_rpk.G1.A), len(_pk.G1.A)) + require.Equal(t, len(_rpk.G1.B), len(_pk.G1.B)) + require.Equal(t, len(_rpk.G1.K), len(_pk.G1.K)) + require.Equal(t, len(_rpk.G1.Z), len(_pk.G1.Z)) + require.Equal(t, len(_rpk.G2.B), len(_pk.G2.B)) + require.Equal(t, len(_rpk.CommitmentKeys), len(_pk.CommitmentKeys)) + for i := range _rpk.CommitmentKeys { + require.Equal(t, len(_rpk.CommitmentKeys[i].BasisExpSigma), len(_pk.CommitmentKeys[i].BasisExpSigma)) + require.Equal(t, len(_rpk.CommitmentKeys[i].Basis), len(_pk.CommitmentKeys[i].Basis)) + } + proveVerifyCircuit(t, pk, vk) + proveVerifyCircuit(t, rpk, rvk) } From 813f559eb0f872f444efb18b0bc134e591625a76 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 13:25:22 -0600 Subject: [PATCH 031/105] test found cardinality error --- backend/groth16/bn254/mpcsetup/phase1.go | 6 ++++-- backend/groth16/bn254/mpcsetup/setup_test.go | 16 ++++++++++++++-- backend/groth16/bn254/mpcsetup/unit_test.go | 5 ++++- 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 5fb618414e..2c92518806 100644 --- 
a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -105,14 +105,16 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) alphaUpdates[0].Set(alphaUpdate) for i := range alphaUpdates { - alphaUpdates[i].Mul(&tauUpdates[i], &alphaUpdates[1]) + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) betaUpdates[0].Set(betaUpdate) for i := range betaUpdates { - alphaUpdates[i].Mul(&tauUpdates[i], &betaUpdates[1]) + alphaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } scaleG1InPlace(c.G1.BetaTau, betaUpdates) diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 7fc7894240..944f5a809d 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -183,7 +183,7 @@ func (circuit *Circuit) Define(api frontend.API) error { func getTestCircuit(t *testing.T) *cs.R1CS { return sync.OnceValue(func() *cs.R1CS { - var circuit Circuit + var circuit superSimpleCircuit //Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &circuit) require.NoError(t, err) return ccs.(*cs.R1CS) @@ -199,7 +199,10 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin hash.SetBytes(m.Sum(nil)) } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) + //assignment :=Circuit{PreImage: preImage, Hash: hash} + assignment := superSimpleCircuit{A: 42} + + witness, err := frontend.NewWitness(&assignment, curve.ID.ScalarField()) require.NoError(t, err) pubWitness, err := witness.Public() @@ -212,3 +215,12 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin err = groth16.Verify(proof, vk, pubWitness) 
require.NoError(t, err) } + +type superSimpleCircuit struct { + A frontend.Variable `gnark:",public"` +} + +func (circuit *superSimpleCircuit) Define(api frontend.API) error { + api.AssertIsEqual(circuit.A, 42) + return nil +} diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index cc48b25f70..8c72766fad 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -2,6 +2,7 @@ package mpcsetup import ( "bytes" + "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark/backend/groth16" @@ -80,6 +81,7 @@ func TestSetupBeaconOnly(t *testing.T) { _rpk := rpk.(*groth16Impl.ProvingKey) // assert everything is of the same size + require.Equal(t, _rpk.Domain.Cardinality, _pk.Domain.Cardinality) require.Equal(t, len(_rpk.G1.A), len(_pk.G1.A)) require.Equal(t, len(_rpk.G1.B), len(_pk.G1.B)) require.Equal(t, len(_rpk.G1.K), len(_pk.G1.K)) @@ -91,6 +93,7 @@ func TestSetupBeaconOnly(t *testing.T) { require.Equal(t, len(_rpk.CommitmentKeys[i].Basis), len(_pk.CommitmentKeys[i].Basis)) } - proveVerifyCircuit(t, pk, vk) proveVerifyCircuit(t, rpk, rvk) + fmt.Println("regular proof verified") + proveVerifyCircuit(t, pk, vk) } From 49a22a8fca30ee58f196f3babe0c14e9d0defe12 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 15:26:07 -0600 Subject: [PATCH 032/105] fix beta update --- backend/groth16/bn254/mpcsetup/marshal.go | 5 +- backend/groth16/bn254/mpcsetup/phase1.go | 4 +- backend/groth16/bn254/mpcsetup/phase2.go | 2 + backend/groth16/bn254/mpcsetup/setup.go | 2 +- backend/groth16/bn254/mpcsetup/unit_test.go | 110 ++++++++++++++++++++ 5 files changed, 118 insertions(+), 5 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 3511a1a878..4c1ab916c1 100644 --- 
a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -150,11 +150,12 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { func (c *Phase2Evaluations) refsSlice() []any { N := uint64(len(c.G1.A)) - expectedLen := 3*N + 3 - refs := make([]any, 3, expectedLen) + expectedLen := 3*N + 4 + refs := make([]any, 4, expectedLen) refs[0] = &c.G1.CKK refs[1] = &c.G1.VKK refs[2] = &c.PublicAndCommitmentCommitted + refs[3] = &c.NbConstraints refs = appendRefs(refs, c.G1.A) refs = appendRefs(refs, c.G1.B) refs = appendRefs(refs, c.G2.B) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 2c92518806..c4f9f0c002 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -114,12 +114,12 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) betaUpdates[0].Set(betaUpdate) for i := range betaUpdates { - alphaUpdates[i].Mul(&tauUpdates[i], betaUpdate) + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } scaleG1InPlace(c.G1.BetaTau, betaUpdates) var betaUpdateI big.Int - betaUpdate.SetBigInt(&betaUpdateI) + betaUpdate.BigInt(&betaUpdateI) c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) } diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 12f638d0ba..664bf502cb 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -34,6 +34,7 @@ type Phase2Evaluations struct { // TODO @Tabaie rename B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } PublicAndCommitmentCommitted [][]int + NbConstraints uint64 } type Phase2 struct { @@ -225,6 +226,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation var evals Phase2Evaluations commitmentInfo := 
r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.NbConstraints = uint64(r1cs.GetNbConstraints()) evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index bb0f3449a0..f4e2662df8 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -34,7 +34,7 @@ func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChall ) // Initialize PK - pk.Domain = *fft.NewDomain(uint64(len(evals.G1.A))) + pk.Domain = *fft.NewDomain(evals.NbConstraints) pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) pk.G1.Beta.Set(&commons.G1.BetaTau[0]) pk.G1.Delta.Set(&p.Parameters.G1.Delta) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 8c72766fad..fbf34e8bd1 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark/backend/groth16" groth16Impl "github.com/consensys/gnark/backend/groth16/bn254" "github.com/stretchr/testify/require" @@ -70,6 +71,8 @@ func TestSetupBeaconOnly(t *testing.T) { p1.Initialize(domainSize) commons := p1.Seal([]byte("beacon 1")) + commons = commonsSmallValues(domainSize, 2, 3, 4) + evals := p2.Initialize(ccs, &commons) pk, vk := 
p2.Seal(&commons, &evals, []byte("beacon 2")) @@ -96,4 +99,111 @@ func TestSetupBeaconOnly(t *testing.T) { proveVerifyCircuit(t, rpk, rvk) fmt.Println("regular proof verified") proveVerifyCircuit(t, pk, vk) + fmt.Println("mpc proof verified") +} + +func TestPhase1Contribute(t *testing.T) { + +} + +func TestPhase1Seal(t *testing.T) { + +} + +func commonsSmallValues(N, tau, alpha, beta uint64) SrsCommons { + var ( + res SrsCommons + I big.Int + coeff fr.Element + ) + _, _, g1, g2 := curve.Generators() + tauPowers := powersI(tau, int(2*N-1)) + res.G1.Tau = make([]curve.G1Affine, 2*N-1) + for i := range res.G1.Tau { + tauPowers[i].BigInt(&I) + res.G1.Tau[i].ScalarMultiplication(&g1, &I) + } + + res.G2.Tau = make([]curve.G2Affine, N) + for i := range res.G2.Tau { + tauPowers[i].BigInt(&I) + res.G2.Tau[i].ScalarMultiplication(&g2, &I) + } + + res.G1.AlphaTau = make([]curve.G1Affine, N) + coeff.SetUint64(alpha) + for i := range res.G1.AlphaTau { + var x fr.Element + x.Mul(&tauPowers[i], &coeff) + x.BigInt(&I) + res.G1.AlphaTau[i].ScalarMultiplication(&g1, &I) + } + + res.G1.BetaTau = make([]curve.G1Affine, N) + coeff.SetUint64(beta) + for i := range res.G1.BetaTau { + var x fr.Element + x.Mul(&tauPowers[i], &coeff) + x.BigInt(&I) + res.G1.BetaTau[i].ScalarMultiplication(&g1, &I) + } + + I.SetUint64(beta) + res.G2.Beta.ScalarMultiplication(&g2, &I) + + return res +} + +func powersI(x uint64, n int) []fr.Element { + var y fr.Element + y.SetUint64(x) + return powers(&y, n) +} + +func TestPowers(t *testing.T) { + var x fr.Element + x.SetUint64(2) + x2 := powers(&x, 10) + for i := range x2 { + require.True(t, x2[i].IsUint64()) + require.Equal(t, x2[i].Uint64(), uint64(1< Date: Mon, 16 Dec 2024 15:27:38 -0600 Subject: [PATCH 033/105] test trivial circuit works --- backend/groth16/bn254/mpcsetup/unit_test.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index fbf34e8bd1..0b62c1eb2b 
100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -71,8 +71,6 @@ func TestSetupBeaconOnly(t *testing.T) { p1.Initialize(domainSize) commons := p1.Seal([]byte("beacon 1")) - commons = commonsSmallValues(domainSize, 2, 3, 4) - evals := p2.Initialize(ccs, &commons) pk, vk := p2.Seal(&commons, &evals, []byte("beacon 2")) From b59aae1ec78774139e01dd212940745fec809c46 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 15:30:08 -0600 Subject: [PATCH 034/105] localize choice of circuit --- backend/groth16/bn254/mpcsetup/setup_test.go | 30 +++++++++++--------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 944f5a809d..644844f91c 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -181,28 +181,32 @@ func (circuit *Circuit) Define(api frontend.API) error { return err } +func assignCircuit() frontend.Circuit { + return sync.OnceValue(func() frontend.Circuit { + // Build the witness + var preImage, hash fr.Element + { + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) + } + + //return &Circuit{PreImage: preImage, Hash: hash} + return &superSimpleCircuit{A: 42} + })() +} + func getTestCircuit(t *testing.T) *cs.R1CS { return sync.OnceValue(func() *cs.R1CS { - var circuit superSimpleCircuit //Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &circuit) + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, assignCircuit()) require.NoError(t, err) return ccs.(*cs.R1CS) })() } func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { - // Build the witness - var preImage, hash fr.Element - { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) - } - - 
//assignment :=Circuit{PreImage: preImage, Hash: hash} - assignment := superSimpleCircuit{A: 42} - witness, err := frontend.NewWitness(&assignment, curve.ID.ScalarField()) + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) require.NoError(t, err) pubWitness, err := witness.Public() From 3f224e8890d2f0e2b0c1b5974604d074fed68073 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 17:46:52 -0600 Subject: [PATCH 035/105] fix phase 2 sigma update --- backend/groth16/bn254/mpcsetup/phase2.go | 5 +-- backend/groth16/bn254/mpcsetup/setup.go | 5 +++ backend/groth16/bn254/mpcsetup/setup_test.go | 4 +-- backend/groth16/bn254/mpcsetup/unit_test.go | 37 ++++++++++++++++++-- 4 files changed, 45 insertions(+), 6 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 664bf502cb..ae82ef1cd4 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -126,8 +126,9 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { for i := range sigma { sigma[i].BigInt(&I) - for j := range sigma { - scale(&p.Parameters.G1.SigmaCKK[i][j]) + s := p.Parameters.G1.SigmaCKK[i] + for j := range s { + scale(&s[j]) } point := &p.Parameters.G2.Sigma[i] point.ScalarMultiplicationBase(&I) diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index f4e2662df8..682e43dfb7 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -24,6 +24,11 @@ func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChall // final contributions contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + for i := range len(contributions) { + contributions[i].SetOne() + } + //contributions[0].SetUint64(2) + contributions[1].SetUint64(0) p.update(&contributions[0], contributions[1:]) _, _, _, g2 := 
curve.Generators() diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 644844f91c..94d48b4341 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -191,8 +191,8 @@ func assignCircuit() frontend.Circuit { hash.SetBytes(m.Sum(nil)) } - //return &Circuit{PreImage: preImage, Hash: hash} - return &superSimpleCircuit{A: 42} + return &Circuit{PreImage: preImage, Hash: hash} + //return &superSimpleCircuit{A: 42} })() } diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 0b62c1eb2b..9c39e06d1c 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -70,6 +70,9 @@ func TestSetupBeaconOnly(t *testing.T) { ) p1.Initialize(domainSize) commons := p1.Seal([]byte("beacon 1")) + commons2 := commonsSmallValues(domainSize, 1, 2, 3) + + require.Equal(t, len(commons.G1.Tau), len(commons2.G1.Tau)) evals := p2.Initialize(ccs, &commons) pk, vk := p2.Seal(&commons, &evals, []byte("beacon 2")) @@ -198,10 +201,40 @@ func TestCommonsUpdate(t *testing.T) { assertG1G2Equal(t, c.G1.BetaTau[0], c.G2.Beta) } +func TestPhase2Update(t *testing.T) { + c := commonsSmallValues(2, 2, 3, 4) + var p Phase2 + p.Initialize(getTestCircuit(t), &c) + + //p.update() +} + func assertG1G2Equal(t *testing.T, p1 curve.G1Affine, p2 curve.G2Affine) { _, _, g1, g2 := curve.Generators() - p2.Neg(&p2) - ok, err := curve.PairingCheck([]curve.G1Affine{p1, g1}, []curve.G2Affine{g2, p2}) + assertPairingsEqual(t, p1, g2, g1, p2) +} + +// asserts e(p1, q1) = r(p2, q2) +func assertPairingsEqual(t *testing.T, p1 curve.G1Affine, p2 curve.G2Affine, q1 curve.G1Affine, q2 curve.G2Affine) { + q1.Neg(&q1) + ok, err := curve.PairingCheck([]curve.G1Affine{p1, q1}, []curve.G2Affine{p2, q2}) require.NoError(t, err) require.True(t, ok) } + +func TestPedersen(t *testing.T) { + cs := getTestCircuit(t) + domainSize 
:= ecc.NextPowerOfTwo(uint64(cs.GetNbConstraints())) + + commons := commonsSmallValues(domainSize, 2, 3, 4) + var p Phase2 + evals := p.Initialize(cs, &commons) + contributions := make([]fr.Element, 1+len(p.Sigmas)) + for i := range contributions { + contributions[i].SetOne() + } + contributions[1].SetUint64(2) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + assertPairingsEqual(t, evals.G1.CKK[0][0], p.Parameters.G2.Sigma[0], p.Parameters.G1.SigmaCKK[0][0], g2) +} From cafdea42e2bfabc31dbe6a9acc2b63fb75822f23 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 17:48:39 -0600 Subject: [PATCH 036/105] test pedersen --- backend/groth16/bn254/mpcsetup/unit_test.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 9c39e06d1c..06f6e36da1 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -236,5 +236,8 @@ func TestPedersen(t *testing.T) { contributions[1].SetUint64(2) p.update(&contributions[0], contributions[1:]) _, _, _, g2 := curve.Generators() - assertPairingsEqual(t, evals.G1.CKK[0][0], p.Parameters.G2.Sigma[0], p.Parameters.G1.SigmaCKK[0][0], g2) + for i := range p.Sigmas { + assertPairingsEqual(t, evals.G1.CKK[0][i], p.Parameters.G2.Sigma[i], p.Parameters.G1.SigmaCKK[0][i], g2) + } + } From 925af79450f807f0b26dfe7dc796db7c8aebe399 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 17:50:57 -0600 Subject: [PATCH 037/105] restore beacon phase2 --- backend/groth16/bn254/mpcsetup/setup.go | 5 ----- backend/groth16/bn254/mpcsetup/unit_test.go | 13 ------------- 2 files changed, 18 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index 682e43dfb7..f4e2662df8 100644 --- 
a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -24,11 +24,6 @@ func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChall // final contributions contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) - for i := range len(contributions) { - contributions[i].SetOne() - } - //contributions[0].SetUint64(2) - contributions[1].SetUint64(0) p.update(&contributions[0], contributions[1:]) _, _, _, g2 := curve.Generators() diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 06f6e36da1..a21ac0b2d5 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -70,15 +70,11 @@ func TestSetupBeaconOnly(t *testing.T) { ) p1.Initialize(domainSize) commons := p1.Seal([]byte("beacon 1")) - commons2 := commonsSmallValues(domainSize, 1, 2, 3) - - require.Equal(t, len(commons.G1.Tau), len(commons2.G1.Tau)) evals := p2.Initialize(ccs, &commons) pk, vk := p2.Seal(&commons, &evals, []byte("beacon 2")) _pk := pk.(*groth16Impl.ProvingKey) - //_vk := vk.(*groth16Impl.VerifyingKey) rpk, rvk, err := groth16.Setup(ccs) require.NoError(t, err) @@ -201,14 +197,6 @@ func TestCommonsUpdate(t *testing.T) { assertG1G2Equal(t, c.G1.BetaTau[0], c.G2.Beta) } -func TestPhase2Update(t *testing.T) { - c := commonsSmallValues(2, 2, 3, 4) - var p Phase2 - p.Initialize(getTestCircuit(t), &c) - - //p.update() -} - func assertG1G2Equal(t *testing.T, p1 curve.G1Affine, p2 curve.G2Affine) { _, _, g1, g2 := curve.Generators() assertPairingsEqual(t, p1, g2, g1, p2) @@ -239,5 +227,4 @@ func TestPedersen(t *testing.T) { for i := range p.Sigmas { assertPairingsEqual(t, evals.G1.CKK[0][i], p.Parameters.G2.Sigma[i], p.Parameters.G1.SigmaCKK[0][i], g2) } - } From 62a720b00f9c70d093f3efed7307bbf1dad4aa6e Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 18:18:42 -0600 
Subject: [PATCH 038/105] refactor algo utils --- backend/groth16/bn254/mpcsetup/phase1.go | 9 ++++-- backend/groth16/bn254/mpcsetup/phase2.go | 1 + backend/groth16/bn254/mpcsetup/setup_test.go | 29 ++++++------------- backend/groth16/bn254/mpcsetup/unit_test.go | 7 ++++- backend/groth16/bn254/mpcsetup/utils.go | 2 ++ constraint/gkr.go | 19 ++++++------ internal/{algo_utils => utils}/algo_utils.go | 2 +- .../{algo_utils => utils}/algo_utils_test.go | 2 +- internal/utils/test_utils/test_utils.go | 17 +++++++++++ 9 files changed, 53 insertions(+), 35 deletions(-) rename internal/{algo_utils => utils}/algo_utils.go (99%) rename internal/{algo_utils => utils}/algo_utils_test.go (98%) create mode 100644 internal/utils/test_utils/test_utils.go diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index c4f9f0c002..77a59f39f1 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -13,6 +13,7 @@ import ( "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark/internal/utils/test_utils" "math/big" ) @@ -52,10 +53,13 @@ func (p *Phase1) Contribute() { var ( tauContrib, alphaContrib, betaContrib fr.Element ) - fmt.Println("initial tau", p.parameters.G1.Tau[1].String()) - fmt.Println("challenge", p.Challenge) + fmt.Printf("challenge %x\n", p.Challenge) + p.proofs.Tau, tauContrib = updateValue(p.parameters.G1.Tau[1], p.Challenge, 1) + fmt.Println("initial alpha", p.parameters.G1.AlphaTau[0].String()) + test_utils.ConditionalLoggerEnabled = true p.proofs.Alpha, alphaContrib = updateValue(p.parameters.G1.AlphaTau[0], p.Challenge, 2) + test_utils.ConditionalLoggerEnabled = false p.proofs.Beta, betaContrib = updateValue(p.parameters.G1.BetaTau[0], p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) @@ -168,6 +172,7 @@ func (p *Phase1) Verify(next 
*Phase1) error { return errors.New("domain size mismatch") } + fmt.Println("verifying tau") // verify updates to τ, α, β if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index ae82ef1cd4..2ca2716f24 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -169,6 +169,7 @@ func (p *Phase2) Contribute() { // Initialize is to be run by the coordinator // It involves no coin tosses. A verifier should // simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { size := len(commons.G1.AlphaTau) diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 94d48b4341..5760d471b6 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -26,11 +26,10 @@ import ( // TestAll a full integration test of the MPC setup func TestAll(t *testing.T) { - const ( - nbContributionsPhase1 = 3 - nbContributionsPhase2 = 3 - ) + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) // Compile the circuit @@ -39,13 +38,13 @@ func TestAll(t *testing.T) { domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) var ( - bb bytes.Buffer // simulating network communications - serialized [max(nbContributionsPhase1, nbContributionsPhase2)][]byte - phase1 [nbContributionsPhase1]*Phase1 - p1 Phase1 - phase2 [nbContributionsPhase2]*Phase2 - p2 Phase2 + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 ) + serialized := make([][]byte, 
max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) serialize := func(v io.WriterTo) []byte { bb.Reset() @@ -192,7 +191,6 @@ func assignCircuit() frontend.Circuit { } return &Circuit{PreImage: preImage, Hash: hash} - //return &superSimpleCircuit{A: 42} })() } @@ -219,12 +217,3 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin err = groth16.Verify(proof, vk, pubWitness) require.NoError(t, err) } - -type superSimpleCircuit struct { - A frontend.Variable `gnark:",public"` -} - -func (circuit *superSimpleCircuit) Define(api frontend.API) error { - api.AssertIsEqual(circuit.A, 42) - return nil -} diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index a21ac0b2d5..72f5c51046 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -99,8 +99,13 @@ func TestSetupBeaconOnly(t *testing.T) { fmt.Println("mpc proof verified") } -func TestPhase1Contribute(t *testing.T) { +// TestNoContributors tests the beacon and some of the serialization +func TestNoContributors(t *testing.T) { + testAll(t, 0, 0) +} +func TestOnePhase1Contribute(t *testing.T) { + testAll(t, 1, 0) } func TestPhase1Seal(t *testing.T) { diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index e8d66b3479..de1804a1fa 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -12,6 +12,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark/internal/utils" + "github.com/consensys/gnark/internal/utils/test_utils" "math/big" "math/bits" "runtime" @@ -229,6 +230,7 @@ func updateValue(value curve.G1Affine, challenge []byte, dst byte) (proof valueU // proof of knowledge to commitment. 
Algorithm 3 from section 3.7 pokBase := genR(proof.contributionCommitment, challenge, dst) // r + test_utils.ConditionalLog("pok base", pokBase.String()) proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) return diff --git a/constraint/gkr.go b/constraint/gkr.go index e1337cb0b9..f9d8727ca3 100644 --- a/constraint/gkr.go +++ b/constraint/gkr.go @@ -4,9 +4,8 @@ import ( "fmt" "sort" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/constraint/solver" - "github.com/consensys/gnark/internal/algo_utils" + "github.com/consensys/gnark/internal/utils" ) type GkrVariable int // Just an alias to hide implementation details. May be more trouble than worth @@ -83,26 +82,26 @@ func (d *GkrInfo) Compile(nbInstances int) (GkrPermutations, error) { } } - p.SortedInstances, _ = algo_utils.TopologicalSort(instanceDeps) - p.InstancesPermutation = algo_utils.InvertPermutation(p.SortedInstances) + p.SortedInstances, _ = utils.TopologicalSort(instanceDeps) + p.InstancesPermutation = utils.InvertPermutation(p.SortedInstances) // this whole circuit sorting is a bit of a charade. if things are built using an api, there's no way it could NOT already be topologically sorted // worth keeping for future-proofing? 
- inputs := algo_utils.Map(d.Circuit, func(w GkrWire) []int { + inputs := utils.Map(d.Circuit, func(w GkrWire) []int { return w.Inputs }) var uniqueOuts [][]int - p.SortedWires, uniqueOuts = algo_utils.TopologicalSort(inputs) - p.WiresPermutation = algo_utils.InvertPermutation(p.SortedWires) - wirePermutationAt := algo_utils.SliceAt(p.WiresPermutation) + p.SortedWires, uniqueOuts = utils.TopologicalSort(inputs) + p.WiresPermutation = utils.InvertPermutation(p.SortedWires) + wirePermutationAt := utils.SliceAt(p.WiresPermutation) sorted := make([]GkrWire, len(d.Circuit)) // TODO: Directly manipulate d.Circuit instead for newI, oldI := range p.SortedWires { oldW := d.Circuit[oldI] if !oldW.IsInput() { - d.MaxNIns = utils.Max(d.MaxNIns, len(oldW.Inputs)) + d.MaxNIns = max(d.MaxNIns, len(oldW.Inputs)) } for j := range oldW.Dependencies { @@ -122,7 +121,7 @@ func (d *GkrInfo) Compile(nbInstances int) (GkrPermutations, error) { sorted[newI] = GkrWire{ Gate: oldW.Gate, - Inputs: algo_utils.Map(oldW.Inputs, wirePermutationAt), + Inputs: utils.Map(oldW.Inputs, wirePermutationAt), Dependencies: oldW.Dependencies, NbUniqueOutputs: len(uniqueOuts[oldI]), } diff --git a/internal/algo_utils/algo_utils.go b/internal/utils/algo_utils.go similarity index 99% rename from internal/algo_utils/algo_utils.go rename to internal/utils/algo_utils.go index 37dbc8c464..f836625370 100644 --- a/internal/algo_utils/algo_utils.go +++ b/internal/utils/algo_utils.go @@ -1,4 +1,4 @@ -package algo_utils +package utils import "github.com/bits-and-blooms/bitset" diff --git a/internal/algo_utils/algo_utils_test.go b/internal/utils/algo_utils_test.go similarity index 98% rename from internal/algo_utils/algo_utils_test.go rename to internal/utils/algo_utils_test.go index 85ab4bf294..2925bd6692 100644 --- a/internal/algo_utils/algo_utils_test.go +++ b/internal/utils/algo_utils_test.go @@ -1,4 +1,4 @@ -package algo_utils +package utils import ( "github.com/stretchr/testify/assert" diff --git 
a/internal/utils/test_utils/test_utils.go b/internal/utils/test_utils/test_utils.go new file mode 100644 index 0000000000..fcebaf148f --- /dev/null +++ b/internal/utils/test_utils/test_utils.go @@ -0,0 +1,17 @@ +package test_utils + +import "log" + +var ConditionalLoggerEnabled bool + +func ConditionalLog(v ...any) { + if ConditionalLoggerEnabled { + log.Println(v...) + } +} + +func ConditionalLogf(format string, v ...any) { + if ConditionalLoggerEnabled { + log.Printf(format, v...) + } +} From 7ffc662617e76341a863570c76c8e2c46fcef5eb Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 18:20:24 -0600 Subject: [PATCH 039/105] algo_utils as alias --- constraint/bls12-377/gkr.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/constraint/bls12-377/gkr.go b/constraint/bls12-377/gkr.go index e344e35ae1..44a7d964d0 100644 --- a/constraint/bls12-377/gkr.go +++ b/constraint/bls12-377/gkr.go @@ -14,7 +14,7 @@ import ( "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/constraint" hint "github.com/consensys/gnark/constraint/solver" - "github.com/consensys/gnark/internal/algo_utils" + algo_utils "github.com/consensys/gnark/internal/utils" "hash" "math/big" "sync" From 12acfe46026b3ed4bc5afa0790c0c90c24215d2a Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 18:22:51 -0600 Subject: [PATCH 040/105] generify algo_utils removal --- constraint/bls12-381/gkr.go | 2 +- constraint/bls24-315/gkr.go | 2 +- constraint/bls24-317/gkr.go | 2 +- constraint/bn254/gkr.go | 2 +- constraint/bw6-633/gkr.go | 2 +- constraint/bw6-761/gkr.go | 2 +- internal/generator/backend/template/representations/gkr.go.tmpl | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/constraint/bls12-381/gkr.go b/constraint/bls12-381/gkr.go index 3c25df9812..725d8bd045 100644 --- a/constraint/bls12-381/gkr.go +++ b/constraint/bls12-381/gkr.go @@ -14,7 
+14,7 @@ import ( "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/constraint" hint "github.com/consensys/gnark/constraint/solver" - "github.com/consensys/gnark/internal/algo_utils" + algo_utils "github.com/consensys/gnark/internal/utils" "hash" "math/big" "sync" diff --git a/constraint/bls24-315/gkr.go b/constraint/bls24-315/gkr.go index 2ed05f8783..041cba457c 100644 --- a/constraint/bls24-315/gkr.go +++ b/constraint/bls24-315/gkr.go @@ -14,7 +14,7 @@ import ( "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/constraint" hint "github.com/consensys/gnark/constraint/solver" - "github.com/consensys/gnark/internal/algo_utils" + algo_utils "github.com/consensys/gnark/internal/utils" "hash" "math/big" "sync" diff --git a/constraint/bls24-317/gkr.go b/constraint/bls24-317/gkr.go index 5cdb3eb92f..55d1526dc8 100644 --- a/constraint/bls24-317/gkr.go +++ b/constraint/bls24-317/gkr.go @@ -14,7 +14,7 @@ import ( "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/constraint" hint "github.com/consensys/gnark/constraint/solver" - "github.com/consensys/gnark/internal/algo_utils" + algo_utils "github.com/consensys/gnark/internal/utils" "hash" "math/big" "sync" diff --git a/constraint/bn254/gkr.go b/constraint/bn254/gkr.go index 5efed6e822..91fe35470f 100644 --- a/constraint/bn254/gkr.go +++ b/constraint/bn254/gkr.go @@ -14,7 +14,7 @@ import ( "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/constraint" hint "github.com/consensys/gnark/constraint/solver" - "github.com/consensys/gnark/internal/algo_utils" + algo_utils "github.com/consensys/gnark/internal/utils" "hash" "math/big" "sync" diff --git a/constraint/bw6-633/gkr.go b/constraint/bw6-633/gkr.go index fa8e83258c..0274a88d3e 100644 --- a/constraint/bw6-633/gkr.go +++ b/constraint/bw6-633/gkr.go @@ -14,7 +14,7 @@ import ( "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/constraint" hint 
"github.com/consensys/gnark/constraint/solver" - "github.com/consensys/gnark/internal/algo_utils" + algo_utils "github.com/consensys/gnark/internal/utils" "hash" "math/big" "sync" diff --git a/constraint/bw6-761/gkr.go b/constraint/bw6-761/gkr.go index 6e05fe8a90..20cc30fce9 100644 --- a/constraint/bw6-761/gkr.go +++ b/constraint/bw6-761/gkr.go @@ -14,7 +14,7 @@ import ( "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/constraint" hint "github.com/consensys/gnark/constraint/solver" - "github.com/consensys/gnark/internal/algo_utils" + algo_utils "github.com/consensys/gnark/internal/utils" "hash" "math/big" "sync" diff --git a/internal/generator/backend/template/representations/gkr.go.tmpl b/internal/generator/backend/template/representations/gkr.go.tmpl index 32ff5da874..9030cc43a7 100644 --- a/internal/generator/backend/template/representations/gkr.go.tmpl +++ b/internal/generator/backend/template/representations/gkr.go.tmpl @@ -7,7 +7,7 @@ import ( "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/constraint" hint "github.com/consensys/gnark/constraint/solver" - "github.com/consensys/gnark/internal/algo_utils" + algo_utils "github.com/consensys/gnark/internal/utils" "hash" "math/big" "sync" From da7241bd999fb96a65c8cf5d9ee18df9c8d09081 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 18:43:33 -0600 Subject: [PATCH 041/105] fix alpha update test --- backend/groth16/bn254/mpcsetup/phase1.go | 8 ++------ backend/groth16/bn254/mpcsetup/utils.go | 2 -- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 77a59f39f1..a223daee35 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -53,13 +53,9 @@ func (p *Phase1) Contribute() { var ( tauContrib, alphaContrib, betaContrib fr.Element ) - fmt.Printf("challenge %x\n", 
p.Challenge) p.proofs.Tau, tauContrib = updateValue(p.parameters.G1.Tau[1], p.Challenge, 1) - fmt.Println("initial alpha", p.parameters.G1.AlphaTau[0].String()) - test_utils.ConditionalLoggerEnabled = true p.proofs.Alpha, alphaContrib = updateValue(p.parameters.G1.AlphaTau[0], p.Challenge, 2) - test_utils.ConditionalLoggerEnabled = false p.proofs.Beta, betaContrib = updateValue(p.parameters.G1.BetaTau[0], p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) @@ -172,14 +168,14 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("domain size mismatch") } - fmt.Println("verifying tau") // verify updates to τ, α, β if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{p.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } + test_utils.ConditionalLoggerEnabled = false if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index de1804a1fa..e8d66b3479 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -12,7 +12,6 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark/internal/utils" - "github.com/consensys/gnark/internal/utils/test_utils" "math/big" "math/bits" "runtime" 
@@ -230,7 +229,6 @@ func updateValue(value curve.G1Affine, challenge []byte, dst byte) (proof valueU // proof of knowledge to commitment. Algorithm 3 from section 3.7 pokBase := genR(proof.contributionCommitment, challenge, dst) // r - test_utils.ConditionalLog("pok base", pokBase.String()) proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) return From 12780f39aa2a8e8deed345640eec0571b4f0c40b Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 19:06:15 -0600 Subject: [PATCH 042/105] test minimal failing test for multiValueUpdateCheck --- backend/groth16/bn254/mpcsetup/phase1.go | 24 +++++++++++++++------ backend/groth16/bn254/mpcsetup/unit_test.go | 7 ++++-- 2 files changed, 23 insertions(+), 8 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index a223daee35..7baa4fdfa9 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -187,6 +187,17 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } + return multiValueUpdateCheck( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) +} + +// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l +// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ +func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c []curve.G1Affine, d []curve.G1Affine) error { // lemma: let K be a field and // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ @@ -227,21 +238,22 @@ func (p *Phase1) Verify(next *Phase1) error { // 4. 
d₀ = β // and so the desired results follow - ends := partialSums(len(next.parameters.G1.Tau), len(next.parameters.G1.AlphaTau), len(next.parameters.G1.BetaTau)) + ends := partialSums(len(a), len(c), len(d)) g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) - g1s = append(g1s, next.parameters.G1.Tau...) - g1s = append(g1s, next.parameters.G1.AlphaTau...) - g1s = append(g1s, next.parameters.G1.BetaTau...) + g1s = append(g1s, a...) + g1s = append(g1s, c...) + g1s = append(g1s, d...) g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(next.parameters.G2.Tau, linearCombCoeffs(len(next.parameters.G2.Tau))) + g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { - return errors.New("value update check failed") + return errors.New("multi-value update check failed") } return nil + } func (p *Phase1) hash() []byte { diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 72f5c51046..e863d3bed9 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -108,8 +108,11 @@ func TestOnePhase1Contribute(t *testing.T) { testAll(t, 1, 0) } -func TestPhase1Seal(t *testing.T) { - +func TestUpdateCheck(t *testing.T) { + _, _, g1, g2 := curve.Generators() + g1Slice := []curve.G1Affine{g1, g1, g1} + g2Slice := []curve.G2Affine{g2, g2} + require.NoError(t, multiValueUpdateCheck(g1Slice, g2Slice, g1Slice, g1Slice)) } func commonsSmallValues(N, tau, alpha, beta uint64) SrsCommons { From a2184d43babf8f1e0b0312747fdaeeceef891cc7 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 19:40:21 -0600 Subject: [PATCH 043/105] test bivariateMonomials --- backend/groth16/bn254/mpcsetup/phase1.go | 2 +- backend/groth16/bn254/mpcsetup/unit_test.go | 33 +++++++++++++++++++++ 2 files changed, 34 
insertions(+), 1 deletion(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 7baa4fdfa9..97fe9811b4 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -197,7 +197,7 @@ func (p *Phase1) Verify(next *Phase1) error { // multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l // in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ -func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c []curve.G1Affine, d []curve.G1Affine) error { +func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { // lemma: let K be a field and // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index e863d3bed9..8a012e3781 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -10,6 +10,7 @@ import ( groth16Impl "github.com/consensys/gnark/backend/groth16/bn254" "github.com/stretchr/testify/require" "math/big" + "slices" "testing" ) @@ -236,3 +237,35 @@ func TestPedersen(t *testing.T) { assertPairingsEqual(t, evals.G1.CKK[0][i], p.Parameters.G2.Sigma[i], p.Parameters.G1.SigmaCKK[0][i], g2) } } + +func TestBivariateRandomMonomials(t *testing.T) { + xDeg := []int{3, 2, 3} + ends := partialSums(xDeg...) + values := bivariateRandomMonomials(ends...) 
+ //extract the variables + x := make([]fr.Element, slices.Max(xDeg)) + y := make([]fr.Element, len(ends)) + x[1].Div(&values[1], &values[0]) + y[1].Div(&values[xDeg[0]], &values[0]) + + x[0].SetOne() + y[0].SetOne() + + for i := range x[:len(x)-1] { + x[i+1].Mul(&x[i], &x[1]) + } + + for i := range y[:len(x)-1] { + y[i+1].Mul(&y[i], &y[1]) + } + + prevEnd := 0 + for i := range ends { + for j := range xDeg[i] { + var z fr.Element + z.Mul(&y[i], &x[j]) + require.Equal(t, z.String(), values[prevEnd+j].String(), "X^%d Y^%d: expected %s, encountered %s", j, i) + } + prevEnd = ends[i] + } +} From 4f16a247a637f7ddcf0e6807bb7f63c9880cfe4f Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 20:48:04 -0600 Subject: [PATCH 044/105] failing linear combination tests --- backend/groth16/bn254/mpcsetup/phase1.go | 2 + backend/groth16/bn254/mpcsetup/phase2.go | 2 + backend/groth16/bn254/mpcsetup/setup.go | 2 + backend/groth16/bn254/mpcsetup/unit_test.go | 64 +++++++++++++++++++++ backend/groth16/bn254/mpcsetup/utils.go | 6 +- 5 files changed, 74 insertions(+), 2 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 97fe9811b4..7097a7e075 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -44,6 +44,8 @@ type Phase1 struct { Challenge []byte // Hash of the transcript PRIOR to this participant } +// TODO @Tabaie use batch scalar multiplication whenever applicable + // Contribute contributes randomness to the Phase1 object. This mutates Phase1. // p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. 
func (p *Phase1) Contribute() { diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 2ca2716f24..5b64068c48 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -59,6 +59,8 @@ type Phase2 struct { Challenge []byte } +// TODO @Tabaie use batch scalar multiplication whenever applicable + func (p *Phase2) Verify(next *Phase2) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index f4e2662df8..739438c297 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -13,6 +13,8 @@ import ( groth16Impl "github.com/consensys/gnark/backend/groth16/bn254" ) +// TODO @Tabaie use batch scalar multiplication whenever applicable + // Seal performs the final contribution and outputs the proving and verifying keys. // No randomization is performed at this step. 
// A verifier should simply re-run this and check diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 8a012e3781..ab7baafa3e 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -269,3 +269,67 @@ func TestBivariateRandomMonomials(t *testing.T) { prevEnd = ends[i] } } + +func TestLinearCombinationsG1(t *testing.T) { + + testLinearCombinationsG1 := func(ends []int, powers, truncatedPowers, shiftedPowers []fr.Element, A ...curve.G1Affine) { + + multiExpConfig := ecc.MultiExpConfig{ + NbTasks: 1, + } + + if len(A) == 0 { + A = make([]curve.G1Affine, ends[len(ends)-1]) + var err error + for i := range A { + A[i], err = curve.HashToG1([]byte{byte(i)}, nil) + require.NoError(t, err) + } + } + + truncated, shifted := linearCombinationsG1(slices.Clone(A), powers, ends) + + var res curve.G1Affine + + _, err := res.MultiExp(A, truncatedPowers, multiExpConfig) + require.NoError(t, err) + require.Equal(t, truncated, res) + + _, err = res.MultiExp(A, shiftedPowers, multiExpConfig) + require.NoError(t, err) + require.Equal(t, shifted, res) + } + + _, _, g1, _ := curve.Generators() + var infty curve.G1Affine + + testLinearCombinationsG1( + []int{3}, + frs(1, 1, 1), + frs(1, 1, 1), + frs(1, 1, 1), + g1, infty, g1, + ) + + testLinearCombinationsG1( + []int{3}, + frs(1, 2, 4), + frs(1, 2, 0), + frs(0, 1, 2), + ) + + testLinearCombinationsG1( + []int{4, 7}, + frs(1, 2, 4, 8, 3, 6, 12), + frs(1, 2, 4, 0, 3, 6, 0), + frs(0, 1, 2, 4, 0, 3, 6), + ) +} + +func frs(x ...int) []fr.Element { + res := make([]fr.Element, len(x)) + for i := range res { + res[i].SetUint64(uint64(x[i])) + } + return res +} diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index e8d66b3479..e14d47f5d7 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -17,6 +17,8 @@ import ( "runtime" ) +// TODO @Tabaie use 
batch scalar multiplication whenever applicable + func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -114,8 +116,8 @@ func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { // + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] // .... (shifted) // -// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes -// the slices powers and A will be modified +// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. +// The slices powers and A will be modified func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { if ends[len(ends)-1] != len(A) || len(A) != len(powers) { panic("lengths mismatch") From 3b05489dc4aa8061226fa28437e633b02c51366c Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 21:32:02 -0600 Subject: [PATCH 045/105] fix rewrite linearCombs --- backend/groth16/bn254/mpcsetup/unit_test.go | 24 +++++++++---- backend/groth16/bn254/mpcsetup/utils.go | 40 +++++++++++---------- 2 files changed, 38 insertions(+), 26 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index ab7baafa3e..759b4d15bf 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -291,24 +291,34 @@ func TestLinearCombinationsG1(t *testing.T) { var res curve.G1Affine - _, err := res.MultiExp(A, truncatedPowers, multiExpConfig) + _, err := res.MultiExp(A, shiftedPowers, multiExpConfig) require.NoError(t, err) - require.Equal(t, truncated, res) + require.Equal(t, res, shifted) - _, err = res.MultiExp(A, shiftedPowers, multiExpConfig) + _, err = res.MultiExp(A, truncatedPowers, multiExpConfig) require.NoError(t, err) - require.Equal(t, shifted, res) + require.Equal(t, res, 
truncated) } _, _, g1, _ := curve.Generators() var infty curve.G1Affine + for i := range 10 { + x0 := fr.NewElement(uint64(i - 5))[0] + fmt.Printf("%d: %d 0x%x\n", i-5, x0, x0) + } + var acc curve.G1Affine + for i := range 5 { + fmt.Printf("%dg: %d 0x%x\n", i, acc.X[0], acc.X[0]) + acc.Add(&acc, &g1) + } + testLinearCombinationsG1( []int{3}, frs(1, 1, 1), - frs(1, 1, 1), - frs(1, 1, 1), - g1, infty, g1, + frs(1, 1, 0), + frs(0, 1, 1), + g1, infty, infty, ) testLinearCombinationsG1( diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index e14d47f5d7..c37017ecb0 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -117,45 +117,47 @@ func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { // .... (shifted) // // It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. +// Also assumed that powers[0] = 1. // The slices powers and A will be modified func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { if ends[len(ends)-1] != len(A) || len(A) != len(powers) { panic("lengths mismatch") } - largeCoeffs := make([]fr.Element, len(ends)) + // zero out the large coefficients for i := range ends { - largeCoeffs[i].Neg(&powers[ends[i]-1]) powers[ends[i]-1].SetZero() } + copy(powers[1:], powers) msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} - if _, err := shifted.MultiExp(A, powers, msmCfg); err != nil { + if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { panic(err) } - // compute truncated as - // r.shifted - // + powers[0].A[0] + powers[ends[0].A[ends[0]] + ... - // - powers[ends[0]-1].A[ends[0]-1] - powers[ends[1]-1].A[ends[1]-1] - ... 
- r := powers[1] + var rInvNeg fr.Element + rInvNeg.Inverse(&powers[1]) + rInvNeg.Neg(&rInvNeg) prevEnd := 0 - for i := range ends { - if ends[i] <= prevEnd { - panic("non-increasing ends") - } - - powers[2*i] = powers[prevEnd] - powers[2*i+1] = largeCoeffs[i] + // r⁻¹.truncated = + // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] + // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] + // ... + // + // compute shifted as + // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... + // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... + // + r⁻¹.truncated + for i := range ends { + powers[2*i].Mul(&powers[prevEnd+1], &rInvNeg) + powers[2*i+1] = powers[ends[i]-1] A[2*i] = A[prevEnd] A[2*i+1] = A[ends[i]-1] - - prevEnd = ends[i] } - powers[len(ends)*2] = r - A[len(ends)*2] = shifted + powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated + A[2*len(ends)] = A[prevEnd] // TODO @Tabaie O(1) MSM worth it? 
if _, err := truncated.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { From d31ad6c37b7411dd600199e1096bcafdcb421383 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 21:46:58 -0600 Subject: [PATCH 046/105] fix some linComb errors --- backend/groth16/bn254/mpcsetup/unit_test.go | 16 ++++++++++++---- backend/groth16/bn254/mpcsetup/utils.go | 3 +-- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 759b4d15bf..93b28fcc3e 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -291,13 +291,13 @@ func TestLinearCombinationsG1(t *testing.T) { var res curve.G1Affine - _, err := res.MultiExp(A, shiftedPowers, multiExpConfig) + _, err := res.MultiExp(A, truncatedPowers, multiExpConfig) require.NoError(t, err) - require.Equal(t, res, shifted) + require.Equal(t, res, truncated, "truncated") - _, err = res.MultiExp(A, truncatedPowers, multiExpConfig) + _, err = res.MultiExp(A, shiftedPowers, multiExpConfig) require.NoError(t, err) - require.Equal(t, res, truncated) + require.Equal(t, res, shifted, "shifted") } _, _, g1, _ := curve.Generators() @@ -313,6 +313,14 @@ func TestLinearCombinationsG1(t *testing.T) { acc.Add(&acc, &g1) } + testLinearCombinationsG1( + []int{3}, + frs(1, 1, 1), + frs(1, 1, 0), + frs(0, 1, 1), + infty, infty, g1, + ) + testLinearCombinationsG1( []int{3}, frs(1, 1, 1), diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index c37017ecb0..279b972eb7 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -128,7 +128,6 @@ func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) ( for i := range ends { powers[ends[i]-1].SetZero() } - copy(powers[1:], powers) msmCfg := ecc.MultiExpConfig{NbTasks: 
runtime.NumCPU()} @@ -160,7 +159,7 @@ func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) ( A[2*len(ends)] = A[prevEnd] // TODO @Tabaie O(1) MSM worth it? - if _, err := truncated.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { + if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { panic(err) } From 528bc7035c1442ebe7b1e571c83586a79ad93a40 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 21:50:59 -0600 Subject: [PATCH 047/105] fix another linComb case --- backend/groth16/bn254/mpcsetup/utils.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 279b972eb7..f45481f538 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -150,8 +150,8 @@ func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) ( // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... 
// + r⁻¹.truncated for i := range ends { - powers[2*i].Mul(&powers[prevEnd+1], &rInvNeg) - powers[2*i+1] = powers[ends[i]-1] + powers[2*i].Mul(&powers[prevEnd], &rInvNeg) + powers[2*i+1] = powers[ends[i]-2] A[2*i] = A[prevEnd] A[2*i+1] = A[ends[i]-1] } From 157c9ead5c2e0c49adc90c0227fb7d400350c6b1 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 22:03:57 -0600 Subject: [PATCH 048/105] fix another shifted case --- backend/groth16/bn254/mpcsetup/unit_test.go | 8 ++++++++ backend/groth16/bn254/mpcsetup/utils.go | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 93b28fcc3e..e22dcfbad0 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -313,6 +313,14 @@ func TestLinearCombinationsG1(t *testing.T) { acc.Add(&acc, &g1) } + testLinearCombinationsG1( + []int{3}, + frs(1, 1, 1), + frs(1, 1, 0), + frs(0, 1, 1), + infty, g1, infty, + ) + testLinearCombinationsG1( []int{3}, frs(1, 1, 1), diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index f45481f538..19549f3241 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -156,7 +156,7 @@ func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) ( A[2*i+1] = A[ends[i]-1] } powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated - A[2*len(ends)] = A[prevEnd] + A[2*len(ends)] = truncated // TODO @Tabaie O(1) MSM worth it? 
if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { From 9536668b5567a6c240c79ea4e58a6a58774945c5 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 22:49:37 -0600 Subject: [PATCH 049/105] fix linearCombination --- backend/groth16/bn254/mpcsetup/unit_test.go | 36 ++++++++++++++++++--- backend/groth16/bn254/mpcsetup/utils.go | 1 + 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index e22dcfbad0..9e4b98a9fc 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -304,14 +304,24 @@ func TestLinearCombinationsG1(t *testing.T) { var infty curve.G1Affine for i := range 10 { - x0 := fr.NewElement(uint64(i - 5))[0] - fmt.Printf("%d: %d 0x%x\n", i-5, x0, x0) + var x0 fr.Element + x0.SetInt64(int64(i - 5)) + fmt.Printf("%d: %d 0x%x\n", i-5, x0[0], x0[0]) } var acc curve.G1Affine - for i := range 5 { - fmt.Printf("%dg: %d 0x%x\n", i, acc.X[0], acc.X[0]) + acc.Neg(&g1) + for i := range 6 { + fmt.Printf("%dg: %d 0x%x\n", i-1, acc.Y[0], acc.Y[0]) acc.Add(&acc, &g1) } + + testLinearCombinationsG1( + []int{3}, + frs(1, -1, 1), + frs(1, -1, 0), + frs(0, 1, -1), + infty, g1, infty, + ) testLinearCombinationsG1( []int{3}, @@ -344,6 +354,22 @@ func TestLinearCombinationsG1(t *testing.T) { frs(0, 1, 2), ) + testLinearCombinationsG1( + []int{3, 6}, + frs(1, 1, 1, 1, 1, 1), + frs(1, 1, 0, 1, 1, 0), + frs(0, 1, 1, 0, 1, 1), + g1, infty, infty, infty, infty, infty, + ) + + testLinearCombinationsG1( + []int{3, 6}, + frs(1, -1, 1, 1, -1, 1), + frs(1, -1, 0, 1, -1, 0), + frs(0, 1, -1, 0, 1, -1), + g1, infty, infty, infty, infty, infty, + ) + testLinearCombinationsG1( []int{4, 7}, frs(1, 2, 4, 8, 3, 6, 12), @@ -355,7 +381,7 @@ func TestLinearCombinationsG1(t *testing.T) { func frs(x ...int) []fr.Element { res := make([]fr.Element, len(x)) 
for i := range res { - res[i].SetUint64(uint64(x[i])) + res[i].SetInt64(int64(x[i])) } return res } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 19549f3241..45674061ad 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -154,6 +154,7 @@ func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) ( powers[2*i+1] = powers[ends[i]-2] A[2*i] = A[prevEnd] A[2*i+1] = A[ends[i]-1] + prevEnd = ends[i] } powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated A[2*len(ends)] = truncated From 176ef92d491697165dc3b995f14a5ffa60855431 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Mon, 16 Dec 2024 22:52:03 -0600 Subject: [PATCH 050/105] remove prints --- backend/groth16/bn254/mpcsetup/unit_test.go | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 9e4b98a9fc..499aeef166 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -303,18 +303,6 @@ func TestLinearCombinationsG1(t *testing.T) { _, _, g1, _ := curve.Generators() var infty curve.G1Affine - for i := range 10 { - var x0 fr.Element - x0.SetInt64(int64(i - 5)) - fmt.Printf("%d: %d 0x%x\n", i-5, x0[0], x0[0]) - } - var acc curve.G1Affine - acc.Neg(&g1) - for i := range 6 { - fmt.Printf("%dg: %d 0x%x\n", i-1, acc.Y[0], acc.Y[0]) - acc.Add(&acc, &g1) - } - testLinearCombinationsG1( []int{3}, frs(1, -1, 1), From 6650473d52310b8c7f9f07af1bb4a04007be5279 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Tue, 17 Dec 2024 12:04:31 -0600 Subject: [PATCH 051/105] fix linearCombs G2 --- backend/groth16/bn254/mpcsetup/unit_test.go | 85 ++++++++++++++++----- backend/groth16/bn254/mpcsetup/utils.go | 31 +++++--- 2 files changed, 85 insertions(+), 31 
deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 499aeef166..d8c0f0011a 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -272,7 +272,7 @@ func TestBivariateRandomMonomials(t *testing.T) { func TestLinearCombinationsG1(t *testing.T) { - testLinearCombinationsG1 := func(ends []int, powers, truncatedPowers, shiftedPowers []fr.Element, A ...curve.G1Affine) { + test := func(ends []int, powers, truncatedPowers, shiftedPowers []fr.Element, A ...curve.G1Affine) { multiExpConfig := ecc.MultiExpConfig{ NbTasks: 1, @@ -300,65 +300,65 @@ func TestLinearCombinationsG1(t *testing.T) { require.Equal(t, res, shifted, "shifted") } - _, _, g1, _ := curve.Generators() + _, _, g, _ := curve.Generators() var infty curve.G1Affine - testLinearCombinationsG1( + test( []int{3}, frs(1, -1, 1), frs(1, -1, 0), frs(0, 1, -1), - infty, g1, infty, + infty, g, infty, ) - testLinearCombinationsG1( + test( []int{3}, frs(1, 1, 1), frs(1, 1, 0), frs(0, 1, 1), - infty, g1, infty, + infty, g, infty, ) - testLinearCombinationsG1( + test( []int{3}, frs(1, 1, 1), frs(1, 1, 0), frs(0, 1, 1), - infty, infty, g1, + infty, infty, g, ) - testLinearCombinationsG1( + test( []int{3}, frs(1, 1, 1), frs(1, 1, 0), frs(0, 1, 1), - g1, infty, infty, + g, infty, infty, ) - testLinearCombinationsG1( + test( []int{3}, frs(1, 2, 4), frs(1, 2, 0), frs(0, 1, 2), ) - testLinearCombinationsG1( + test( []int{3, 6}, frs(1, 1, 1, 1, 1, 1), frs(1, 1, 0, 1, 1, 0), frs(0, 1, 1, 0, 1, 1), - g1, infty, infty, infty, infty, infty, + g, infty, infty, infty, infty, infty, ) - testLinearCombinationsG1( + test( []int{3, 6}, frs(1, -1, 1, 1, -1, 1), frs(1, -1, 0, 1, -1, 0), frs(0, 1, -1, 0, 1, -1), - g1, infty, infty, infty, infty, infty, + g, infty, infty, infty, infty, infty, ) - testLinearCombinationsG1( + test( []int{4, 7}, frs(1, 2, 4, 8, 3, 6, 12), frs(1, 2, 4, 0, 3, 6, 0), @@ -366,10 +366,53 @@ func 
TestLinearCombinationsG1(t *testing.T) { ) } -func frs(x ...int) []fr.Element { - res := make([]fr.Element, len(x)) - for i := range res { - res[i].SetInt64(int64(x[i])) +func TestLinearCombinationsG2(t *testing.T) { + test := func(powers []fr.Element, A ...curve.G2Affine) { + + multiExpConfig := ecc.MultiExpConfig{ + NbTasks: 1, + } + + if len(A) == 0 { + A = make([]curve.G2Affine, len(powers)) + var err error + for i := range A { + A[i], err = curve.RandomOnG2() + require.NoError(t, err) + } + } + + truncated, shifted := linearCombinationsG2(slices.Clone(A), powers) + + truncatedPowers := make([]fr.Element, len(powers)) + copy(truncatedPowers[:len(truncatedPowers)-1], powers) + shiftedPowers := make([]fr.Element, len(powers)) + copy(shiftedPowers[1:], powers) + + var res curve.G2Affine + + _, err := res.MultiExp(A, truncatedPowers, multiExpConfig) + require.NoError(t, err) + require.Equal(t, res, truncated, "truncated") + + _, err = res.MultiExp(A, shiftedPowers, multiExpConfig) + require.NoError(t, err) + require.Equal(t, res, shifted, "shifted") } - return res + + _, _, _, g := curve.Generators() + var infty curve.G2Affine + + test( + frs(1, 2, 4), + infty, infty, g, + ) + + test( + frs(1, -1, 1), + ) + + test( + frs(1, 3, 9, 27, 81), + ) } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 45674061ad..bac412c893 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -168,20 +168,31 @@ func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) ( } // linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i -// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers) -// the results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). 
+// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { - // the common section, 1 to N-2 - var common curve.G2Affine - if _, err := common.MultiExp(A[1:len(A)-1], rPowers[:len(A)-2], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { // A[1] + r.A[2] + ... + rᴺ⁻³.A[N-2] + + N := len(A) + + if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { panic(err) } - var c big.Int - rPowers[1].BigInt(&c) - truncated.ScalarMultiplication(&common, &c).Add(&truncated, &A[0]) // A[0] + r.A[1] + r².A[2] + ... + rᴺ⁻².A[N-2] - rPowers[len(A)-1].BigInt(&c) - shifted.ScalarMultiplication(&A[len(A)-1], &c).Add(&shifted, &common) + // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] + var ( + x fr.Element + i big.Int + ) + x.Neg(&rPowers[N-2]) + x.BigInt(&i) + truncated. + ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] + Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] + + rPowers[1].BigInt(&i) + truncated. + ScalarMultiplication(&truncated, &i). 
// r.shifted - rᴺ⁻¹.A[N-1] + Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] return } From bb916629de9562dbba76a13f99d250b9cc76aa40 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Tue, 17 Dec 2024 14:50:37 -0600 Subject: [PATCH 052/105] fix phase1 verification --- .../groth16/bn254/mpcsetup/marshal_test.go | 34 ------ backend/groth16/bn254/mpcsetup/phase2.go | 3 +- backend/groth16/bn254/mpcsetup/setup.go | 2 +- backend/groth16/bn254/mpcsetup/setup_test.go | 3 +- backend/groth16/bn254/mpcsetup/unit_test.go | 101 +++++++++++++++++- go.mod | 6 +- go.sum | 4 + io/roundtrip.go | 35 ++++-- 8 files changed, 142 insertions(+), 46 deletions(-) delete mode 100644 backend/groth16/bn254/mpcsetup/marshal_test.go diff --git a/backend/groth16/bn254/mpcsetup/marshal_test.go b/backend/groth16/bn254/mpcsetup/marshal_test.go deleted file mode 100644 index adbfc3fe0e..0000000000 --- a/backend/groth16/bn254/mpcsetup/marshal_test.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2020-2024 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -/* TODO bring this back -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - var srs1 Phase1 - srs1.Initialize(1 << 9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} -*/ diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 5b64068c48..957a3f70ab 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -34,7 +34,7 @@ type Phase2Evaluations struct { // TODO @Tabaie rename B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } PublicAndCommitmentCommitted [][]int - NbConstraints uint64 + NbConstraints uint64 // TODO unnecessary. 
len(Z) has that information (domain size) } type Phase2 struct { @@ -348,6 +348,7 @@ func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c func (p *Phase2) hash() []byte { sha := sha256.New() p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index 739438c297..715ac7fc3d 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -36,7 +36,7 @@ func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChall ) // Initialize PK - pk.Domain = *fft.NewDomain(evals.NbConstraints) + pk.Domain = *fft.NewDomain(evals.NbConstraints) // TODO @Tabaie replace with len(Z)+1 pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) pk.G1.Beta.Set(&commons.G1.BetaTau[0]) pk.G1.Delta.Set(&p.Parameters.G1.Delta) diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 5760d471b6..d5714bca9a 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -12,6 +12,7 @@ import ( "github.com/consensys/gnark-crypto/ecc/bn254/fr" cs "github.com/consensys/gnark/constraint/bn254" "io" + "slices" "sync" "testing" @@ -50,7 +51,7 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { bb.Reset() _, err := v.WriteTo(&bb) assert.NoError(err) - return bb.Bytes() + return slices.Clone(bb.Bytes()) } deserialize := func(v io.ReaderFrom, b []byte) { n, err := v.ReadFrom(bytes.NewReader(b)) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index d8c0f0011a..35cae3d231 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -8,6 +8,11 @@ import ( "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark/backend/groth16" groth16Impl 
"github.com/consensys/gnark/backend/groth16/bn254" + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bn254" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + gnarkio "github.com/consensys/gnark/io" "github.com/stretchr/testify/require" "math/big" "slices" @@ -106,7 +111,7 @@ func TestNoContributors(t *testing.T) { } func TestOnePhase1Contribute(t *testing.T) { - testAll(t, 1, 0) + testAll(t, 2, 0) } func TestUpdateCheck(t *testing.T) { @@ -416,3 +421,97 @@ func TestLinearCombinationsG2(t *testing.T) { frs(1, 3, 9, 27, 81), ) } + +func ones(N int) []fr.Element { + res := make([]fr.Element, N) + for i := range res { + res[i].SetOne() + } + return res +} + +func frs(x ...int) []fr.Element { + res := make([]fr.Element, len(x)) + for i := range res { + res[i].SetInt64(int64(x[i])) + } + return res +} + +func TestSerialization(t *testing.T) { + + testRoundtrip := func(_cs constraint.ConstraintSystem) { + var ( + p1 Phase1 + p2 Phase2 + ) + p1.Initialize(ecc.NextPowerOfTwo(uint64(_cs.GetNbConstraints()))) + commons := p1.Seal([]byte("beacon 1")) + + p2.Initialize(_cs.(*cs.R1CS), &commons) + p2.Contribute() + require.NoError(t, gnarkio.RoundTripCheck(&p2, func() interface{} { return new(Phase2) })) + } + + /*var p Phase2 + const b64 = "AACNaN0mCOtKUAD0aEvRP0h7pXctaB+w5Mwsb+skm2yDuPzlwTs+qCFf/3INR+fP/lHY6BLnqXyBjAIgCoPxOcSIEG0tcty/TAiaCN3lHCRacU+upLP+WpngByrrxbN9KrhmQLY3mhOHaV5Jo3W9pI2lTpLK9ZjkQpYKd92YCRKkJ9LyX3wqeYR4jQFf1mxtfJSNgluSZUUn3AoUSDmvh8m87TRh/JRcRZnq40BgnhkJ5nHs9siMSmhWGFjGgW/mOqpyrFoZEoK2rP+AT6ylkNGYxMmOBUj0meoeI2FB7RDqcuSxQOL1XK+Pm1dhxND33cykwpTF4oCrqQzSonxQGn+wFNzaYREOmkjCS9i12NbpXNyN2b9YpmujAL/GSD5LAwKNaN0mCOtKUAD0aEvRP0h7pXctaB+w5Mwsb+skm2yDuJ8HrqP1uckhSJCcTOeHMHyh0VqJtnoMhkRAWRPEWcsqIP3sH81riS5ARP1Pv172lVAmfoXnCzwFPNFPnvdSGFk=" + b, err := base64.StdEncoding.DecodeString(b64) + require.NoError(t, err) + n, err := p.ReadFrom(bytes.NewReader(b)) + require.NoError(t, err) + 
require.Equal(t, int64(len(b)), n)*/ + + _cs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &tinyCircuit{}) + require.NoError(t, err) + testRoundtrip(_cs) + + testRoundtrip(getTestCircuit(t)) +} + +type tinyCircuit struct { + X [4]frontend.Variable `gnark:",public"` +} + +func (c *tinyCircuit) Define(api frontend.API) error { + for i := range c.X { + api.AssertIsEqual(c.X[i], i) + } + return nil +} + +func (p *Phase2) Equal(o *Phase2) bool { + + if p.Parameters.G2.Delta != o.Parameters.G2.Delta { + print("g2 delta") + } + + if p.Delta != o.Delta { + print("proof delta") + } + + if p.Parameters.G1.Delta != o.Parameters.G1.Delta { + print("g1 delta") + } + + return p.Parameters.G2.Delta == o.Parameters.G2.Delta && + slices.Equal(p.Sigmas, o.Sigmas) && + // bytes.Equal(p.Challenge, o.Challenge) && This function is used in serialization round-trip testing, and we deliberately don't write the challenges + p.Delta == o.Delta && + sliceSliceEqual(p.Parameters.G1.SigmaCKK, o.Parameters.G1.SigmaCKK) && + p.Parameters.G1.Delta == o.Parameters.G1.Delta && + slices.Equal(p.Parameters.G1.Z, o.Parameters.G1.Z) && + slices.Equal(p.Parameters.G1.PKK, o.Parameters.G1.PKK) && + slices.Equal(p.Parameters.G2.Sigma, o.Parameters.G2.Sigma) +} + +func sliceSliceEqual[T comparable](a, b [][]T) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if !slices.Equal(a[i], b[i]) { + return false + } + } + return true +} diff --git a/go.mod b/go.mod index fbf26d48f9..f5b5106d52 100644 --- a/go.mod +++ b/go.mod @@ -19,7 +19,7 @@ require ( github.com/ronanh/intcomp v1.1.0 github.com/rs/zerolog v1.33.0 github.com/stretchr/testify v1.9.0 - golang.org/x/crypto v0.26.0 + golang.org/x/crypto v0.31.0 golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 golang.org/x/sync v0.8.0 ) @@ -33,7 +33,9 @@ require ( github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rogpeppe/go-internal v1.12.0 // indirect github.com/x448/float16 v0.8.4 // indirect - 
golang.org/x/sys v0.24.0 // indirect + golang.org/x/sys v0.28.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect rsc.io/tmplfunc v0.0.3 // indirect ) + +replace github.com/consensys/gnark-crypto => /Users/arya/gnark-crypto diff --git a/go.sum b/go.sum index e4a0449401..51a0a31e84 100644 --- a/go.sum +++ b/go.sum @@ -308,6 +308,8 @@ golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= +golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -466,6 +468,8 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg= golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= diff --git a/io/roundtrip.go b/io/roundtrip.go 
index ecac78afaf..66d38aa7c8 100644 --- a/io/roundtrip.go +++ b/io/roundtrip.go @@ -3,6 +3,7 @@ package io import ( "bytes" "errors" + "fmt" "io" "reflect" ) @@ -21,8 +22,8 @@ func RoundTripCheck(from any, to func() any) error { if err != nil { return err } - if !reflect.DeepEqual(from, r) { - return errors.New("reconstructed object don't match original (ReadFrom)") + if err = equal(from, r); err != nil { + return fmt.Errorf("ReadFrom: %w", err) } if written != read { return errors.New("bytes written / read don't match") @@ -35,8 +36,8 @@ func RoundTripCheck(from any, to func() any) error { if err != nil { return err } - if !reflect.DeepEqual(from, r) { - return errors.New("reconstructed object don't match original (UnsafeReadFrom)") + if err = equal(from, r); err != nil { + return fmt.Errorf("UnsafeReadFrom: %w", err) } if written != read { return errors.New("bytes written / read don't match") @@ -52,6 +53,8 @@ func RoundTripCheck(from any, to func() any) error { return err } + //fmt.Println(base64.StdEncoding.EncodeToString(buf.Bytes()[:written])) + if err := reconstruct(written); err != nil { return err } @@ -85,8 +88,28 @@ func DumpRoundTripCheck(from any, to func() any) error { if err := r.ReadDump(bytes.NewReader(buf.Bytes())); err != nil { return err } - if !reflect.DeepEqual(from, r) { - return errors.New("reconstructed object don't match original (ReadDump)") + if err := equal(from, r); err != nil { + return fmt.Errorf("ReadDump: %w", err) } return nil } + +func equal(a, b any) error { + // check for a custom Equal method + aV := reflect.ValueOf(a) + eq := aV.MethodByName("Equal") + if eq.IsValid() { + res := eq.Call([]reflect.Value{reflect.ValueOf(b)}) + if len(res) != 1 { + return errors.New("`Equal` method must return a single bool") + } + if res[0].Bool() { + return nil + } + return errors.New("reconstructed object does not match the original (custom Equal)") + } + if reflect.DeepEqual(a, b) { + return nil + } + return errors.New("reconstructed object 
does not match the original (reflect.DeepEqual)") +} From 74de36a62779e8098e47051235ad972a13111b10 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 16:18:51 -0600 Subject: [PATCH 053/105] feat a couple of helper functions --- internal/utils/test_utils/test_utils.go | 39 ++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/internal/utils/test_utils/test_utils.go b/internal/utils/test_utils/test_utils.go index fcebaf148f..01f5c00a8a 100644 --- a/internal/utils/test_utils/test_utils.go +++ b/internal/utils/test_utils/test_utils.go @@ -1,6 +1,12 @@ package test_utils -import "log" +import ( + "bytes" + "github.com/stretchr/testify/require" + "io" + "log" + "testing" +) var ConditionalLoggerEnabled bool @@ -15,3 +21,34 @@ func ConditionalLogf(format string, v ...any) { log.Printf(format, v...) } } + +// Range (n, startingPoints...) = [startingPoints[0], startingPoints[0]+1, ..., startingPoints[0]+n-1, startingPoints[1], startingPoints[1]+1, ...,] +// or [0, 1, ..., n-1] if startingPoints is empty +func Range(n int, startingPoints ...int) []int { + if len(startingPoints) == 0 { + startingPoints = []int{0} + } + res := make([]int, n*len(startingPoints)) + + for i := range startingPoints { + for j := range n { + res[i*n+j] = startingPoints[i] + j + } + } + + return res +} + +func CopyThruSerialization(t *testing.T, dst, src interface { + io.ReaderFrom + io.WriterTo +}) { + var bb bytes.Buffer + + n, err := src.WriteTo(&bb) + require.NoError(t, err) + require.Equal(t, int64(bb.Len()), n) + n, err = dst.ReadFrom(bytes.NewReader(bb.Bytes())) + require.NoError(t, err) + require.Equal(t, int64(bb.Len()), n) +} From 87fbf176cbe3d4ba59311a93ff91846e861084d8 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 16:37:32 -0600 Subject: [PATCH 054/105] fix Phase2 verification --- backend/groth16/bn254/mpcsetup/marshal.go | 5 ++- 
backend/groth16/bn254/mpcsetup/phase1.go | 10 +++--- backend/groth16/bn254/mpcsetup/phase2.go | 23 +++++-------- backend/groth16/bn254/mpcsetup/setup.go | 4 +-- backend/groth16/bn254/mpcsetup/setup_test.go | 21 ++++++------ backend/groth16/bn254/mpcsetup/unit_test.go | 36 ++++++++++++++------ backend/groth16/bn254/mpcsetup/utils.go | 14 ++++---- 7 files changed, 58 insertions(+), 55 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 4c1ab916c1..3511a1a878 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -150,12 +150,11 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { func (c *Phase2Evaluations) refsSlice() []any { N := uint64(len(c.G1.A)) - expectedLen := 3*N + 4 - refs := make([]any, 4, expectedLen) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) refs[0] = &c.G1.CKK refs[1] = &c.G1.VKK refs[2] = &c.PublicAndCommitmentCommitted - refs[3] = &c.NbConstraints refs = appendRefs(refs, c.G1.A) refs = appendRefs(refs, c.G1.B) refs = appendRefs(refs, c.G2.B) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 7097a7e075..81dd54779c 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -44,8 +44,6 @@ type Phase1 struct { Challenge []byte // Hash of the transcript PRIOR to this participant } -// TODO @Tabaie use batch scalar multiplication whenever applicable - // Contribute contributes randomness to the Phase1 object. This mutates Phase1. // p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. 
func (p *Phase1) Contribute() { @@ -56,9 +54,9 @@ func (p *Phase1) Contribute() { tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, tauContrib = updateValue(p.parameters.G1.Tau[1], p.Challenge, 1) - p.proofs.Alpha, alphaContrib = updateValue(p.parameters.G1.AlphaTau[0], p.Challenge, 2) - p.proofs.Beta, betaContrib = updateValue(p.parameters.G1.BetaTau[0], p.Challenge, 3) + p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) + p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) + p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } @@ -158,7 +156,7 @@ func (p *Phase1) Verify(next *Phase1) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { - return errors.New("the challenge does not match the previous phase's hash") + return errors.New("the challenge does not match the previous contribution's hash") } next.Challenge = challenge diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 957a3f70ab..f826561cec 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -34,7 +34,6 @@ type Phase2Evaluations struct { // TODO @Tabaie rename B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } PublicAndCommitmentCommitted [][]int - NbConstraints uint64 // TODO unnecessary. 
len(Z) has that information (domain size) } type Phase2 struct { @@ -59,12 +58,10 @@ type Phase2 struct { Challenge []byte } -// TODO @Tabaie use batch scalar multiplication whenever applicable - func (p *Phase2) Verify(next *Phase2) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { - return errors.New("the challenge does not match the previous phase's hash") + return errors.New("the challenge does not match the previous contribution's hash") } next.Challenge = challenge @@ -75,13 +72,13 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) // TODO @Tabaie If all contributions are being verified in one go, we could reuse r + r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { g1Num := linearCombination(g1Numerator, r) g1Denom := linearCombination(g1Denominator, r) - return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Denominator}, challenge, dst) + return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) } // verify proof of knowledge of contributions to the σᵢ @@ -91,7 +88,7 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("commitment proving key subgroup check failed") } - if err := verifyContribution(&p.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } @@ -104,7 +101,7 
@@ func (p *Phase2) Verify(next *Phase2) error { denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) - if err := verifyContribution(&p.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } @@ -132,8 +129,7 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { for j := range s { scale(&s[j]) } - point := &p.Parameters.G2.Sigma[i] - point.ScalarMultiplicationBase(&I) + scale(&p.Parameters.G2.Sigma[i]) } delta.BigInt(&I) @@ -155,14 +151,14 @@ func (p *Phase2) Contribute() { // sample value contributions and provide correctness proofs var delta fr.Element - p.Delta, delta = updateValue(p.Parameters.G1.Delta, p.Challenge, 1) + p.Delta, delta = newValueUpdate(p.Challenge, 1) sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) if len(sigma) > 255 { panic("too many commitments") // DST collision } for i := range sigma { - p.Sigmas[i], sigma[i] = updateValue(p.Parameters.G1.SigmaCKK[i][0], p.Challenge, byte(2+i)) + p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) } p.update(&delta, sigma) @@ -230,7 +226,6 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation var evals Phase2Evaluations commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) - evals.NbConstraints = uint64(r1cs.GetNbConstraints()) evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance 
evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings @@ -272,7 +267,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] n := len(commons.G1.AlphaTau) p.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { // TODO @Tabaie why is the last element always 0? + for i := range n - 1 { p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } bitReverse(p.Parameters.G1.Z) diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index 715ac7fc3d..4c575abe22 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -13,8 +13,6 @@ import ( groth16Impl "github.com/consensys/gnark/backend/groth16/bn254" ) -// TODO @Tabaie use batch scalar multiplication whenever applicable - // Seal performs the final contribution and outputs the proving and verifying keys. // No randomization is performed at this step. 
// A verifier should simply re-run this and check @@ -36,7 +34,7 @@ func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChall ) // Initialize PK - pk.Domain = *fft.NewDomain(evals.NbConstraints) // TODO @Tabaie replace with len(Z)+1 + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) pk.G1.Beta.Set(&commons.G1.BetaTau[0]) pk.G1.Delta.Set(&p.Parameters.G1.Delta) diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index d5714bca9a..f5ddda2c2e 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -182,22 +182,21 @@ func (circuit *Circuit) Define(api frontend.API) error { } func assignCircuit() frontend.Circuit { - return sync.OnceValue(func() frontend.Circuit { - // Build the witness - var preImage, hash fr.Element - { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) - } - return &Circuit{PreImage: preImage, Hash: hash} - })() + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) + + return &Circuit{PreImage: preImage, Hash: hash} + } func getTestCircuit(t *testing.T) *cs.R1CS { return sync.OnceValue(func() *cs.R1CS { - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, assignCircuit()) + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) require.NoError(t, err) return ccs.(*cs.R1CS) })() diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 35cae3d231..b7d24bb800 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -12,6 +12,7 @@ import ( cs "github.com/consensys/gnark/constraint/bn254" "github.com/consensys/gnark/frontend" "github.com/consensys/gnark/frontend/cs/r1cs" + 
"github.com/consensys/gnark/internal/utils/test_utils" gnarkio "github.com/consensys/gnark/io" "github.com/stretchr/testify/require" "math/big" @@ -29,7 +30,7 @@ func TestContributionPok(t *testing.T) { ) x0, err := curve.HashToG1([]byte("contribution test"), nil) require.NoError(t, err) - proof, d := updateValue(x0, []byte(pokChallenge), pokDst) + proof, d := newValueUpdate([]byte(pokChallenge), pokDst) var ( x1 curve.G1Affine dI big.Int @@ -438,7 +439,7 @@ func frs(x ...int) []fr.Element { return res } -func TestSerialization(t *testing.T) { +func TestPhase2Serialization(t *testing.T) { testRoundtrip := func(_cs constraint.ConstraintSystem) { var ( @@ -453,15 +454,7 @@ func TestSerialization(t *testing.T) { require.NoError(t, gnarkio.RoundTripCheck(&p2, func() interface{} { return new(Phase2) })) } - /*var p Phase2 - const b64 = "AACNaN0mCOtKUAD0aEvRP0h7pXctaB+w5Mwsb+skm2yDuPzlwTs+qCFf/3INR+fP/lHY6BLnqXyBjAIgCoPxOcSIEG0tcty/TAiaCN3lHCRacU+upLP+WpngByrrxbN9KrhmQLY3mhOHaV5Jo3W9pI2lTpLK9ZjkQpYKd92YCRKkJ9LyX3wqeYR4jQFf1mxtfJSNgluSZUUn3AoUSDmvh8m87TRh/JRcRZnq40BgnhkJ5nHs9siMSmhWGFjGgW/mOqpyrFoZEoK2rP+AT6ylkNGYxMmOBUj0meoeI2FB7RDqcuSxQOL1XK+Pm1dhxND33cykwpTF4oCrqQzSonxQGn+wFNzaYREOmkjCS9i12NbpXNyN2b9YpmujAL/GSD5LAwKNaN0mCOtKUAD0aEvRP0h7pXctaB+w5Mwsb+skm2yDuJ8HrqP1uckhSJCcTOeHMHyh0VqJtnoMhkRAWRPEWcsqIP3sH81riS5ARP1Pv172lVAmfoXnCzwFPNFPnvdSGFk=" - b, err := base64.StdEncoding.DecodeString(b64) - require.NoError(t, err) - n, err := p.ReadFrom(bytes.NewReader(b)) - require.NoError(t, err) - require.Equal(t, int64(len(b)), n)*/ - - _cs, err := frontend.Compile(ecc.BN254.ScalarField(), r1cs.NewBuilder, &tinyCircuit{}) + _cs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &tinyCircuit{}) require.NoError(t, err) testRoundtrip(_cs) @@ -515,3 +508,24 @@ func sliceSliceEqual[T comparable](a, b [][]T) bool { } return true } + +func getSimplePhase2(t *testing.T, circuit frontend.Circuit) Phase2 { + _cs, err := frontend.Compile(curve.ID.ScalarField(), 
r1cs.NewBuilder, circuit) + require.NoError(t, err) + cs := _cs.(*cs.R1CS) + var commons SrsCommons + commons.setOne(ecc.NextPowerOfTwo(uint64(cs.GetNbConstraints()))) + var p Phase2 + p.Initialize(cs, &commons) + return p +} + +func TestPhase2(t *testing.T) { + p0 := getSimplePhase2(t, &Circuit{}) + + var p1 Phase2 + test_utils.CopyThruSerialization(t, &p1, &p0) + p1.Contribute() + + require.NoError(t, p0.Verify(&p1)) +} diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index bac412c893..18d628a9fe 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -17,8 +17,6 @@ import ( "runtime" ) -// TODO @Tabaie use batch scalar multiplication whenever applicable - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -229,18 +227,17 @@ type valueUpdate struct { contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ } -// updateValue produces values associated with contribution to an existing value. +// newValueUpdate produces values associated with contribution to an existing value. // the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func updateValue(value curve.G1Affine, challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { +func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { if _, err := contributionValue.SetRandom(); err != nil { panic(err) } var contributionValueI big.Int contributionValue.BigInt(&contributionValueI) - _, _, g1, _ := curve.Generators() - proof.contributionCommitment.ScalarMultiplication(&g1, &contributionValueI) - value.ScalarMultiplication(&value, &contributionValueI) + _, _, gen1, _ := curve.Generators() + proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) // proof of knowledge to commitment. 
Algorithm 3 from section 3.7 pokBase := genR(proof.contributionCommitment, challenge, dst) // r @@ -249,6 +246,9 @@ func updateValue(value curve.G1Affine, challenge []byte, dst byte) (proof valueU return } +// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) +// option for linear combination vector + // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. From 456a706649ac7743b0f727b5f036c27d4487cbe4 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 16:59:06 -0600 Subject: [PATCH 055/105] build go generate tinyfield --- internal/generator/backend/main.go | 2 +- internal/tinyfield/arith.go | 2 +- internal/tinyfield/doc.go | 10 ++- internal/tinyfield/element.go | 60 +++----------- internal/tinyfield/element_purego.go | 112 +++++++++++++++++++++++++++ internal/tinyfield/element_test.go | 28 +------ internal/tinyfield/vector.go | 39 +--------- internal/tinyfield/vector_purego.go | 43 ++++++++++ internal/tinyfield/vector_test.go | 2 +- 9 files changed, 176 insertions(+), 122 deletions(-) create mode 100644 internal/tinyfield/element_purego.go create mode 100644 internal/tinyfield/vector_purego.go diff --git a/internal/generator/backend/main.go b/internal/generator/backend/main.go index d5c8ae0045..30e8e21b87 100644 --- a/internal/generator/backend/main.go +++ b/internal/generator/backend/main.go @@ -75,7 +75,7 @@ func main() { if err != nil { panic(err) } - if err := generator.GenerateFF(tinyfieldConf, tiny_field.RootPath, "", ""); err != nil { + if err := generator.GenerateFF(tinyfieldConf, tiny_field.RootPath); err != nil { panic(err) } diff --git a/internal/tinyfield/arith.go b/internal/tinyfield/arith.go index 02aedba1ff..2b85fa7ab0 100644 --- 
a/internal/tinyfield/arith.go +++ b/internal/tinyfield/arith.go @@ -1,4 +1,4 @@ -// Copyright 2020-2024 ConsenSys Software Inc. +// Copyright 2020-2024 Consensys Software Inc. // Licensed under the Apache License, Version 2.0. See the LICENSE file for details. // Code generated by consensys/gnark-crypto DO NOT EDIT diff --git a/internal/tinyfield/doc.go b/internal/tinyfield/doc.go index a8b6fce697..3e92316c85 100644 --- a/internal/tinyfield/doc.go +++ b/internal/tinyfield/doc.go @@ -1,11 +1,13 @@ -// Copyright 2020-2024 ConsenSys Software Inc. +// Copyright 2020-2024 Consensys Software Inc. // Licensed under the Apache License, Version 2.0. See the LICENSE file for details. // Code generated by consensys/gnark-crypto DO NOT EDIT // Package tinyfield contains field arithmetic operations for modulus = 0x2f. // -// The API is similar to math/big (big.Int), but the operations are significantly faster (up to 20x for the modular multiplication on amd64, see also https://hackmd.io/@gnark/modular_multiplication) +// The API is similar to math/big (big.Int), but the operations are significantly faster (up to 20x). +// +// Additionally tinyfield.Vector offers an API to manipulate []Element. // // The modulus is hardcoded in all the operations. // @@ -38,5 +40,7 @@ // // # Warning // -// This code has not been audited and is provided as-is. In particular, there is no security guarantees such as constant time implementation or side-channel attack resistance. +// There is no security guarantees such as constant time implementation or side-channel attack resistance. +// This code is provided as-is. Partially audited, see https://github.com/Consensys/gnark/tree/master/audits +// for more details. package tinyfield diff --git a/internal/tinyfield/element.go b/internal/tinyfield/element.go index 5d7e45ae33..497fabfa95 100644 --- a/internal/tinyfield/element.go +++ b/internal/tinyfield/element.go @@ -1,4 +1,4 @@ -// Copyright 2020-2024 ConsenSys Software Inc. 
+// Copyright 2020-2024 Consensys Software Inc. // Licensed under the Apache License, Version 2.0. See the LICENSE file for details. // Code generated by consensys/gnark-crypto DO NOT EDIT @@ -43,8 +43,8 @@ const ( // Field modulus q const ( - q0 uint64 = 47 - q uint64 = q0 + q0 = 47 + q = q0 ) var qElement = Element{ @@ -63,7 +63,7 @@ func Modulus() *big.Int { // q + r'.r = 1, i.e., qInvNeg = - q⁻¹ mod r // used for Montgomery reduction -const qInvNeg uint64 = 12559485326780971313 +const qInvNeg = 12559485326780971313 func init() { _modulus.SetString("2f", 16) @@ -338,10 +338,11 @@ func (z *Element) fromMont() *Element { // Add z = x + y (mod q) func (z *Element) Add(x, y *Element) *Element { - z[0], _ = bits.Add64(x[0], y[0], 0) - if z[0] >= q { - z[0] -= q + t := x[0] + y[0] + if t >= q { + t -= q } + z[0] = t return z } @@ -388,49 +389,6 @@ func (z *Element) Select(c int, x0 *Element, x1 *Element) *Element { return z } -// _mulGeneric is unoptimized textbook CIOS -// it is a fallback solution on x86 when ADX instruction set is not available -// and is used for testing purposes. -func _mulGeneric(z, x, y *Element) { - - // Algorithm 2 of "Faster Montgomery Multiplication and Multi-Scalar-Multiplication for SNARKS" - // by Y. El Housni and G. 
Botrel https://doi.org/10.46586/tches.v2023.i3.504-521 - - var t [2]uint64 - var D uint64 - var m, C uint64 - // ----------------------------------- - // First loop - - C, t[0] = bits.Mul64(y[0], x[0]) - - t[1], D = bits.Add64(t[1], C, 0) - - // m = t[0]n'[0] mod W - m = t[0] * qInvNeg - - // ----------------------------------- - // Second loop - C = madd0(m, q0, t[0]) - - t[0], C = bits.Add64(t[1], C, 0) - t[1], _ = bits.Add64(0, D, C) - - if t[1] != 0 { - // we need to reduce, we have a result on 2 words - z[0], _ = bits.Sub64(t[0], q0, 0) - return - } - - // copy t into z - z[0] = t[0] - - // if z ⩾ q → z -= q - if !z.smallerThanModulus() { - z[0] -= q - } -} - func _fromMontGeneric(z *Element) { // the following lines implement z = z * 1 // with a modified CIOS montgomery multiplication @@ -603,7 +561,7 @@ func (z *Element) Text(base int) string { const maxUint16 = 65535 zz := z.Bits() - return strconv.FormatUint(zz[0], base) + return strconv.FormatUint(uint64(zz[0]), base) } // BigInt sets and return z as a *big.Int diff --git a/internal/tinyfield/element_purego.go b/internal/tinyfield/element_purego.go new file mode 100644 index 0000000000..eefc24dab6 --- /dev/null +++ b/internal/tinyfield/element_purego.go @@ -0,0 +1,112 @@ +// Copyright 2020-2024 Consensys Software Inc. +// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
+ +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package tinyfield + +import "math/bits" + +// MulBy3 x *= 3 (mod q) +func MulBy3(x *Element) { + var y Element + y.SetUint64(3) + x.Mul(x, &y) +} + +// MulBy5 x *= 5 (mod q) +func MulBy5(x *Element) { + var y Element + y.SetUint64(5) + x.Mul(x, &y) +} + +// MulBy13 x *= 13 (mod q) +func MulBy13(x *Element) { + var y Element + y.SetUint64(13) + x.Mul(x, &y) +} + +func fromMont(z *Element) { + _fromMontGeneric(z) +} + +func reduce(z *Element) { + _reduceGeneric(z) +} + +// Mul z = x * y (mod q) +// +// x and y must be less than q +func (z *Element) Mul(x, y *Element) *Element { + + // In fact, since the modulus R fits on one register, the CIOS algorithm gets reduced to standard REDC (textbook Montgomery reduction): + // hi, lo := x * y + // m := (lo * qInvNeg) mod R + // (*) r := (hi * R + lo + m * q) / R + // reduce r if necessary + + // On the emphasized line, we get r = hi + (lo + m * q) / R + // If we write hi2, lo2 = m * q then R | m * q - lo2 ⇒ R | (lo * qInvNeg) q - lo2 = -lo - lo2 + // This shows lo + lo2 = 0 mod R. i.e. lo + lo2 = 0 if lo = 0 and R otherwise. + // Which finally gives (lo + m * q) / R = (lo + lo2 + R hi2) / R = hi2 + (lo+lo2) / R = hi2 + (lo != 0) + // This "optimization" lets us do away with one MUL instruction on ARM architectures and is available for all q < R. 
+ + hi, lo := bits.Mul64(x[0], y[0]) + if lo != 0 { + hi++ // x[0] * y[0] ≤ 2¹²⁸ - 2⁶⁵ + 1, meaning hi ≤ 2⁶⁴ - 2 so no need to worry about overflow + } + m := lo * qInvNeg + hi2, _ := bits.Mul64(m, q) + r, carry := bits.Add64(hi2, hi, 0) + if carry != 0 || r >= q { + // we need to reduce + r -= q + } + z[0] = r + + return z +} + +// Square z = x * x (mod q) +// +// x must be less than q +func (z *Element) Square(x *Element) *Element { + // see Mul for algorithm documentation + + // In fact, since the modulus R fits on one register, the CIOS algorithm gets reduced to standard REDC (textbook Montgomery reduction): + // hi, lo := x * y + // m := (lo * qInvNeg) mod R + // (*) r := (hi * R + lo + m * q) / R + // reduce r if necessary + + // On the emphasized line, we get r = hi + (lo + m * q) / R + // If we write hi2, lo2 = m * q then R | m * q - lo2 ⇒ R | (lo * qInvNeg) q - lo2 = -lo - lo2 + // This shows lo + lo2 = 0 mod R. i.e. lo + lo2 = 0 if lo = 0 and R otherwise. + // Which finally gives (lo + m * q) / R = (lo + lo2 + R hi2) / R = hi2 + (lo+lo2) / R = hi2 + (lo != 0) + // This "optimization" lets us do away with one MUL instruction on ARM architectures and is available for all q < R. + + hi, lo := bits.Mul64(x[0], x[0]) + if lo != 0 { + hi++ // x[0] * y[0] ≤ 2¹²⁸ - 2⁶⁵ + 1, meaning hi ≤ 2⁶⁴ - 2 so no need to worry about overflow + } + m := lo * qInvNeg + hi2, _ := bits.Mul64(m, q) + r, carry := bits.Add64(hi2, hi, 0) + if carry != 0 || r >= q { + // we need to reduce + r -= q + } + z[0] = r + + return z +} + +// Butterfly sets +// +// a = a + b (mod q) +// b = a - b (mod q) +func Butterfly(a, b *Element) { + _butterflyGeneric(a, b) +} diff --git a/internal/tinyfield/element_test.go b/internal/tinyfield/element_test.go index 64d9667a54..cece30c911 100644 --- a/internal/tinyfield/element_test.go +++ b/internal/tinyfield/element_test.go @@ -1,4 +1,4 @@ -// Copyright 2020-2024 ConsenSys Software Inc. +// Copyright 2020-2024 Consensys Software Inc. 
// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. // Code generated by consensys/gnark-crypto DO NOT EDIT @@ -921,14 +921,6 @@ func TestElementMul(t *testing.T) { c.Mul(&a.element, &r) d.Mul(&a.bigint, &rb).Mod(&d, Modulus()) - // checking generic impl against asm path - var cGeneric Element - _mulGeneric(&cGeneric, &a.element, &r) - if !cGeneric.Equal(&c) { - // need to give context to failing error. - return false - } - if c.BigInt(&e).Cmp(&d) != 0 { return false } @@ -951,17 +943,6 @@ func TestElementMul(t *testing.T) { genB, )) - properties.Property("Mul: assembly implementation must be consistent with generic one", prop.ForAll( - func(a, b testPairElement) bool { - var c, d Element - c.Mul(&a.element, &b.element) - _mulGeneric(&d, &a.element, &b.element) - return c.Equal(&d) - }, - genA, - genB, - )) - specialValueTest := func() { // test special values against special values testValues := make([]Element, len(staticTestValues)) @@ -980,13 +961,6 @@ func TestElementMul(t *testing.T) { c.Mul(&a, &b) d.Mul(&aBig, &bBig).Mod(&d, Modulus()) - // checking asm against generic impl - var cGeneric Element - _mulGeneric(&cGeneric, &a, &b) - if !cGeneric.Equal(&c) { - t.Fatal("Mul failed special test values: asm and generic impl don't match") - } - if c.BigInt(&e).Cmp(&d) != 0 { t.Fatal("Mul failed special test values") } diff --git a/internal/tinyfield/vector.go b/internal/tinyfield/vector.go index 6b045db8cd..0439c558c1 100644 --- a/internal/tinyfield/vector.go +++ b/internal/tinyfield/vector.go @@ -1,4 +1,4 @@ -// Copyright 2020-2024 ConsenSys Software Inc. +// Copyright 2020-2024 Consensys Software Inc. // Licensed under the Apache License, Version 2.0. See the LICENSE file for details. // Code generated by consensys/gnark-crypto DO NOT EDIT @@ -185,43 +185,6 @@ func (vector Vector) Swap(i, j int) { vector[i], vector[j] = vector[j], vector[i] } -// Add adds two vectors element-wise and stores the result in self. 
-// It panics if the vectors don't have the same length. -func (vector *Vector) Add(a, b Vector) { - addVecGeneric(*vector, a, b) -} - -// Sub subtracts two vectors element-wise and stores the result in self. -// It panics if the vectors don't have the same length. -func (vector *Vector) Sub(a, b Vector) { - subVecGeneric(*vector, a, b) -} - -// ScalarMul multiplies a vector by a scalar element-wise and stores the result in self. -// It panics if the vectors don't have the same length. -func (vector *Vector) ScalarMul(a Vector, b *Element) { - scalarMulVecGeneric(*vector, a, b) -} - -// Sum computes the sum of all elements in the vector. -func (vector *Vector) Sum() (res Element) { - sumVecGeneric(&res, *vector) - return -} - -// InnerProduct computes the inner product of two vectors. -// It panics if the vectors don't have the same length. -func (vector *Vector) InnerProduct(other Vector) (res Element) { - innerProductVecGeneric(&res, *vector, other) - return -} - -// Mul multiplies two vectors element-wise and stores the result in self. -// It panics if the vectors don't have the same length. -func (vector *Vector) Mul(a, b Vector) { - mulVecGeneric(*vector, a, b) -} - func addVecGeneric(res, a, b Vector) { if len(a) != len(b) || len(a) != len(res) { panic("vector.Add: vectors don't have the same length") diff --git a/internal/tinyfield/vector_purego.go b/internal/tinyfield/vector_purego.go new file mode 100644 index 0000000000..22a2964d1f --- /dev/null +++ b/internal/tinyfield/vector_purego.go @@ -0,0 +1,43 @@ +// Copyright 2020-2024 Consensys Software Inc. +// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. + +// Code generated by consensys/gnark-crypto DO NOT EDIT + +package tinyfield + +// Add adds two vectors element-wise and stores the result in self. +// It panics if the vectors don't have the same length. 
+func (vector *Vector) Add(a, b Vector) { + addVecGeneric(*vector, a, b) +} + +// Sub subtracts two vectors element-wise and stores the result in self. +// It panics if the vectors don't have the same length. +func (vector *Vector) Sub(a, b Vector) { + subVecGeneric(*vector, a, b) +} + +// ScalarMul multiplies a vector by a scalar element-wise and stores the result in self. +// It panics if the vectors don't have the same length. +func (vector *Vector) ScalarMul(a Vector, b *Element) { + scalarMulVecGeneric(*vector, a, b) +} + +// Sum computes the sum of all elements in the vector. +func (vector *Vector) Sum() (res Element) { + sumVecGeneric(&res, *vector) + return +} + +// InnerProduct computes the inner product of two vectors. +// It panics if the vectors don't have the same length. +func (vector *Vector) InnerProduct(other Vector) (res Element) { + innerProductVecGeneric(&res, *vector, other) + return +} + +// Mul multiplies two vectors element-wise and stores the result in self. +// It panics if the vectors don't have the same length. +func (vector *Vector) Mul(a, b Vector) { + mulVecGeneric(*vector, a, b) +} diff --git a/internal/tinyfield/vector_test.go b/internal/tinyfield/vector_test.go index d17149d308..82867cee93 100644 --- a/internal/tinyfield/vector_test.go +++ b/internal/tinyfield/vector_test.go @@ -1,4 +1,4 @@ -// Copyright 2020-2024 ConsenSys Software Inc. +// Copyright 2020-2024 Consensys Software Inc. // Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
// Code generated by consensys/gnark-crypto DO NOT EDIT From cfd344b51248ff394821de91685425ec09fd6164 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:02:19 -0600 Subject: [PATCH 056/105] build generify lagrange --- backend/groth16/bls12-377/mpcsetup/lagrange.go | 4 ++-- backend/groth16/bls12-381/mpcsetup/lagrange.go | 4 ++-- backend/groth16/bls24-315/mpcsetup/lagrange.go | 4 ++-- backend/groth16/bls24-317/mpcsetup/lagrange.go | 4 ++-- backend/groth16/bw6-633/mpcsetup/lagrange.go | 4 ++-- backend/groth16/bw6-761/mpcsetup/lagrange.go | 4 ++-- .../template/zkpschemes/groth16/mpcsetup/lagrange.go.tmpl | 4 ++-- 7 files changed, 14 insertions(+), 14 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/lagrange.go b/backend/groth16/bls12-377/mpcsetup/lagrange.go index b240155f12..10fe767fc5 100644 --- a/backend/groth16/bls12-377/mpcsetup/lagrange.go +++ b/backend/groth16/bls12-377/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -103,7 +103,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bls12-381/mpcsetup/lagrange.go b/backend/groth16/bls12-381/mpcsetup/lagrange.go index 34d3b1d040..dceec7b769 100644 --- a/backend/groth16/bls12-381/mpcsetup/lagrange.go +++ b/backend/groth16/bls12-381/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } 
-// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -103,7 +103,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bls24-315/mpcsetup/lagrange.go b/backend/groth16/bls24-315/mpcsetup/lagrange.go index 68a642863b..778e55197e 100644 --- a/backend/groth16/bls24-315/mpcsetup/lagrange.go +++ b/backend/groth16/bls24-315/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -103,7 +103,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bls24-317/mpcsetup/lagrange.go b/backend/groth16/bls24-317/mpcsetup/lagrange.go index ad78a233ad..1cb535af35 100644 --- a/backend/groth16/bls24-317/mpcsetup/lagrange.go +++ b/backend/groth16/bls24-317/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a 
[]curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -103,7 +103,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bw6-633/mpcsetup/lagrange.go b/backend/groth16/bw6-633/mpcsetup/lagrange.go index 3a50fc463e..98abca8452 100644 --- a/backend/groth16/bw6-633/mpcsetup/lagrange.go +++ b/backend/groth16/bw6-633/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -103,7 +103,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bw6-761/mpcsetup/lagrange.go b/backend/groth16/bw6-761/mpcsetup/lagrange.go index d90e04b4f6..9c01416d5f 100644 --- a/backend/groth16/bw6-761/mpcsetup/lagrange.go +++ b/backend/groth16/bw6-761/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -103,7 +103,7 @@ func kerDIF8G1(a 
[]curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/lagrange.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/lagrange.go.tmpl index 348c567418..c96f439aa8 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/lagrange.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/lagrange.go.tmpl @@ -70,7 +70,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -98,7 +98,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) From 3d7e184543c26257297128a418058e4045669c6a Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:06:28 -0600 Subject: [PATCH 057/105] refactor AppendRefs --- backend/groth16/bn254/mpcsetup/marshal.go | 26 +++++++++-------------- internal/utils/slices.go | 9 ++++++++ 2 files changed, 19 insertions(+), 16 deletions(-) create mode 100644 internal/utils/slices.go diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 3511a1a878..e8125b17d1 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ 
b/backend/groth16/bn254/mpcsetup/marshal.go @@ -8,16 +8,10 @@ package mpcsetup import ( "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark/internal/utils" "io" ) -func appendRefs[T any](s []any, v []T) []any { - for i := range v { - s = append(s, &v[i]) - } - return s -} - // WriteTo implements io.WriterTo // It does not write the Challenge from the previous contribution func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { @@ -71,8 +65,8 @@ func (p *Phase2) refsSlice() []any { refs[3] = &p.Parameters.G1.Z // unique size: N-1 refs[4] = &p.Parameters.G2.Delta - refs = appendRefs(refs, p.Parameters.G1.SigmaCKK) - refs = appendRefs(refs, p.Parameters.G2.Sigma) + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) if len(refs) != expectedLen { panic("incorrect length estimate") @@ -155,9 +149,9 @@ func (c *Phase2Evaluations) refsSlice() []any { refs[0] = &c.G1.CKK refs[1] = &c.G1.VKK refs[2] = &c.PublicAndCommitmentCommitted - refs = appendRefs(refs, c.G1.A) - refs = appendRefs(refs, c.G1.B) - refs = appendRefs(refs, c.G2.B) + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) if uint64(len(refs)) != expectedLen { panic("incorrect length estimate") @@ -214,10 +208,10 @@ func (c *SrsCommons) refsSlice() []any { refs := make([]any, 2, expectedLen) refs[0] = N refs[1] = &c.G2.Beta - refs = appendRefs(refs, c.G1.Tau[1:]) - refs = appendRefs(refs, c.G2.Tau[1:]) - refs = appendRefs(refs, c.G1.BetaTau) - refs = appendRefs(refs, c.G1.AlphaTau) + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) if uint64(len(refs)) != expectedLen { panic("incorrect length estimate") diff --git a/internal/utils/slices.go b/internal/utils/slices.go new file mode 100644 index 
0000000000..9c50ed9b01 --- /dev/null +++ b/internal/utils/slices.go @@ -0,0 +1,9 @@ +package utils + +// AppendRefs returns append(s, &v[0], &v[1], ...). +func AppendRefs[T any](s []any, v []T) []any { + for i := range v { + s = append(s, &v[i]) + } + return s +} From 054c42ef02b08fb8e989f93d5ecac28431309d43 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:09:06 -0600 Subject: [PATCH 058/105] build generify marshal --- backend/groth16/bls12-377/mpcsetup/marshal.go | 289 +++++++++++------ backend/groth16/bls12-381/mpcsetup/marshal.go | 289 +++++++++++------ backend/groth16/bls24-315/mpcsetup/marshal.go | 289 +++++++++++------ backend/groth16/bls24-317/mpcsetup/marshal.go | 289 +++++++++++------ backend/groth16/bw6-633/mpcsetup/marshal.go | 289 +++++++++++------ backend/groth16/bw6-761/mpcsetup/marshal.go | 289 +++++++++++------ .../groth16/mpcsetup/marshal.go.tmpl | 291 ++++++++++++------ 7 files changed, 1352 insertions(+), 673 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/marshal.go b/backend/groth16/bls12-377/mpcsetup/marshal.go index aab5af4057..d2b4bf3ab9 100644 --- a/backend/groth16/bls12-377/mpcsetup/marshal.go +++ b/backend/groth16/bls12-377/mpcsetup/marshal.go @@ -6,165 +6,262 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return 
int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } + } + return +} + +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") + } + + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") } + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return 
enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]valueUpdate, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return 
int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := 
dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err 
= dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } + +func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { + enc := curve.NewEncoder(writer) + if err = enc.Encode(&x.contributionCommitment); err != nil { + return enc.BytesWritten(), err + } + err = enc.Encode(&x.contributionPok) + return enc.BytesWritten(), err +} + +func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { + dec := curve.NewDecoder(reader) + if err = dec.Decode(&x.contributionCommitment); err != nil { + return dec.BytesRead(), err + } + err = dec.Decode(&x.contributionPok) + return dec.BytesRead(), err +} diff --git a/backend/groth16/bls12-381/mpcsetup/marshal.go b/backend/groth16/bls12-381/mpcsetup/marshal.go index 47c750583c..1889993f18 100644 --- a/backend/groth16/bls12-381/mpcsetup/marshal.go +++ b/backend/groth16/bls12-381/mpcsetup/marshal.go @@ -6,165 +6,262 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - 
&phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } + } + return +} + +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") + } + + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") } + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func 
(phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]valueUpdate, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - 
&c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, 
error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } + +func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { + enc := curve.NewEncoder(writer) + if err = enc.Encode(&x.contributionCommitment); err != nil { + return enc.BytesWritten(), err + } + err = 
enc.Encode(&x.contributionPok) + return enc.BytesWritten(), err +} + +func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { + dec := curve.NewDecoder(reader) + if err = dec.Decode(&x.contributionCommitment); err != nil { + return dec.BytesRead(), err + } + err = dec.Decode(&x.contributionPok) + return dec.BytesRead(), err +} diff --git a/backend/groth16/bls24-315/mpcsetup/marshal.go b/backend/groth16/bls24-315/mpcsetup/marshal.go index 7c994e08fe..a324c2c11c 100644 --- a/backend/groth16/bls24-315/mpcsetup/marshal.go +++ b/backend/groth16/bls24-315/mpcsetup/marshal.go @@ -6,165 +6,262 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) 
ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } + } + return +} + +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") + } + + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") } + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - 
&phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]valueUpdate, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + 
refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + 
expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } + +func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { + enc := curve.NewEncoder(writer) + if err = enc.Encode(&x.contributionCommitment); err != nil { + return enc.BytesWritten(), err + } + err = enc.Encode(&x.contributionPok) + return enc.BytesWritten(), err +} + +func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { + dec := curve.NewDecoder(reader) + if err = dec.Decode(&x.contributionCommitment); err != nil { + return dec.BytesRead(), err + } + err = dec.Decode(&x.contributionPok) + return dec.BytesRead(), err +} diff --git 
a/backend/groth16/bls24-317/mpcsetup/marshal.go b/backend/groth16/bls24-317/mpcsetup/marshal.go index 19706282c7..d00cb1d5d9 100644 --- a/backend/groth16/bls24-317/mpcsetup/marshal.go +++ b/backend/groth16/bls24-317/mpcsetup/marshal.go @@ -6,165 +6,262 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } + } + return +} + +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + 
nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") + } + + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") } + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + 
return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]valueUpdate, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + 
enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = 
utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } + +func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { + enc := curve.NewEncoder(writer) + if err = enc.Encode(&x.contributionCommitment); err != nil { + return enc.BytesWritten(), err + } + err = enc.Encode(&x.contributionPok) + return enc.BytesWritten(), err +} + +func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { + dec := curve.NewDecoder(reader) + if err = dec.Decode(&x.contributionCommitment); err != nil { + return dec.BytesRead(), err + } + err = dec.Decode(&x.contributionPok) + return dec.BytesRead(), err +} diff --git a/backend/groth16/bw6-633/mpcsetup/marshal.go b/backend/groth16/bw6-633/mpcsetup/marshal.go index 9c4cc27acb..2cd0c0f03f 100644 --- a/backend/groth16/bw6-633/mpcsetup/marshal.go +++ b/backend/groth16/bw6-633/mpcsetup/marshal.go @@ -6,165 +6,262 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" 
+ "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } + } + return +} + +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") + } + + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those 
committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") } + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]valueUpdate, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v 
:= range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, 
- &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); 
err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } + +func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { + enc := curve.NewEncoder(writer) + if err = enc.Encode(&x.contributionCommitment); err != nil { + return enc.BytesWritten(), err + } + err = enc.Encode(&x.contributionPok) + return enc.BytesWritten(), err +} + +func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { + dec := curve.NewDecoder(reader) + if err = dec.Decode(&x.contributionCommitment); err != nil { + return dec.BytesRead(), err + } + err = dec.Decode(&x.contributionPok) + return dec.BytesRead(), err +} diff --git a/backend/groth16/bw6-761/mpcsetup/marshal.go b/backend/groth16/bw6-761/mpcsetup/marshal.go index 27477cd27b..fdfac89464 100644 --- a/backend/groth16/bw6-761/mpcsetup/marshal.go +++ b/backend/groth16/bw6-761/mpcsetup/marshal.go @@ -6,165 +6,262 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + 
for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } + } + return +} + +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") + } + + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") } + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer 
io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]valueUpdate, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements 
io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + 
n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - 
&c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } + +func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { + enc := curve.NewEncoder(writer) + if err = enc.Encode(&x.contributionCommitment); err != nil { + return enc.BytesWritten(), err + } + err = enc.Encode(&x.contributionPok) + return enc.BytesWritten(), err +} + +func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { + dec := curve.NewDecoder(reader) + if err = dec.Decode(&x.contributionCommitment); err != nil { + return dec.BytesRead(), err + } + err = dec.Decode(&x.contributionPok) + return dec.BytesRead(), err +} diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl index 3af994ae04..89398bf704 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl @@ -1,165 +1,262 @@ import ( "io" - + "encoding/binary" + "github.com/consensys/gnark/internal/utils" {{- template "import_curve" . 
}} ) + // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } + } + return +} + +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") + } + + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // 
unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") } + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]valueUpdate, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range 
p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - 
&c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(8) - for _, v := range toEncode { + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) + + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), 
err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } + +func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { + enc := curve.NewEncoder(writer) + if err = enc.Encode(&x.contributionCommitment); err != nil { + return enc.BytesWritten(), err + } + err = enc.Encode(&x.contributionPok) + return enc.BytesWritten(), err +} + +func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { + dec := curve.NewDecoder(reader) + if err = dec.Decode(&x.contributionCommitment); err != nil { + return dec.BytesRead(), err + } + err = dec.Decode(&x.contributionPok) + return dec.BytesRead(), err +} \ No newline at end of file From 914a075ffbb9e7fcbdb153956881885eb74788f5 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:11:53 -0600 Subject: [PATCH 059/105] refactor: delete marshal_test --- .../bls12-377/mpcsetup/marshal_test.go | 42 ------------------- .../bls12-381/mpcsetup/marshal_test.go | 42 ------------------- .../bls24-315/mpcsetup/marshal_test.go | 42 ------------------- .../bls24-317/mpcsetup/marshal_test.go | 42 ------------------- .../groth16/bw6-633/mpcsetup/marshal_test.go | 42 ------------------- .../groth16/bw6-761/mpcsetup/marshal_test.go | 42 ------------------- internal/generator/backend/main.go | 1 - .../groth16/mpcsetup/marshal_test.go.tmpl | 37 ---------------- 8 files changed, 
290 deletions(-) delete mode 100644 backend/groth16/bls12-377/mpcsetup/marshal_test.go delete mode 100644 backend/groth16/bls12-381/mpcsetup/marshal_test.go delete mode 100644 backend/groth16/bls24-315/mpcsetup/marshal_test.go delete mode 100644 backend/groth16/bls24-317/mpcsetup/marshal_test.go delete mode 100644 backend/groth16/bw6-633/mpcsetup/marshal_test.go delete mode 100644 backend/groth16/bw6-761/mpcsetup/marshal_test.go delete mode 100644 internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal_test.go.tmpl diff --git a/backend/groth16/bls12-377/mpcsetup/marshal_test.go b/backend/groth16/bls12-377/mpcsetup/marshal_test.go deleted file mode 100644 index af045351c1..0000000000 --- a/backend/groth16/bls12-377/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2024 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. - -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bls12-377" - cs "github.com/consensys/gnark/constraint/bls12-377" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bls12-381/mpcsetup/marshal_test.go 
b/backend/groth16/bls12-381/mpcsetup/marshal_test.go deleted file mode 100644 index 7104c8eac5..0000000000 --- a/backend/groth16/bls12-381/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2024 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. - -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bls12-381" - cs "github.com/consensys/gnark/constraint/bls12-381" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bls24-315/mpcsetup/marshal_test.go b/backend/groth16/bls24-315/mpcsetup/marshal_test.go deleted file mode 100644 index ffd1631d71..0000000000 --- a/backend/groth16/bls24-315/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2024 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bls24-315" - cs "github.com/consensys/gnark/constraint/bls24-315" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bls24-317/mpcsetup/marshal_test.go b/backend/groth16/bls24-317/mpcsetup/marshal_test.go deleted file mode 100644 index 15c39e9e1d..0000000000 --- a/backend/groth16/bls24-317/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2024 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bls24-317" - cs "github.com/consensys/gnark/constraint/bls24-317" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bw6-633/mpcsetup/marshal_test.go b/backend/groth16/bw6-633/mpcsetup/marshal_test.go deleted file mode 100644 index a0ac908332..0000000000 --- a/backend/groth16/bw6-633/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2024 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bw6-633" - cs "github.com/consensys/gnark/constraint/bw6-633" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bw6-761/mpcsetup/marshal_test.go b/backend/groth16/bw6-761/mpcsetup/marshal_test.go deleted file mode 100644 index 2b202dd536..0000000000 --- a/backend/groth16/bw6-761/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2024 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bw6-761" - cs "github.com/consensys/gnark/constraint/bw6-761" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/internal/generator/backend/main.go b/internal/generator/backend/main.go index 30e8e21b87..ed5fa977ea 100644 --- a/internal/generator/backend/main.go +++ b/internal/generator/backend/main.go @@ -177,7 +177,6 @@ func main() { entries = []bavard.Entry{ {File: filepath.Join(groth16MpcSetupDir, "lagrange.go"), Templates: []string{"groth16/mpcsetup/lagrange.go.tmpl", importCurve}}, {File: filepath.Join(groth16MpcSetupDir, "marshal.go"), Templates: []string{"groth16/mpcsetup/marshal.go.tmpl", importCurve}}, - {File: filepath.Join(groth16MpcSetupDir, "marshal_test.go"), Templates: []string{"groth16/mpcsetup/marshal_test.go.tmpl", importCurve}}, {File: filepath.Join(groth16MpcSetupDir, "phase1.go"), Templates: []string{"groth16/mpcsetup/phase1.go.tmpl", importCurve}}, {File: filepath.Join(groth16MpcSetupDir, "phase2.go"), Templates: []string{"groth16/mpcsetup/phase2.go.tmpl", importCurve}}, {File: filepath.Join(groth16MpcSetupDir, "setup.go"), Templates: 
[]string{"groth16/mpcsetup/setup.go.tmpl", importCurve}}, diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal_test.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal_test.go.tmpl deleted file mode 100644 index eaf6293777..0000000000 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal_test.go.tmpl +++ /dev/null @@ -1,37 +0,0 @@ -import ( - "testing" - - gnarkio "github.com/consensys/gnark/io" - - {{- template "import_curve" . }} - {{- template "import_backend_cs" . }} - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} - From e41826374fcb87ddf2dfafbcd79fc7c13182927a Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:18:16 -0600 Subject: [PATCH 060/105] build generify phase1 --- backend/groth16/bls12-377/mpcsetup/phase1.go | 369 +++++++++++------- backend/groth16/bls12-381/mpcsetup/phase1.go | 369 +++++++++++------- backend/groth16/bls24-315/mpcsetup/phase1.go | 369 +++++++++++------- backend/groth16/bls24-317/mpcsetup/phase1.go | 369 +++++++++++------- backend/groth16/bn254/mpcsetup/phase1.go | 2 - backend/groth16/bw6-633/mpcsetup/phase1.go | 369 +++++++++++------- backend/groth16/bw6-761/mpcsetup/phase1.go | 369 
+++++++++++------- .../groth16/mpcsetup/phase1.go.tmpl | 369 +++++++++++------- 8 files changed, 1603 insertions(+), 982 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase1.go b/backend/groth16/bls12-377/mpcsetup/phase1.go index 724ac887b6..b6215462ac 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase1.go +++ b/backend/groth16/bls12-377/mpcsetup/phase1.go @@ -6,187 +6,276 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" - "math" "math/big" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. +// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { // "main" contributions + Tau, Alpha, Beta valueUpdate } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the 
MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). -func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) + p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) + p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. 
-func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) } } - return nil + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) + } + } + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], 
g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { + return errors.New("derived values 𝔾₁ subgroup check failed") } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { + return errors.New("derived values 𝔾₂ subgroup check failed") } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") - } + return multiValueUpdateCheck( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) +} + +// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l +// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ +func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { + // lemma: let K be a field and + // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ + // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ + // polynomials in K[X,Y,Z]. 
+ // if F/F' = G/G' + // then F/F' = G/G' ∈ K + // + // view our polynomials in K[X,Y,Z] + // By multiplying out the polynomials we get + // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ + // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 + // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ + // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ + // fᵢⱼ = x f'ᵢⱼ + // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ + // gᵢ = x g'ᵢ + + // now we use this to check that: + // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ + // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ + // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ + // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ + + // construct the polynomials + // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² + // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² + // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² + // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² + + // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: + // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ + // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ + + // from previous checks we already know: + // 1. a₀ = 1 + // 2. b₀ = 1 + // 3. c₀ = α + // 4. d₀ = β + // and so the desired results follow + + ends := partialSums(len(a), len(c), len(d)) + + g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) + g1s = append(g1s, a...) + g1s = append(g1s, c...) + g1s = append(g1s, d...) 
+ + g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) + g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + + if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + return errors.New("multi-value update check failed") } return nil + } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bls12-381/mpcsetup/phase1.go b/backend/groth16/bls12-381/mpcsetup/phase1.go index be297e477f..213744f7bc 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase1.go +++ b/backend/groth16/bls12-381/mpcsetup/phase1.go @@ -6,187 +6,276 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" - "math" "math/big" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. 
+// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { // "main" contributions + Tau, Alpha, Beta valueUpdate } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) + p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) + p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. 
-func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) } } - return nil + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) + } + } + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], 
g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { + return errors.New("derived values 𝔾₁ subgroup check failed") } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { + return errors.New("derived values 𝔾₂ subgroup check failed") } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") - } + return multiValueUpdateCheck( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) +} + +// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l +// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ +func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { + // lemma: let K be a field and + // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ + // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ + // polynomials in K[X,Y,Z]. 
+ // if F/F' = G/G' + // then F/F' = G/G' ∈ K + // + // view our polynomials in K[X,Y,Z] + // By multiplying out the polynomials we get + // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ + // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 + // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ + // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ + // fᵢⱼ = x f'ᵢⱼ + // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ + // gᵢ = x g'ᵢ + + // now we use this to check that: + // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ + // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ + // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ + // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ + + // construct the polynomials + // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² + // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² + // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² + // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² + + // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: + // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ + // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ + + // from previous checks we already know: + // 1. a₀ = 1 + // 2. b₀ = 1 + // 3. c₀ = α + // 4. d₀ = β + // and so the desired results follow + + ends := partialSums(len(a), len(c), len(d)) + + g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) + g1s = append(g1s, a...) + g1s = append(g1s, c...) + g1s = append(g1s, d...) 
+ + g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) + g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + + if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + return errors.New("multi-value update check failed") } return nil + } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bls24-315/mpcsetup/phase1.go b/backend/groth16/bls24-315/mpcsetup/phase1.go index c61a807625..e96928265d 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase1.go +++ b/backend/groth16/bls24-315/mpcsetup/phase1.go @@ -6,187 +6,276 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" - "math" "math/big" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. 
+// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { // "main" contributions + Tau, Alpha, Beta valueUpdate } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) + p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) + p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. 
-func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) } } - return nil + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) + } + } + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], 
g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { + return errors.New("derived values 𝔾₁ subgroup check failed") } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { + return errors.New("derived values 𝔾₂ subgroup check failed") } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") - } + return multiValueUpdateCheck( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) +} + +// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l +// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ +func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { + // lemma: let K be a field and + // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ + // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ + // polynomials in K[X,Y,Z]. 
+ // if F/F' = G/G' + // then F/F' = G/G' ∈ K + // + // view our polynomials in K[X,Y,Z] + // By multiplying out the polynomials we get + // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ + // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 + // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ + // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ + // fᵢⱼ = x f'ᵢⱼ + // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ + // gᵢ = x g'ᵢ + + // now we use this to check that: + // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ + // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ + // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ + // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ + + // construct the polynomials + // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² + // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² + // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² + // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² + + // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: + // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ + // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ + + // from previous checks we already know: + // 1. a₀ = 1 + // 2. b₀ = 1 + // 3. c₀ = α + // 4. d₀ = β + // and so the desired results follow + + ends := partialSums(len(a), len(c), len(d)) + + g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) + g1s = append(g1s, a...) + g1s = append(g1s, c...) + g1s = append(g1s, d...) 
+ + g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) + g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + + if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + return errors.New("multi-value update check failed") } return nil + } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bls24-317/mpcsetup/phase1.go b/backend/groth16/bls24-317/mpcsetup/phase1.go index 774f1a6aa8..c339bf3412 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase1.go +++ b/backend/groth16/bls24-317/mpcsetup/phase1.go @@ -6,187 +6,276 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" - "math" "math/big" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. 
+// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { // "main" contributions + Tau, Alpha, Beta valueUpdate } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) + p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) + p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. 
-func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) } } - return nil + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) + } + } + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], 
g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { + return errors.New("derived values 𝔾₁ subgroup check failed") } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { + return errors.New("derived values 𝔾₂ subgroup check failed") } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") - } + return multiValueUpdateCheck( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) +} + +// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l +// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ +func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { + // lemma: let K be a field and + // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ + // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ + // polynomials in K[X,Y,Z]. 
+ // if F/F' = G/G' + // then F/F' = G/G' ∈ K + // + // view our polynomials in K[X,Y,Z] + // By multiplying out the polynomials we get + // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ + // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 + // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ + // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ + // fᵢⱼ = x f'ᵢⱼ + // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ + // gᵢ = x g'ᵢ + + // now we use this to check that: + // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ + // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ + // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ + // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ + + // construct the polynomials + // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² + // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² + // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² + // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² + + // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: + // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ + // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ + + // from previous checks we already know: + // 1. a₀ = 1 + // 2. b₀ = 1 + // 3. c₀ = α + // 4. d₀ = β + // and so the desired results follow + + ends := partialSums(len(a), len(c), len(d)) + + g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) + g1s = append(g1s, a...) + g1s = append(g1s, c...) + g1s = append(g1s, d...) 
+ + g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) + g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + + if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + return errors.New("multi-value update check failed") } return nil + } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 81dd54779c..a9c5935df1 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -13,7 +13,6 @@ import ( "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" - "github.com/consensys/gnark/internal/utils/test_utils" "math/big" ) @@ -175,7 +174,6 @@ func (p *Phase1) Verify(next *Phase1) error { if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - test_utils.ConditionalLoggerEnabled = false if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { return fmt.Errorf("failed to verify 
contribution to β: %w", err) } diff --git a/backend/groth16/bw6-633/mpcsetup/phase1.go b/backend/groth16/bw6-633/mpcsetup/phase1.go index 43d1af3a0a..08b774b136 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase1.go +++ b/backend/groth16/bw6-633/mpcsetup/phase1.go @@ -6,187 +6,276 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" - "math" "math/big" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. +// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { // "main" contributions + Tau, Alpha, Beta valueUpdate } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) + p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) + p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. 
-func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) } } - return nil + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) + } + } + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], 
g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { + return errors.New("derived values 𝔾₁ subgroup check failed") } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { + return errors.New("derived values 𝔾₂ subgroup check failed") } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") - } + return multiValueUpdateCheck( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) +} + +// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l +// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ +func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { + // lemma: let K be a field and + // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ + // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ + // polynomials in K[X,Y,Z]. 
+ // if F/F' = G/G' + // then F/F' = G/G' ∈ K + // + // view our polynomials in K[X,Y,Z] + // By multiplying out the polynomials we get + // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ + // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 + // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ + // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ + // fᵢⱼ = x f'ᵢⱼ + // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ + // gᵢ = x g'ᵢ + + // now we use this to check that: + // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ + // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ + // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ + // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ + + // construct the polynomials + // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² + // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² + // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² + // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² + + // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: + // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ + // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ + + // from previous checks we already know: + // 1. a₀ = 1 + // 2. b₀ = 1 + // 3. c₀ = α + // 4. d₀ = β + // and so the desired results follow + + ends := partialSums(len(a), len(c), len(d)) + + g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) + g1s = append(g1s, a...) + g1s = append(g1s, c...) + g1s = append(g1s, d...) 
+ + g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) + g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + + if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + return errors.New("multi-value update check failed") } return nil + } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bw6-761/mpcsetup/phase1.go b/backend/groth16/bw6-761/mpcsetup/phase1.go index 9d3257052e..88754b5067 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase1.go +++ b/backend/groth16/bw6-761/mpcsetup/phase1.go @@ -6,187 +6,276 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" - "math" "math/big" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. 
+// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { // "main" contributions + Tau, Alpha, Beta valueUpdate } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) + p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) + p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. 
-func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) } } - return nil + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) + } + } + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], 
g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { + return errors.New("derived values 𝔾₁ subgroup check failed") } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { + return errors.New("derived values 𝔾₂ subgroup check failed") } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") - } + return multiValueUpdateCheck( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) +} + +// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l +// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ +func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { + // lemma: let K be a field and + // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ + // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ + // polynomials in K[X,Y,Z]. 
+ // if F/F' = G/G'
+ // then F/F' = G/G' ∈ K, i.e. the common ratio is a constant
+ //
+ // view our polynomials in K[X,Y,Z]
+ // By multiplying out the polynomials we get
+ // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖ XᶦYʲZᵏ
+ // pick i₀, j₀, k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0
+ // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀
+ // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒
+ // fᵢⱼ = x f'ᵢⱼ
+ // likewise for any k: fᵢ₀ⱼ₀g'ₖ = f'ᵢ₀ⱼ₀gₖ ⇒
+ // gₖ = x g'ₖ
+
+ // now we use this to check that:
+ // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁
+ // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂
+ // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁
+ // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁
+
+ // construct the polynomials
+ // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y²
+ // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y²
+ // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻²
+ // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻²
+
+ // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields:
+ // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ
+ // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ
+
+ // from previous checks we already know:
+ // 1. a₀ = 1
+ // 2. b₀ = 1
+ // 3. c₀ = α
+ // 4. d₀ = β
+ // and so the desired results follow
+
+ ends := partialSums(len(a), len(c), len(d))
+
+ g1s := make([]curve.G1Affine, 0, ends[len(ends)-1])
+ g1s = append(g1s, a...)
+ g1s = append(g1s, c...)
+ g1s = append(g1s, d...) 
+ + g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) + g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + + if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + return errors.New("multi-value update check failed") } return nil + } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl index 7c8c6fa540..36234ee881 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl @@ -1,187 +1,276 @@ import ( "crypto/sha256" "errors" - "math" + "fmt" + "github.com/consensys/gnark-crypto/ecc" "math/big" + "bytes" {{- template "import_fr" . }} {{- template "import_curve" . }} ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. 
+// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { // "main" contributions + Tau, Alpha, Beta valueUpdate } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) + p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) + p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. 
-func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) } } - return nil + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) + } + } + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], 
g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { + return errors.New("derived values 𝔾₁ subgroup check failed") } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { + return errors.New("derived values 𝔾₂ subgroup check failed") } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") - } + return multiValueUpdateCheck( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) +} + +// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l +// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ +func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { + // lemma: let K be a field and + // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ + // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ + // polynomials in K[X,Y,Z]. 
+ // if F/F' = G/G'
+ // then F/F' = G/G' ∈ K, i.e. the common ratio is a constant
+ //
+ // view our polynomials in K[X,Y,Z]
+ // By multiplying out the polynomials we get
+ // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖ XᶦYʲZᵏ
+ // pick i₀, j₀, k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0
+ // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀
+ // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒
+ // fᵢⱼ = x f'ᵢⱼ
+ // likewise for any k: fᵢ₀ⱼ₀g'ₖ = f'ᵢ₀ⱼ₀gₖ ⇒
+ // gₖ = x g'ₖ
+
+ // now we use this to check that:
+ // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁
+ // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂
+ // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁
+ // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁
+
+ // construct the polynomials
+ // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y²
+ // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y²
+ // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻²
+ // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻²
+
+ // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields:
+ // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ
+ // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ
+
+ // from previous checks we already know:
+ // 1. a₀ = 1
+ // 2. b₀ = 1
+ // 3. c₀ = α
+ // 4. d₀ = β
+ // and so the desired results follow
+
+ ends := partialSums(len(a), len(c), len(d))
+
+ g1s := make([]curve.G1Affine, 0, ends[len(ends)-1])
+ g1s = append(g1s, a...)
+ g1s = append(g1s, c...)
+ g1s = append(g1s, d...) 
+ + g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) + g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + + if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + return errors.New("multi-value update check failed") } return nil + } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} From 358158bb3d8a445d37e5b7ef26a5205855eeb4f3 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:23:37 -0600 Subject: [PATCH 061/105] build generify regular setup changes --- backend/groth16/bls12-377/setup.go | 26 +++++------- backend/groth16/bls12-381/setup.go | 26 +++++------- backend/groth16/bls24-315/setup.go | 26 +++++------- backend/groth16/bls24-317/setup.go | 26 +++++------- backend/groth16/bw6-633/setup.go | 26 +++++------- backend/groth16/bw6-761/setup.go | 26 +++++------- .../zkpschemes/groth16/groth16.setup.go.tmpl | 40 ++++++++----------- 7 files changed, 70 insertions(+), 126 deletions(-) diff --git a/backend/groth16/bls12-377/setup.go b/backend/groth16/bls12-377/setup.go index 8b599d571e..37ca18a2f0 100644 --- a/backend/groth16/bls12-377/setup.go +++ b/backend/groth16/bls12-377/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := 
make([][]fr.Element, len(commitmentInfo)) for i := range commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? 
- if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/backend/groth16/bls12-381/setup.go b/backend/groth16/bls12-381/setup.go index 9c4eb95927..9a8d7c53bf 100644 --- a/backend/groth16/bls12-381/setup.go +++ b/backend/groth16/bls12-381/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. 
-1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? - if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/backend/groth16/bls24-315/setup.go b/backend/groth16/bls24-315/setup.go index bb126cc3ed..dcedeba04a 100644 --- a/backend/groth16/bls24-315/setup.go +++ b/backend/groth16/bls24-315/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). 
Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? 
- if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/backend/groth16/bls24-317/setup.go b/backend/groth16/bls24-317/setup.go index 9b42e4e305..3832928f17 100644 --- a/backend/groth16/bls24-317/setup.go +++ b/backend/groth16/bls24-317/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. 
-1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? - if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/backend/groth16/bw6-633/setup.go b/backend/groth16/bw6-633/setup.go index f7bf9f778f..e623b094ce 100644 --- a/backend/groth16/bw6-633/setup.go +++ b/backend/groth16/bw6-633/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). 
Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? 
- if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/backend/groth16/bw6-761/setup.go b/backend/groth16/bw6-761/setup.go index 59943fb933..e369539aa6 100644 --- a/backend/groth16/bw6-761/setup.go +++ b/backend/groth16/bw6-761/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. 
-1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? - if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl index c184bb2829..7698931372 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl @@ -12,6 +12,7 @@ import ( "math/bits" ) + // ProvingKey is used by a Groth16 prover to encode a proof of a statement // Notation follows Figure 4. 
in DIZK paper https://eprint.iacr.org/2018/691.pdf type ProvingKey struct { @@ -59,7 +60,7 @@ type VerifyingKey struct { // e(α, β) e curve.GT // not serialized - CommitmentKeys []pedersen.VerifyingKey + CommitmentKeys []pedersen.VerifyingKey PublicAndCommitmentCommitted [][]int // indexes of public/commitment committed variables } @@ -126,7 +127,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -138,37 +139,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? 
- if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { @@ -245,7 +238,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { offset += len(B) bitReverse(g1PointsAff[offset : offset+int(domain.Cardinality)]) - sizeZ := int(domain.Cardinality)-1 // deg(H)=deg(A*B-C/X^n-1)=(n-1)+(n-1)-n=n-2 + sizeZ := int(domain.Cardinality) - 1 // deg(H)=deg(A*B-C/X^n-1)=(n-1)+(n-1)-n=n-2 pk.G1.Z = g1PointsAff[offset : offset+sizeZ] offset += int(domain.Cardinality) @@ -321,7 +314,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vk.G1.Delta = pk.G1.Delta if err := vk.Precompute(); err != nil { - return err + return err } // set domain @@ -404,7 +397,7 @@ func setupABC(r1cs *cs.R1CS, domain *fft.Domain, toxicWaste toxicWaste) (A []fr. j := 0 it := r1cs.GetR1CIterator() - for c := it.Next(); c!=nil; c = it.Next() { + for c := it.Next(); c != nil; c = it.Next() { for _, t := range c.L { accumulate(&A[t.WireID()], t, &L) } @@ -419,7 +412,7 @@ func setupABC(r1cs *cs.R1CS, domain *fft.Domain, toxicWaste toxicWaste) (A []fr. 
L.Mul(&L, &w) L.Mul(&L, &t[j]) L.Mul(&L, &tInv[j+1]) - + j++ } @@ -557,7 +550,7 @@ func DummySetup(r1cs *cs.R1CS, pk *ProvingKey) error { } } - pk.CommitmentKeys,_, err = pedersen.Setup(commitmentBases) + pk.CommitmentKeys, _, err = pedersen.Setup(commitmentBases) if err != nil { return err } @@ -575,7 +568,7 @@ func dummyInfinityCount(r1cs *cs.R1CS) (nbZeroesA, nbZeroesB int) { B := make([]bool, nbWires) it := r1cs.GetR1CIterator() - for c := it.Next(); c!=nil; c = it.Next() { + for c := it.Next(); c != nil; c = it.Next() { for _, t := range c.L { A[t.WireID()] = true } @@ -584,7 +577,6 @@ func dummyInfinityCount(r1cs *cs.R1CS) (nbZeroesA, nbZeroesB int) { } } - for i := 0; i < nbWires; i++ { if !A[i] { nbZeroesA++ From 5b26861c566fcb394842f98b0adb64025bf194e7 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:26:24 -0600 Subject: [PATCH 062/105] build generify utils --- backend/groth16/bls12-377/mpcsetup/utils.go | 398 ++++++++++++++---- backend/groth16/bls12-381/mpcsetup/utils.go | 398 ++++++++++++++---- backend/groth16/bls24-315/mpcsetup/utils.go | 398 ++++++++++++++---- backend/groth16/bls24-317/mpcsetup/utils.go | 398 ++++++++++++++---- backend/groth16/bw6-633/mpcsetup/utils.go | 398 ++++++++++++++---- backend/groth16/bw6-761/mpcsetup/utils.go | 398 ++++++++++++++---- .../zkpschemes/groth16/mpcsetup/utils.go.tmpl | 393 +++++++++++++---- 7 files changed, 2237 insertions(+), 544 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/utils.go b/backend/groth16/bls12-377/mpcsetup/utils.go index a0757bc065..7d94f0e4ae 100644 --- a/backend/groth16/bls12-377/mpcsetup/utils.go +++ b/backend/groth16/bls12-377/mpcsetup/utils.go @@ -7,45 +7,16 @@ package mpcsetup import ( "bytes" - "math/big" - "math/bits" - "runtime" - + "errors" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" 
"github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "runtime" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,18 +29,33 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { +func linearCombCoeffs(n int) []fr.Element { + return bivariateRandomMonomials(n) +} + +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, n int) []fr.Element { + result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) + if n >= 1 { + result[0].SetOne() + } + if n >= 2 { + result[1].Set(a) + } + for i := 2; i < n; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -79,8 +65,13 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] 
in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -90,66 +81,127 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) +// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. +func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { + var nd1 curve.G1Affine + nd1.Neg(&d1) res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) + []curve.G1Affine{n1, nd1}, + []curve.G2Affine{d2, n2}) if err != nil { panic(err) } return res } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { +// returns ∑ rᵢAᵢ +func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() + var res curve.G1Affine + if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { + panic(err) } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return + return res } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG1 returns +// +// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] +// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] +// .... 
(truncated) +// +// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] +// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] +// .... (shifted) +// +// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. +// Also assumed that powers[0] = 1. +// The slices powers and A will be modified +func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { + if ends[len(ends)-1] != len(A) || len(A) != len(powers) { + panic("lengths mismatch") + } + + // zero out the large coefficients + for i := range ends { + powers[ends[i]-1].SetZero() + } + + msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} + + if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { + panic(err) + } + + var rInvNeg fr.Element + rInvNeg.Inverse(&powers[1]) + rInvNeg.Neg(&rInvNeg) + prevEnd := 0 + + // r⁻¹.truncated = + // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] + // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] + // ... + // + // compute shifted as + // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... + // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... + // + r⁻¹.truncated + for i := range ends { + powers[2*i].Mul(&powers[prevEnd], &rInvNeg) + powers[2*i+1] = powers[ends[i]-2] + A[2*i] = A[prevEnd] + A[2*i+1] = A[ends[i]-1] + prevEnd = ends[i] } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated + A[2*len(ends)] = truncated + + // TODO @Tabaie O(1) MSM worth it? 
+ if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { + panic(err) + } + return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). +// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ +func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { + + N := len(A) + + if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { + panic(err) } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + + // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] + var ( + x fr.Element + i big.Int + ) + x.Neg(&rPowers[N-2]) + x.BigInt(&i) + truncated. + ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] + Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] + + rPowers[1].BigInt(&i) + truncated. + ScalarMultiplication(&truncated, &i). 
// r.shifted - rᴺ⁻¹.A[N-1] + Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] + return } -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { +// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) +// it is to be used as a challenge for generating a proof of knowledge to x +// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) +func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) if err != nil { @@ -157,3 +209,193 @@ func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { } return spG2 } + +type pair struct { + g1 curve.G1Affine + g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. +} + +// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero +func (p *pair) validUpdate() bool { + // if the contribution is 0 the product is doomed to be 0. + // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail + return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) +} + +type valueUpdate struct { + contributionCommitment curve.G1Affine // x or [Xⱼ]₁ + contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ +} + +// newValueUpdate produces values associated with contribution to an existing value. +// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. 
+func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { + if _, err := contributionValue.SetRandom(); err != nil { + panic(err) + } + var contributionValueI big.Int + contributionValue.BigInt(&contributionValueI) + + _, _, gen1, _ := curve.Generators() + proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) + + // proof of knowledge to commitment. Algorithm 3 from section 3.7 + pokBase := genR(proof.contributionCommitment, challenge, dst) // r + proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) + + return +} + +// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) +// option for linear combination vector + +// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 +// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution +// and previous commitment makes the new commitment. +// prevCommitment is assumed to be valid. No subgroup check and the like. +func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { + noG2 := denom.g2 == nil + if noG2 != (num.g2 == nil) { + return errors.New("erasing or creating g2 values") + } + + if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { + return errors.New("contribution values subgroup check failed") + } + + // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 + r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base + _, _, g1, _ := curve.Generators() + if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + return errors.New("contribution proof of knowledge verification failed") + } + + // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. 
Based on CONSISTENT, algorithm 2 in Section 3.6. + if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + return errors.New("g2 update inconsistent") + } + + // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) + // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values + if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + return errors.New("g1 update inconsistent") + } + + return nil +} + +func toRefs[T any](s []T) []*T { + res := make([]*T, len(s)) + for i := range s { + res[i] = &s[i] + } + return res +} + +func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { + for i := range s { + if !s[i].IsInSubGroup() { + return false + } + } + return true +} + +func areInSubGroupG1(s []curve.G1Affine) bool { + return areInSubGroup(toRefs(s)) +} + +func areInSubGroupG2(s []curve.G2Affine) bool { + return areInSubGroup(toRefs(s)) +} + +// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
+// all concatenated in the same slice +func bivariateRandomMonomials(ends ...int) []fr.Element { + if len(ends) == 0 { + return nil + } + + res := make([]fr.Element, ends[len(ends)-1]) + if _, err := res[1].SetRandom(); err != nil { + panic(err) + } + setPowers(res[:ends[0]]) + + if len(ends) == 1 { + return res + } + + y := make([]fr.Element, len(ends)) + if _, err := y[1].SetRandom(); err != nil { + panic(err) + } + setPowers(y) + + for d := 1; d < len(ends); d++ { + xdeg := ends[d] - ends[d-1] + if xdeg > ends[0] { + panic("impl detail: first maximum degree for x must be the greatest") + } + + for i := range xdeg { + res[ends[d-1]+i].Mul(&res[i], &y[d]) + } + } + + return res +} + +// sets x[i] = x[1]ⁱ +func setPowers(x []fr.Element) { + if len(x) == 0 { + return + } + x[0].SetOne() + for i := 2; i < len(x); i++ { + x[i].Mul(&x[i-1], &x[1]) + } +} + +func partialSums(s ...int) []int { + if len(s) == 0 { + return nil + } + sums := make([]int, len(s)) + sums[0] = s[0] + for i := 1; i < len(s); i++ { + sums[i] = sums[i-1] + s[i] + } + return sums +} + +func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { + var ( + bb bytes.Buffer + err error + ) + bb.Grow(len(hash) + len(beaconChallenge)) + bb.Write(hash) + bb.Write(beaconChallenge) + + res := make([]fr.Element, 1) + + allNonZero := func() bool { + for i := range res { + if res[i].IsZero() { + return false + } + } + return true + } + + // cryptographically unlikely for this to be run more than once + for !allNonZero() { + if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { + panic(err) + } + bb.WriteByte('=') // padding just so that the hash is different next time + } + + return res +} diff --git a/backend/groth16/bls12-381/mpcsetup/utils.go b/backend/groth16/bls12-381/mpcsetup/utils.go index 6868c0278f..07ba042c23 100644 --- a/backend/groth16/bls12-381/mpcsetup/utils.go +++ b/backend/groth16/bls12-381/mpcsetup/utils.go @@ -7,45 
+7,16 @@ package mpcsetup import ( "bytes" - "math/big" - "math/bits" - "runtime" - + "errors" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "runtime" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,18 +29,33 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { +func linearCombCoeffs(n int) []fr.Element { + return bivariateRandomMonomials(n) +} + +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, n int) []fr.Element { + result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) + if n >= 1 { + result[0].SetOne() + } + if n >= 2 { + result[1].Set(a) + } + for i := 2; i < n; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] 
in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -79,8 +65,13 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -90,66 +81,127 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) +// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
+func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { + var nd1 curve.G1Affine + nd1.Neg(&d1) res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) + []curve.G1Affine{n1, nd1}, + []curve.G2Affine{d2, n2}) if err != nil { panic(err) } return res } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { +// returns ∑ rᵢAᵢ +func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() + var res curve.G1Affine + if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { + panic(err) } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return + return res } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG1 returns +// +// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] +// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] +// .... (truncated) +// +// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] +// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] +// .... (shifted) +// +// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. +// Also assumed that powers[0] = 1. 
+// The slices powers and A will be modified +func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { + if ends[len(ends)-1] != len(A) || len(A) != len(powers) { + panic("lengths mismatch") + } + + // zero out the large coefficients + for i := range ends { + powers[ends[i]-1].SetZero() + } + + msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} + + if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { + panic(err) + } + + var rInvNeg fr.Element + rInvNeg.Inverse(&powers[1]) + rInvNeg.Neg(&rInvNeg) + prevEnd := 0 + + // r⁻¹.truncated = + // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] + // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] + // ... + // + // compute shifted as + // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... + // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... + // + r⁻¹.truncated + for i := range ends { + powers[2*i].Mul(&powers[prevEnd], &rInvNeg) + powers[2*i+1] = powers[ends[i]-2] + A[2*i] = A[prevEnd] + A[2*i+1] = A[ends[i]-1] + prevEnd = ends[i] } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated + A[2*len(ends)] = truncated + + // TODO @Tabaie O(1) MSM worth it? + if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { + panic(err) + } + return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). 
+// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ +func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { + + N := len(A) + + if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { + panic(err) } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + + // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] + var ( + x fr.Element + i big.Int + ) + x.Neg(&rPowers[N-2]) + x.BigInt(&i) + truncated. + ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] + Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] + + rPowers[1].BigInt(&i) + truncated. + ScalarMultiplication(&truncated, &i). // r.shifted - rᴺ⁻¹.A[N-1] + Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] + return } -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { +// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) +// it is to be used as a challenge for generating a proof of knowledge to x +// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) +func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) if err != nil { @@ -157,3 +209,193 @@ func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { } return spG2 } + +type pair struct { + g1 curve.G1Affine + g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. +} + +// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero +func (p *pair) validUpdate() bool { + // if the contribution is 0 the product is doomed to be 0. 
+ // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail + return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) +} + +type valueUpdate struct { + contributionCommitment curve.G1Affine // x or [Xⱼ]₁ + contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ +} + +// newValueUpdate produces values associated with contribution to an existing value. +// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. +func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { + if _, err := contributionValue.SetRandom(); err != nil { + panic(err) + } + var contributionValueI big.Int + contributionValue.BigInt(&contributionValueI) + + _, _, gen1, _ := curve.Generators() + proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) + + // proof of knowledge to commitment. Algorithm 3 from section 3.7 + pokBase := genR(proof.contributionCommitment, challenge, dst) // r + proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) + + return +} + +// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) +// option for linear combination vector + +// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 +// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution +// and previous commitment makes the new commitment. +// prevCommitment is assumed to be valid. No subgroup check and the like. 
+func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { + noG2 := denom.g2 == nil + if noG2 != (num.g2 == nil) { + return errors.New("erasing or creating g2 values") + } + + if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { + return errors.New("contribution values subgroup check failed") + } + + // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 + r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base + _, _, g1, _ := curve.Generators() + if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + return errors.New("contribution proof of knowledge verification failed") + } + + // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. + if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + return errors.New("g2 update inconsistent") + } + + // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) + // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values + if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + return errors.New("g1 update inconsistent") + } + + return nil +} + +func toRefs[T any](s []T) []*T { + res := make([]*T, len(s)) + for i := range s { + res[i] = &s[i] + } + return res +} + +func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { + for i := range s { + if !s[i].IsInSubGroup() { + return false + } + } + return true +} + +func areInSubGroupG1(s []curve.G1Affine) bool { + return areInSubGroup(toRefs(s)) +} + +func areInSubGroupG2(s []curve.G2Affine) bool { + return areInSubGroup(toRefs(s)) +} + +// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
+// all concatenated in the same slice +func bivariateRandomMonomials(ends ...int) []fr.Element { + if len(ends) == 0 { + return nil + } + + res := make([]fr.Element, ends[len(ends)-1]) + if _, err := res[1].SetRandom(); err != nil { + panic(err) + } + setPowers(res[:ends[0]]) + + if len(ends) == 1 { + return res + } + + y := make([]fr.Element, len(ends)) + if _, err := y[1].SetRandom(); err != nil { + panic(err) + } + setPowers(y) + + for d := 1; d < len(ends); d++ { + xdeg := ends[d] - ends[d-1] + if xdeg > ends[0] { + panic("impl detail: first maximum degree for x must be the greatest") + } + + for i := range xdeg { + res[ends[d-1]+i].Mul(&res[i], &y[d]) + } + } + + return res +} + +// sets x[i] = x[1]ⁱ +func setPowers(x []fr.Element) { + if len(x) == 0 { + return + } + x[0].SetOne() + for i := 2; i < len(x); i++ { + x[i].Mul(&x[i-1], &x[1]) + } +} + +func partialSums(s ...int) []int { + if len(s) == 0 { + return nil + } + sums := make([]int, len(s)) + sums[0] = s[0] + for i := 1; i < len(s); i++ { + sums[i] = sums[i-1] + s[i] + } + return sums +} + +func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { + var ( + bb bytes.Buffer + err error + ) + bb.Grow(len(hash) + len(beaconChallenge)) + bb.Write(hash) + bb.Write(beaconChallenge) + + res := make([]fr.Element, 1) + + allNonZero := func() bool { + for i := range res { + if res[i].IsZero() { + return false + } + } + return true + } + + // cryptographically unlikely for this to be run more than once + for !allNonZero() { + if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { + panic(err) + } + bb.WriteByte('=') // padding just so that the hash is different next time + } + + return res +} diff --git a/backend/groth16/bls24-315/mpcsetup/utils.go b/backend/groth16/bls24-315/mpcsetup/utils.go index 0bd4cdd3cb..0cd37ad76e 100644 --- a/backend/groth16/bls24-315/mpcsetup/utils.go +++ b/backend/groth16/bls24-315/mpcsetup/utils.go @@ -7,45 
+7,16 @@ package mpcsetup import ( "bytes" - "math/big" - "math/bits" - "runtime" - + "errors" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "runtime" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,18 +29,33 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { +func linearCombCoeffs(n int) []fr.Element { + return bivariateRandomMonomials(n) +} + +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, n int) []fr.Element { + result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) + if n >= 1 { + result[0].SetOne() + } + if n >= 2 { + result[1].Set(a) + } + for i := 2; i < n; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] 
in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -79,8 +65,13 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -90,66 +81,127 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) +// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
+func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { + var nd1 curve.G1Affine + nd1.Neg(&d1) res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) + []curve.G1Affine{n1, nd1}, + []curve.G2Affine{d2, n2}) if err != nil { panic(err) } return res } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { +// returns ∑ rᵢAᵢ +func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() + var res curve.G1Affine + if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { + panic(err) } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return + return res } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG1 returns +// +// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] +// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] +// .... (truncated) +// +// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] +// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] +// .... (shifted) +// +// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. +// Also assumed that powers[0] = 1. 
+// The slices powers and A will be modified +func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { + if ends[len(ends)-1] != len(A) || len(A) != len(powers) { + panic("lengths mismatch") + } + + // zero out the large coefficients + for i := range ends { + powers[ends[i]-1].SetZero() + } + + msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} + + if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { + panic(err) + } + + var rInvNeg fr.Element + rInvNeg.Inverse(&powers[1]) + rInvNeg.Neg(&rInvNeg) + prevEnd := 0 + + // r⁻¹.truncated = + // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] + // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] + // ... + // + // compute shifted as + // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... + // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... + // + r⁻¹.truncated + for i := range ends { + powers[2*i].Mul(&powers[prevEnd], &rInvNeg) + powers[2*i+1] = powers[ends[i]-2] + A[2*i] = A[prevEnd] + A[2*i+1] = A[ends[i]-1] + prevEnd = ends[i] } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated + A[2*len(ends)] = truncated + + // TODO @Tabaie O(1) MSM worth it? + if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { + panic(err) + } + return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). 
+// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ +func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { + + N := len(A) + + if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { + panic(err) } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + + // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] + var ( + x fr.Element + i big.Int + ) + x.Neg(&rPowers[N-2]) + x.BigInt(&i) + truncated. + ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] + Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] + + rPowers[1].BigInt(&i) + truncated. + ScalarMultiplication(&truncated, &i). // r.shifted - rᴺ⁻¹.A[N-1] + Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] + return } -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { +// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) +// it is to be used as a challenge for generating a proof of knowledge to x +// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) +func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) if err != nil { @@ -157,3 +209,193 @@ func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { } return spG2 } + +type pair struct { + g1 curve.G1Affine + g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. +} + +// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero +func (p *pair) validUpdate() bool { + // if the contribution is 0 the product is doomed to be 0. 
+ // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail + return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) +} + +type valueUpdate struct { + contributionCommitment curve.G1Affine // x or [Xⱼ]₁ + contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ +} + +// newValueUpdate produces values associated with contribution to an existing value. +// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. +func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { + if _, err := contributionValue.SetRandom(); err != nil { + panic(err) + } + var contributionValueI big.Int + contributionValue.BigInt(&contributionValueI) + + _, _, gen1, _ := curve.Generators() + proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) + + // proof of knowledge to commitment. Algorithm 3 from section 3.7 + pokBase := genR(proof.contributionCommitment, challenge, dst) // r + proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) + + return +} + +// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) +// option for linear combination vector + +// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 +// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution +// and previous commitment makes the new commitment. +// prevCommitment is assumed to be valid. No subgroup check and the like. 
+func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { + noG2 := denom.g2 == nil + if noG2 != (num.g2 == nil) { + return errors.New("erasing or creating g2 values") + } + + if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { + return errors.New("contribution values subgroup check failed") + } + + // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 + r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base + _, _, g1, _ := curve.Generators() + if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + return errors.New("contribution proof of knowledge verification failed") + } + + // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. + if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + return errors.New("g2 update inconsistent") + } + + // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) + // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values + if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + return errors.New("g1 update inconsistent") + } + + return nil +} + +func toRefs[T any](s []T) []*T { + res := make([]*T, len(s)) + for i := range s { + res[i] = &s[i] + } + return res +} + +func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { + for i := range s { + if !s[i].IsInSubGroup() { + return false + } + } + return true +} + +func areInSubGroupG1(s []curve.G1Affine) bool { + return areInSubGroup(toRefs(s)) +} + +func areInSubGroupG2(s []curve.G2Affine) bool { + return areInSubGroup(toRefs(s)) +} + +// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
+// all concatenated in the same slice +func bivariateRandomMonomials(ends ...int) []fr.Element { + if len(ends) == 0 { + return nil + } + + res := make([]fr.Element, ends[len(ends)-1]) + if _, err := res[1].SetRandom(); err != nil { + panic(err) + } + setPowers(res[:ends[0]]) + + if len(ends) == 1 { + return res + } + + y := make([]fr.Element, len(ends)) + if _, err := y[1].SetRandom(); err != nil { + panic(err) + } + setPowers(y) + + for d := 1; d < len(ends); d++ { + xdeg := ends[d] - ends[d-1] + if xdeg > ends[0] { + panic("impl detail: first maximum degree for x must be the greatest") + } + + for i := range xdeg { + res[ends[d-1]+i].Mul(&res[i], &y[d]) + } + } + + return res +} + +// sets x[i] = x[1]ⁱ +func setPowers(x []fr.Element) { + if len(x) == 0 { + return + } + x[0].SetOne() + for i := 2; i < len(x); i++ { + x[i].Mul(&x[i-1], &x[1]) + } +} + +func partialSums(s ...int) []int { + if len(s) == 0 { + return nil + } + sums := make([]int, len(s)) + sums[0] = s[0] + for i := 1; i < len(s); i++ { + sums[i] = sums[i-1] + s[i] + } + return sums +} + +func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { + var ( + bb bytes.Buffer + err error + ) + bb.Grow(len(hash) + len(beaconChallenge)) + bb.Write(hash) + bb.Write(beaconChallenge) + + res := make([]fr.Element, 1) + + allNonZero := func() bool { + for i := range res { + if res[i].IsZero() { + return false + } + } + return true + } + + // cryptographically unlikely for this to be run more than once + for !allNonZero() { + if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { + panic(err) + } + bb.WriteByte('=') // padding just so that the hash is different next time + } + + return res +} diff --git a/backend/groth16/bls24-317/mpcsetup/utils.go b/backend/groth16/bls24-317/mpcsetup/utils.go index 69dbbdefac..e36f24d274 100644 --- a/backend/groth16/bls24-317/mpcsetup/utils.go +++ b/backend/groth16/bls24-317/mpcsetup/utils.go @@ -7,45 
+7,16 @@ package mpcsetup import ( "bytes" - "math/big" - "math/bits" - "runtime" - + "errors" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "runtime" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,18 +29,33 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { +func linearCombCoeffs(n int) []fr.Element { + return bivariateRandomMonomials(n) +} + +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, n int) []fr.Element { + result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) + if n >= 1 { + result[0].SetOne() + } + if n >= 2 { + result[1].Set(a) + } + for i := 2; i < n; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] 
in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -79,8 +65,13 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -90,66 +81,127 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) +// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
+func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { + var nd1 curve.G1Affine + nd1.Neg(&d1) res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) + []curve.G1Affine{n1, nd1}, + []curve.G2Affine{d2, n2}) if err != nil { panic(err) } return res } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { +// returns ∑ rᵢAᵢ +func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() + var res curve.G1Affine + if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { + panic(err) } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return + return res } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG1 returns +// +// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] +// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] +// .... (truncated) +// +// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] +// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] +// .... (shifted) +// +// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. +// Also assumed that powers[0] = 1. 
+// The slices powers and A will be modified +func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { + if ends[len(ends)-1] != len(A) || len(A) != len(powers) { + panic("lengths mismatch") + } + + // zero out the large coefficients + for i := range ends { + powers[ends[i]-1].SetZero() + } + + msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} + + if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { + panic(err) + } + + var rInvNeg fr.Element + rInvNeg.Inverse(&powers[1]) + rInvNeg.Neg(&rInvNeg) + prevEnd := 0 + + // r⁻¹.truncated = + // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] + // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] + // ... + // + // compute shifted as + // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... + // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... + // + r⁻¹.truncated + for i := range ends { + powers[2*i].Mul(&powers[prevEnd], &rInvNeg) + powers[2*i+1] = powers[ends[i]-2] + A[2*i] = A[prevEnd] + A[2*i+1] = A[ends[i]-1] + prevEnd = ends[i] } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated + A[2*len(ends)] = truncated + + // TODO @Tabaie O(1) MSM worth it? + if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { + panic(err) + } + return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). 
+// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ +func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { + + N := len(A) + + if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { + panic(err) } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + + // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] + var ( + x fr.Element + i big.Int + ) + x.Neg(&rPowers[N-2]) + x.BigInt(&i) + truncated. + ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] + Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] + + rPowers[1].BigInt(&i) + truncated. + ScalarMultiplication(&truncated, &i). // r.shifted - rᴺ⁻¹.A[N-1] + Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] + return } -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { +// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) +// it is to be used as a challenge for generating a proof of knowledge to x +// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) +func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) if err != nil { @@ -157,3 +209,193 @@ func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { } return spG2 } + +type pair struct { + g1 curve.G1Affine + g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. +} + +// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero +func (p *pair) validUpdate() bool { + // if the contribution is 0 the product is doomed to be 0. 
+ // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail + return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) +} + +type valueUpdate struct { + contributionCommitment curve.G1Affine // x or [Xⱼ]₁ + contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ +} + +// newValueUpdate produces values associated with contribution to an existing value. +// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. +func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { + if _, err := contributionValue.SetRandom(); err != nil { + panic(err) + } + var contributionValueI big.Int + contributionValue.BigInt(&contributionValueI) + + _, _, gen1, _ := curve.Generators() + proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) + + // proof of knowledge to commitment. Algorithm 3 from section 3.7 + pokBase := genR(proof.contributionCommitment, challenge, dst) // r + proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) + + return +} + +// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) +// option for linear combination vector + +// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 +// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution +// and previous commitment makes the new commitment. +// prevCommitment is assumed to be valid. No subgroup check and the like. 
+func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { + noG2 := denom.g2 == nil + if noG2 != (num.g2 == nil) { + return errors.New("erasing or creating g2 values") + } + + if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { + return errors.New("contribution values subgroup check failed") + } + + // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 + r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base + _, _, g1, _ := curve.Generators() + if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + return errors.New("contribution proof of knowledge verification failed") + } + + // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. + if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + return errors.New("g2 update inconsistent") + } + + // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) + // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values + if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + return errors.New("g1 update inconsistent") + } + + return nil +} + +func toRefs[T any](s []T) []*T { + res := make([]*T, len(s)) + for i := range s { + res[i] = &s[i] + } + return res +} + +func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { + for i := range s { + if !s[i].IsInSubGroup() { + return false + } + } + return true +} + +func areInSubGroupG1(s []curve.G1Affine) bool { + return areInSubGroup(toRefs(s)) +} + +func areInSubGroupG2(s []curve.G2Affine) bool { + return areInSubGroup(toRefs(s)) +} + +// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
+// all concatenated in the same slice +func bivariateRandomMonomials(ends ...int) []fr.Element { + if len(ends) == 0 { + return nil + } + + res := make([]fr.Element, ends[len(ends)-1]) + if _, err := res[1].SetRandom(); err != nil { + panic(err) + } + setPowers(res[:ends[0]]) + + if len(ends) == 1 { + return res + } + + y := make([]fr.Element, len(ends)) + if _, err := y[1].SetRandom(); err != nil { + panic(err) + } + setPowers(y) + + for d := 1; d < len(ends); d++ { + xdeg := ends[d] - ends[d-1] + if xdeg > ends[0] { + panic("impl detail: first maximum degree for x must be the greatest") + } + + for i := range xdeg { + res[ends[d-1]+i].Mul(&res[i], &y[d]) + } + } + + return res +} + +// sets x[i] = x[1]ⁱ +func setPowers(x []fr.Element) { + if len(x) == 0 { + return + } + x[0].SetOne() + for i := 2; i < len(x); i++ { + x[i].Mul(&x[i-1], &x[1]) + } +} + +func partialSums(s ...int) []int { + if len(s) == 0 { + return nil + } + sums := make([]int, len(s)) + sums[0] = s[0] + for i := 1; i < len(s); i++ { + sums[i] = sums[i-1] + s[i] + } + return sums +} + +func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { + var ( + bb bytes.Buffer + err error + ) + bb.Grow(len(hash) + len(beaconChallenge)) + bb.Write(hash) + bb.Write(beaconChallenge) + + res := make([]fr.Element, 1) + + allNonZero := func() bool { + for i := range res { + if res[i].IsZero() { + return false + } + } + return true + } + + // cryptographically unlikely for this to be run more than once + for !allNonZero() { + if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { + panic(err) + } + bb.WriteByte('=') // padding just so that the hash is different next time + } + + return res +} diff --git a/backend/groth16/bw6-633/mpcsetup/utils.go b/backend/groth16/bw6-633/mpcsetup/utils.go index 858e911e30..7fb5a5f554 100644 --- a/backend/groth16/bw6-633/mpcsetup/utils.go +++ b/backend/groth16/bw6-633/mpcsetup/utils.go @@ -7,45 +7,16 
@@ package mpcsetup import ( "bytes" - "math/big" - "math/bits" - "runtime" - + "errors" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "runtime" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,18 +29,33 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { +func linearCombCoeffs(n int) []fr.Element { + return bivariateRandomMonomials(n) +} + +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, n int) []fr.Element { + result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) + if n >= 1 { + result[0].SetOne() + } + if n >= 2 { + result[1].Set(a) + } + for i := 2; i < n; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] 
in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -79,8 +65,13 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -90,66 +81,127 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) +// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
+func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { + var nd1 curve.G1Affine + nd1.Neg(&d1) res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) + []curve.G1Affine{n1, nd1}, + []curve.G2Affine{d2, n2}) if err != nil { panic(err) } return res } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { +// returns ∑ rᵢAᵢ +func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() + var res curve.G1Affine + if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { + panic(err) } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return + return res } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG1 returns +// +// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] +// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] +// .... (truncated) +// +// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] +// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] +// .... (shifted) +// +// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. +// Also assumed that powers[0] = 1. 
+// The slices powers and A will be modified +func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { + if ends[len(ends)-1] != len(A) || len(A) != len(powers) { + panic("lengths mismatch") + } + + // zero out the large coefficients + for i := range ends { + powers[ends[i]-1].SetZero() + } + + msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} + + if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { + panic(err) + } + + var rInvNeg fr.Element + rInvNeg.Inverse(&powers[1]) + rInvNeg.Neg(&rInvNeg) + prevEnd := 0 + + // r⁻¹.truncated = + // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] + // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] + // ... + // + // compute shifted as + // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... + // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... + // + r⁻¹.truncated + for i := range ends { + powers[2*i].Mul(&powers[prevEnd], &rInvNeg) + powers[2*i+1] = powers[ends[i]-2] + A[2*i] = A[prevEnd] + A[2*i+1] = A[ends[i]-1] + prevEnd = ends[i] } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated + A[2*len(ends)] = truncated + + // TODO @Tabaie O(1) MSM worth it? + if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { + panic(err) + } + return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). 
+// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ +func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { + + N := len(A) + + if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { + panic(err) } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + + // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] + var ( + x fr.Element + i big.Int + ) + x.Neg(&rPowers[N-2]) + x.BigInt(&i) + truncated. + ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] + Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] + + rPowers[1].BigInt(&i) + truncated. + ScalarMultiplication(&truncated, &i). // r.shifted - rᴺ⁻¹.A[N-1] + Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] + return } -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { +// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) +// it is to be used as a challenge for generating a proof of knowledge to x +// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) +func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) if err != nil { @@ -157,3 +209,193 @@ func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { } return spG2 } + +type pair struct { + g1 curve.G1Affine + g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. +} + +// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero +func (p *pair) validUpdate() bool { + // if the contribution is 0 the product is doomed to be 0. 
+ // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail + return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) +} + +type valueUpdate struct { + contributionCommitment curve.G1Affine // x or [Xⱼ]₁ + contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ +} + +// newValueUpdate produces values associated with contribution to an existing value. +// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. +func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { + if _, err := contributionValue.SetRandom(); err != nil { + panic(err) + } + var contributionValueI big.Int + contributionValue.BigInt(&contributionValueI) + + _, _, gen1, _ := curve.Generators() + proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) + + // proof of knowledge to commitment. Algorithm 3 from section 3.7 + pokBase := genR(proof.contributionCommitment, challenge, dst) // r + proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) + + return +} + +// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) +// option for linear combination vector + +// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 +// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution +// and previous commitment makes the new commitment. +// prevCommitment is assumed to be valid. No subgroup check and the like. 
+func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { + noG2 := denom.g2 == nil + if noG2 != (num.g2 == nil) { + return errors.New("erasing or creating g2 values") + } + + if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { + return errors.New("contribution values subgroup check failed") + } + + // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 + r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base + _, _, g1, _ := curve.Generators() + if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + return errors.New("contribution proof of knowledge verification failed") + } + + // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. + if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + return errors.New("g2 update inconsistent") + } + + // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) + // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values + if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + return errors.New("g1 update inconsistent") + } + + return nil +} + +func toRefs[T any](s []T) []*T { + res := make([]*T, len(s)) + for i := range s { + res[i] = &s[i] + } + return res +} + +func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { + for i := range s { + if !s[i].IsInSubGroup() { + return false + } + } + return true +} + +func areInSubGroupG1(s []curve.G1Affine) bool { + return areInSubGroup(toRefs(s)) +} + +func areInSubGroupG2(s []curve.G2Affine) bool { + return areInSubGroup(toRefs(s)) +} + +// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
+// all concatenated in the same slice +func bivariateRandomMonomials(ends ...int) []fr.Element { + if len(ends) == 0 { + return nil + } + + res := make([]fr.Element, ends[len(ends)-1]) + if _, err := res[1].SetRandom(); err != nil { + panic(err) + } + setPowers(res[:ends[0]]) + + if len(ends) == 1 { + return res + } + + y := make([]fr.Element, len(ends)) + if _, err := y[1].SetRandom(); err != nil { + panic(err) + } + setPowers(y) + + for d := 1; d < len(ends); d++ { + xdeg := ends[d] - ends[d-1] + if xdeg > ends[0] { + panic("impl detail: first maximum degree for x must be the greatest") + } + + for i := range xdeg { + res[ends[d-1]+i].Mul(&res[i], &y[d]) + } + } + + return res +} + +// sets x[i] = x[1]ⁱ +func setPowers(x []fr.Element) { + if len(x) == 0 { + return + } + x[0].SetOne() + for i := 2; i < len(x); i++ { + x[i].Mul(&x[i-1], &x[1]) + } +} + +func partialSums(s ...int) []int { + if len(s) == 0 { + return nil + } + sums := make([]int, len(s)) + sums[0] = s[0] + for i := 1; i < len(s); i++ { + sums[i] = sums[i-1] + s[i] + } + return sums +} + +func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { + var ( + bb bytes.Buffer + err error + ) + bb.Grow(len(hash) + len(beaconChallenge)) + bb.Write(hash) + bb.Write(beaconChallenge) + + res := make([]fr.Element, 1) + + allNonZero := func() bool { + for i := range res { + if res[i].IsZero() { + return false + } + } + return true + } + + // cryptographically unlikely for this to be run more than once + for !allNonZero() { + if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { + panic(err) + } + bb.WriteByte('=') // padding just so that the hash is different next time + } + + return res +} diff --git a/backend/groth16/bw6-761/mpcsetup/utils.go b/backend/groth16/bw6-761/mpcsetup/utils.go index c6df8a75ff..f2b73774e3 100644 --- a/backend/groth16/bw6-761/mpcsetup/utils.go +++ b/backend/groth16/bw6-761/mpcsetup/utils.go @@ -7,45 +7,16 
@@ package mpcsetup import ( "bytes" - "math/big" - "math/bits" - "runtime" - + "errors" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "runtime" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,18 +29,33 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { +func linearCombCoeffs(n int) []fr.Element { + return bivariateRandomMonomials(n) +} + +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, n int) []fr.Element { + result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) + if n >= 1 { + result[0].SetOne() + } + if n >= 2 { + result[1].Set(a) + } + for i := 2; i < n; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] 
in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -79,8 +65,13 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -90,66 +81,127 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) +// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
+func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { + var nd1 curve.G1Affine + nd1.Neg(&d1) res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) + []curve.G1Affine{n1, nd1}, + []curve.G2Affine{d2, n2}) if err != nil { panic(err) } return res } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { +// returns ∑ rᵢAᵢ +func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() + var res curve.G1Affine + if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { + panic(err) } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return + return res } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG1 returns +// +// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] +// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] +// .... (truncated) +// +// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] +// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] +// .... (shifted) +// +// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. +// Also assumed that powers[0] = 1. 
+// The slices powers and A will be modified +func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { + if ends[len(ends)-1] != len(A) || len(A) != len(powers) { + panic("lengths mismatch") + } + + // zero out the large coefficients + for i := range ends { + powers[ends[i]-1].SetZero() + } + + msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} + + if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { + panic(err) + } + + var rInvNeg fr.Element + rInvNeg.Inverse(&powers[1]) + rInvNeg.Neg(&rInvNeg) + prevEnd := 0 + + // r⁻¹.truncated = + // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] + // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] + // ... + // + // compute shifted as + // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... + // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... + // + r⁻¹.truncated + for i := range ends { + powers[2*i].Mul(&powers[prevEnd], &rInvNeg) + powers[2*i+1] = powers[ends[i]-2] + A[2*i] = A[prevEnd] + A[2*i+1] = A[ends[i]-1] + prevEnd = ends[i] } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated + A[2*len(ends)] = truncated + + // TODO @Tabaie O(1) MSM worth it? + if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { + panic(err) + } + return } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i +// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). 
+// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ +func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { + + N := len(A) + + if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { + panic(err) } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) + + // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] + var ( + x fr.Element + i big.Int + ) + x.Neg(&rPowers[N-2]) + x.BigInt(&i) + truncated. + ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] + Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] + + rPowers[1].BigInt(&i) + truncated. + ScalarMultiplication(&truncated, &i). // r.shifted - rᴺ⁻¹.A[N-1] + Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] + return } -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { +// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) +// it is to be used as a challenge for generating a proof of knowledge to x +// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) +func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) if err != nil { @@ -157,3 +209,193 @@ func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { } return spG2 } + +type pair struct { + g1 curve.G1Affine + g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. +} + +// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero +func (p *pair) validUpdate() bool { + // if the contribution is 0 the product is doomed to be 0. 
+ // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail + return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) +} + +type valueUpdate struct { + contributionCommitment curve.G1Affine // x or [Xⱼ]₁ + contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ +} + +// newValueUpdate produces values associated with contribution to an existing value. +// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. +func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { + if _, err := contributionValue.SetRandom(); err != nil { + panic(err) + } + var contributionValueI big.Int + contributionValue.BigInt(&contributionValueI) + + _, _, gen1, _ := curve.Generators() + proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) + + // proof of knowledge to commitment. Algorithm 3 from section 3.7 + pokBase := genR(proof.contributionCommitment, challenge, dst) // r + proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) + + return +} + +// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) +// option for linear combination vector + +// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 +// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution +// and previous commitment makes the new commitment. +// prevCommitment is assumed to be valid. No subgroup check and the like. 
+func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { + noG2 := denom.g2 == nil + if noG2 != (num.g2 == nil) { + return errors.New("erasing or creating g2 values") + } + + if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { + return errors.New("contribution values subgroup check failed") + } + + // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 + r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base + _, _, g1, _ := curve.Generators() + if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + return errors.New("contribution proof of knowledge verification failed") + } + + // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. + if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + return errors.New("g2 update inconsistent") + } + + // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) + // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values + if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + return errors.New("g1 update inconsistent") + } + + return nil +} + +func toRefs[T any](s []T) []*T { + res := make([]*T, len(s)) + for i := range s { + res[i] = &s[i] + } + return res +} + +func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { + for i := range s { + if !s[i].IsInSubGroup() { + return false + } + } + return true +} + +func areInSubGroupG1(s []curve.G1Affine) bool { + return areInSubGroup(toRefs(s)) +} + +func areInSubGroupG2(s []curve.G2Affine) bool { + return areInSubGroup(toRefs(s)) +} + +// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
+// all concatenated in the same slice +func bivariateRandomMonomials(ends ...int) []fr.Element { + if len(ends) == 0 { + return nil + } + + res := make([]fr.Element, ends[len(ends)-1]) + if _, err := res[1].SetRandom(); err != nil { + panic(err) + } + setPowers(res[:ends[0]]) + + if len(ends) == 1 { + return res + } + + y := make([]fr.Element, len(ends)) + if _, err := y[1].SetRandom(); err != nil { + panic(err) + } + setPowers(y) + + for d := 1; d < len(ends); d++ { + xdeg := ends[d] - ends[d-1] + if xdeg > ends[0] { + panic("impl detail: first maximum degree for x must be the greatest") + } + + for i := range xdeg { + res[ends[d-1]+i].Mul(&res[i], &y[d]) + } + } + + return res +} + +// sets x[i] = x[1]ⁱ +func setPowers(x []fr.Element) { + if len(x) == 0 { + return + } + x[0].SetOne() + for i := 2; i < len(x); i++ { + x[i].Mul(&x[i-1], &x[1]) + } +} + +func partialSums(s ...int) []int { + if len(s) == 0 { + return nil + } + sums := make([]int, len(s)) + sums[0] = s[0] + for i := 1; i < len(s); i++ { + sums[i] = sums[i-1] + s[i] + } + return sums +} + +func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { + var ( + bb bytes.Buffer + err error + ) + bb.Grow(len(hash) + len(beaconChallenge)) + bb.Write(hash) + bb.Write(beaconChallenge) + + res := make([]fr.Element, 1) + + allNonZero := func() bool { + for i := range res { + if res[i].IsZero() { + return false + } + } + return true + } + + // cryptographically unlikely for this to be run more than once + for !allNonZero() { + if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { + panic(err) + } + bb.WriteByte('=') // padding just so that the hash is different next time + } + + return res +} diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl index f7c67c036d..30ee2d42b0 100644 --- 
a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl @@ -3,43 +3,13 @@ import ( "math/big" "math/bits" "runtime" - + "errors" "github.com/consensys/gnark-crypto/ecc" - {{- template "import_fr" . }} {{- template "import_curve" . }} "github.com/consensys/gnark/internal/utils" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -52,18 +22,33 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { +func linearCombCoeffs(n int) []fr.Element { + return bivariateRandomMonomials(n) +} + +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, n int) []fr.Element { + result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) + if n >= 1 { + result[0].SetOne() + } + if n >= 2 { + result[1].Set(a) + } + for i := 2; i < n; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] 
in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -73,8 +58,13 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { + /*if a[0].IsOne() { + A = A[1:] + a = a[1:] + }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -84,66 +74,127 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) +// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
+func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { + var nd1 curve.G1Affine + nd1.Neg(&d1) res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) + []curve.G1Affine{n1, nd1}, + []curve.G2Affine{d2, n2}) if err != nil { panic(err) } return res } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { +// returns ∑ rᵢAᵢ +func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() + var res curve.G1Affine + if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { + panic(err) } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return + return res } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() +// linearCombinationsG1 returns +// +// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] +// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] +// .... (truncated) +// +// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] +// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] +// .... (shifted) +// +// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. +// Also assumed that powers[0] = 1. 
// linearCombinationsG1 computes two correlated linear combinations of A over segments delimited by ends:
//
//	powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2]
//	+ powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2]
//	....                           (truncated)
//
//	powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1]
//	+ powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1]
//	....                           (shifted)
//
// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is an entry of ends,
// and that powers[0] = 1.
// The slices powers and A will be modified.
func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) {
	if ends[len(ends)-1] != len(A) || len(A) != len(powers) {
		panic("lengths mismatch")
	}

	// zero out the large coefficients: the last element of each segment
	// does not participate in the truncated sum
	for i := range ends {
		powers[ends[i]-1].SetZero()
	}

	msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}

	if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil {
		panic(err)
	}

	// rInvNeg ≔ -r⁻¹ where r ≔ powers[1]
	var rInvNeg fr.Element
	rInvNeg.Inverse(&powers[1])
	rInvNeg.Neg(&rInvNeg)
	prevEnd := 0

	// r⁻¹.truncated =
	// r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2]
	// + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2]
	// ...
	//
	// compute shifted as
	// - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ...
	// + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ...
	// + r⁻¹.truncated
	//
	// the loop rewires the first 2·len(ends) entries of powers/A into the
	// correction terms above so one small MSM finishes the job
	for i := range ends {
		powers[2*i].Mul(&powers[prevEnd], &rInvNeg)
		powers[2*i+1] = powers[ends[i]-2]
		A[2*i] = A[prevEnd]
		A[2*i+1] = A[ends[i]-1]
		prevEnd = ends[i]
	}
	powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated
	A[2*len(ends)] = truncated

	// TODO @Tabaie O(1) MSM worth it?
	if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil {
		panic(err)
	}
	return
}
// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i]
// for all applicable i. Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers).
// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱ⁻¹Aᵢ
// NOTE(review): the shifted sum's coefficients are rⁱ⁻¹ (not rⁱ): the MSM below pairs
// A[1:] with rPowers[:N-1], so truncated and shifted use the same coefficient for Aᵢ and Aᵢ₊₁.
func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) {

	N := len(A)

	// shifted = A[1] + r.A[2] + ... + rᴺ⁻².A[N-1]
	if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil {
		panic(err)
	}

	// truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0]
	var (
		x fr.Element
		i big.Int
	)
	x.Neg(&rPowers[N-2])
	x.BigInt(&i)
	truncated.
		ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1]
		Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1]

	rPowers[1].BigInt(&i)
	truncated.
		ScalarMultiplication(&truncated, &i). // r.shifted - rᴺ⁻¹.A[N-1]
		Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0]

	return
}
// valueUpdate is a proof that a contributor knows the scalar by which
// they multiplied an existing ceremony value.
type valueUpdate struct {
	contributionCommitment curve.G1Affine // x or [Xⱼ]₁: the contribution scalar committed in 𝔾₁
	contributionPok        curve.G2Affine // π ≔ x.r ∈ 𝔾₂: proof of knowledge of x, over the challenge-derived base r
}

// newValueUpdate produces values associated with contribution to an existing value.
// It samples a fresh random scalar, commits to it in 𝔾₁, and produces a proof of
// knowledge over the base derived from challenge and dst.
// The second output is toxic waste. It is the caller's responsibility to safely "dispose" of it.
func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) {
	if _, err := contributionValue.SetRandom(); err != nil {
		panic(err)
	}
	var contributionValueI big.Int
	contributionValue.BigInt(&contributionValueI)

	// commitment [x]₁ = x.g₁
	_, _, gen1, _ := curve.Generators()
	proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI)

	// proof of knowledge to commitment. Algorithm 3 from section 3.7
	pokBase := genR(proof.contributionCommitment, challenge, dst) // r
	proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI)

	return
}
// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17
// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution
// and previous commitment makes the new commitment.
// denom (the previous value) is assumed to be valid. No subgroup check and the like is performed on it.
func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error {
	// a contribution must either preserve or lack a 𝔾₂ representation, never add or remove one
	noG2 := denom.g2 == nil
	if noG2 != (num.g2 == nil) {
		return errors.New("erasing or creating g2 values")
	}

	if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() {
		return errors.New("contribution values subgroup check failed")
	}

	// verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7
	r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base
	_, _, g1, _ := curve.Generators()
	if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r
		return errors.New("contribution proof of knowledge verification failed")
	}

	// check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6.
	if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) {
		return errors.New("g2 update inconsistent")
	}

	// now verify that num₁/denom₁ = x ( = x/g1 = π/r )
	// have to use the latter value for the RHS because sameRatio needs both 𝔾₁ and 𝔾₂ values
	if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) {
		return errors.New("g1 update inconsistent")
	}

	return nil
}

// toRefs returns a slice of pointers to the elements of s.
func toRefs[T any](s []T) []*T {
	res := make([]*T, len(s))
	for i := range s {
		res[i] = &s[i]
	}
	return res
}

// areInSubGroup reports whether every element of s passes its subgroup check.
func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool {
	for i := range s {
		if !s[i].IsInSubGroup() {
			return false
		}
	}
	return true
}

// areInSubGroupG1 reports whether every 𝔾₁ point in s is in the correct subgroup.
func areInSubGroupG1(s []curve.G1Affine) bool {
	return areInSubGroup(toRefs(s))
}

// areInSubGroupG2 reports whether every 𝔾₂ point in s is in the correct subgroup.
func areInSubGroupG2(s []curve.G2Affine) bool {
	return areInSubGroup(toRefs(s))
}
+// all concatenated in the same slice +func bivariateRandomMonomials(ends ...int) []fr.Element { + if len(ends) == 0 { + return nil + } + + res := make([]fr.Element, ends[len(ends)-1]) + if _, err := res[1].SetRandom(); err != nil { + panic(err) + } + setPowers(res[:ends[0]]) + + if len(ends) == 1 { + return res + } + + y := make([]fr.Element, len(ends)) + if _, err := y[1].SetRandom(); err != nil { + panic(err) + } + setPowers(y) + + for d := 1; d < len(ends); d++ { + xdeg := ends[d] - ends[d-1] + if xdeg > ends[0] { + panic("impl detail: first maximum degree for x must be the greatest") + } + + for i := range xdeg { + res[ends[d-1]+i].Mul(&res[i], &y[d]) + } + } + + return res +} + +// sets x[i] = x[1]ⁱ +func setPowers(x []fr.Element) { + if len(x) == 0 { + return + } + x[0].SetOne() + for i := 2; i < len(x); i++ { + x[i].Mul(&x[i-1], &x[1]) + } +} + +func partialSums(s ...int) []int { + if len(s) == 0 { + return nil + } + sums := make([]int, len(s)) + sums[0] = s[0] + for i := 1; i < len(s); i++ { + sums[i] = sums[i-1] + s[i] + } + return sums +} + +func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { + var ( + bb bytes.Buffer + err error + ) + bb.Grow(len(hash) + len(beaconChallenge)) + bb.Write(hash) + bb.Write(beaconChallenge) + + res := make([]fr.Element, 1) + + allNonZero := func() bool { + for i := range res { + if res[i].IsZero() { + return false + } + } + return true + } + + // cryptographically unlikely for this to be run more than once + for !allNonZero() { + if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { + panic(err) + } + bb.WriteByte('=') // padding just so that the hash is different next time + } + + return res +} \ No newline at end of file From 181d0d7d2c767f9977d56a83265aa54e2c9996e3 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:30:54 -0600 Subject: [PATCH 063/105] build generify phase2 
--- backend/groth16/bls12-377/mpcsetup/phase2.go | 348 +++++++++++------ backend/groth16/bls12-381/mpcsetup/phase2.go | 348 +++++++++++------ backend/groth16/bls24-315/mpcsetup/phase2.go | 348 +++++++++++------ backend/groth16/bls24-317/mpcsetup/phase2.go | 348 +++++++++++------ backend/groth16/bn254/mpcsetup/phase2.go | 7 +- backend/groth16/bw6-633/mpcsetup/phase2.go | 348 +++++++++++------ backend/groth16/bw6-761/mpcsetup/phase2.go | 348 +++++++++++------ .../groth16/mpcsetup/phase2.go.tmpl | 351 ++++++++++++------ 8 files changed, 1593 insertions(+), 853 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase2.go b/backend/groth16/bls12-377/mpcsetup/phase2.go index bb4ab9a05e..b5647d47a7 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase2.go +++ b/backend/groth16/bls12-377/mpcsetup/phase2.go @@ -6,48 +6,174 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls12-377" + "math/big" + "slices" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + 
type Phase2 struct {
	Parameters struct {
		G1 struct {
			Delta    curve.G1Affine
			Z        []curve.G1Affine   // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2
			PKK      []curve.G1Affine   // PKK are the coefficients of the private witness, needed for the proving key. They have a denominator of δ
			SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment
		}
		G2 struct {
			Delta curve.G2Affine
			Sigma []curve.G2Affine // [σᵢ]₂ for each commitment (the scalar σᵢ itself is secret toxic waste)
		}
	}

	// Proofs of update correctness
	Sigmas []valueUpdate
	Delta  valueUpdate

	// Challenge is the hash of the PREVIOUS contribution
	Challenge []byte
}

// Verify checks that next is a valid contribution building on p:
// the challenge chain, the sizes of all parameter slices, subgroup membership
// of the new 𝔾₁ values, and the update proofs for each σᵢ and for δ.
// It mutates next only by setting next.Challenge.
func (p *Phase2) Verify(next *Phase2) error {
	challenge := p.hash()
	if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) {
		return errors.New("the challenge does not match the previous contribution's hash")
	}
	next.Challenge = challenge

	if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) ||
		len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) ||
		len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) ||
		len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) {
		return errors.New("contribution size mismatch")
	}

	// random coefficients folding many pairwise ratio checks into one
	r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1)

	verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error {
		g1Num := linearCombination(g1Numerator, r)
		g1Denom := linearCombination(g1Denominator, r)

		return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst)
	}

	// verify proof of knowledge of contributions to the σᵢ
	// and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i]
	for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment
		if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) {
			return errors.New("commitment proving key subgroup check failed")
		}

		if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil {
			return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err)
		}
	}

	// verify proof of knowledge of contribution to δ
	// and the correctness of updates to Parameters.G1/G2.Delta, PKK[i], and Z[i]
	if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) {
		return errors.New("derived values 𝔾₁ subgroup check failed")
	}

	// Z and PKK carry δ in the denominator, so their denom/num roles are swapped
	// relative to Delta itself
	denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK)
	num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK)
	if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil {
		return fmt.Errorf("failed to verify contribution to δ: %w", err)
	}

	return nil
}
// update applies the contribution scalars to the parameters:
// SigmaCKK[i] and G2.Sigma[i] are multiplied by σᵢ, both Delta values by δ,
// and Z and PKK (which carry a denominator of δ) by δ⁻¹.
// update modifies delta: it is inverted in place, so the caller's scalar is destroyed.
func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) {
	var I big.Int

	// scale multiplies a 𝔾₁ or 𝔾₂ point in place by the scalar currently held in I
	scale := func(point any) {
		switch p := point.(type) {
		case *curve.G1Affine:
			p.ScalarMultiplication(p, &I)
		case *curve.G2Affine:
			p.ScalarMultiplication(p, &I)
		default:
			panic("unknown type")
		}
	}

	for i := range sigma {
		sigma[i].BigInt(&I)
		s := p.Parameters.G1.SigmaCKK[i]
		for j := range s {
			scale(&s[j])
		}
		scale(&p.Parameters.G2.Sigma[i])
	}

	delta.BigInt(&I)
	scale(&p.Parameters.G2.Delta)
	scale(&p.Parameters.G1.Delta)

	delta.Inverse(delta)
	delta.BigInt(&I)
	for i := range p.Parameters.G1.Z {
		scale(&p.Parameters.G1.Z[i])
	}
	for i := range p.Parameters.G1.PKK {
		scale(&p.Parameters.G1.PKK[i])
	}
}

// Contribute adds this participant's randomness to the ceremony:
// it records the previous state's hash as the challenge, samples fresh δ and σᵢ
// with proofs of knowledge, and applies them via update.
// NOTE(review): p.Sigmas is indexed without a length check — assumes it was
// allocated to len(SigmaCKK) (done in Initialize); confirm for deserialized instances.
func (p *Phase2) Contribute() {
	p.Challenge = p.hash()

	// sample value contributions and provide correctness proofs
	var delta fr.Element
	p.Delta, delta = newValueUpdate(p.Challenge, 1)

	sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK))
	if len(sigma) > 255 {
		panic("too many commitments") // DST collision
	}
	for i := range sigma {
		p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i))
	}

	p.update(&delta, sigma)
}
+ coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... - internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. 
+ // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +259,101 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Set δ public key - 
c.PublicKey = newPublicKey(delta, c.Hash, 1) + evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]valueUpdate, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) - - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on 
// VerifyPhase2 for circuit described by r1cs
// using parameters from commons
// beaconChallenge is the output of the random beacon
// and c are the output from the contributors
// WARNING: the last contribution object will be modified
func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) {
	// a verifier recomputes the deterministic initial state and replays every contribution
	prev := new(Phase2)
	evals := prev.Initialize(r1cs, commons)
	for i := range c {
		if err := prev.Verify(c[i]); err != nil {
			return nil, nil, err
		}
		prev = c[i]
	}
	pk, vk := prev.Seal(commons, &evals, beaconChallenge)
	return pk, vk, nil
}

// hash returns the SHA-256 digest of the serialized contribution followed by
// its Challenge field.
// NOTE(review): the WriteTo error is discarded (hash writers don't fail) and
// Challenge is written separately — presumably WriteTo excludes it; confirm.
func (p *Phase2) hash() []byte {
	sha := sha256.New()
	p.WriteTo(sha)
	sha.Write(p.Challenge)
	return sha.Sum(nil)
}

// cloneAppend concatenates the given slices into a single freshly allocated slice.
func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine {
	l := 0
	for _, s := range s {
		l += len(s)
	}
	res := make([]curve.G1Affine, 0, l)
	for _, s := range s {
		res = append(res, s...)
	}
	return res
}
+ } + return res +} diff --git a/backend/groth16/bls12-381/mpcsetup/phase2.go b/backend/groth16/bls12-381/mpcsetup/phase2.go index 09610500c6..b9af189768 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase2.go +++ b/backend/groth16/bls12-381/mpcsetup/phase2.go @@ -6,48 +6,174 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls12-381" + "math/big" + "slices" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []valueUpdate + Delta valueUpdate + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + + verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { + g1Num := linearCombination(g1Numerator, r) + g1Denom := linearCombination(g1Denominator, r) + + return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { + return errors.New("commitment proving key subgroup check failed") + } + + if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], 
// update applies the contribution scalars to the parameters:
// SigmaCKK[i] and G2.Sigma[i] are multiplied by σᵢ, both Delta values by δ,
// and Z and PKK (which carry a denominator of δ) by δ⁻¹.
// update modifies delta: it is inverted in place, so the caller's scalar is destroyed.
func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) {
	var I big.Int

	// scale multiplies a 𝔾₁ or 𝔾₂ point in place by the scalar currently held in I
	scale := func(point any) {
		switch p := point.(type) {
		case *curve.G1Affine:
			p.ScalarMultiplication(p, &I)
		case *curve.G2Affine:
			p.ScalarMultiplication(p, &I)
		default:
			panic("unknown type")
		}
	}

	for i := range sigma {
		sigma[i].BigInt(&I)
		s := p.Parameters.G1.SigmaCKK[i]
		for j := range s {
			scale(&s[j])
		}
		scale(&p.Parameters.G2.Sigma[i])
	}

	delta.BigInt(&I)
	scale(&p.Parameters.G2.Delta)
	scale(&p.Parameters.G1.Delta)

	delta.Inverse(delta)
	delta.BigInt(&I)
	for i := range p.Parameters.G1.Z {
		scale(&p.Parameters.G1.Z[i])
	}
	for i := range p.Parameters.G1.PKK {
		scale(&p.Parameters.G1.PKK[i])
	}
}
newValueUpdate(p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) +} + +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +215,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
- internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. 
+ // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +259,101 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Set δ public key - 
c.PublicKey = newPublicKey(delta, c.Hash, 1) + evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]valueUpdate, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) - - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on 
previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 for circuit described by r1cs +// using parameters from commons +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return pk, vk, nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { + l := 0 + for _, s := range s { + l += len(s) + } + res := make([]curve.G1Affine, 0, l) + for _, s := range s { + res = append(res, s...) 
+ } + return res +} diff --git a/backend/groth16/bls24-315/mpcsetup/phase2.go b/backend/groth16/bls24-315/mpcsetup/phase2.go index 8d11d19a06..6fb1c91dee 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase2.go +++ b/backend/groth16/bls24-315/mpcsetup/phase2.go @@ -6,48 +6,174 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls24-315" + "math/big" + "slices" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []valueUpdate + Delta valueUpdate + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + + verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { + g1Num := linearCombination(g1Numerator, r) + g1Denom := linearCombination(g1Denominator, r) + + return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { + return errors.New("commitment proving key subgroup check failed") + } + + if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], 
&p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { + return errors.New("derived values 𝔾₁ subgroup check failed") + } + + denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) + num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) + if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + + for i := range sigma { + sigma[i].BigInt(&I) + s := p.Parameters.G1.SigmaCKK[i] + for j := range s { + scale(&s[j]) + } + scale(&p.Parameters.G2.Sigma[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + for i := range p.Parameters.G1.Z { + scale(&p.Parameters.G1.Z[i]) + } + for i := range p.Parameters.G1.PKK { + scale(&p.Parameters.G1.PKK[i]) + } } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta, delta = 
newValueUpdate(p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) +} + +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +215,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
- internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. 
+ // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +259,101 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Set δ public key - 
c.PublicKey = newPublicKey(delta, c.Hash, 1) + evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]valueUpdate, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) - - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on 
previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 for circuit described by r1cs +// using parameters from commons +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return pk, vk, nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { + l := 0 + for _, s := range s { + l += len(s) + } + res := make([]curve.G1Affine, 0, l) + for _, s := range s { + res = append(res, s...) 
+ } + return res +} diff --git a/backend/groth16/bls24-317/mpcsetup/phase2.go b/backend/groth16/bls24-317/mpcsetup/phase2.go index 5521eadb9b..305154621c 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase2.go +++ b/backend/groth16/bls24-317/mpcsetup/phase2.go @@ -6,48 +6,174 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls24-317" + "math/big" + "slices" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []valueUpdate + Delta valueUpdate + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + + verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { + g1Num := linearCombination(g1Numerator, r) + g1Denom := linearCombination(g1Denominator, r) + + return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { + return errors.New("commitment proving key subgroup check failed") + } + + if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], 
&p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { + return errors.New("derived values 𝔾₁ subgroup check failed") + } + + denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) + num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) + if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + + for i := range sigma { + sigma[i].BigInt(&I) + s := p.Parameters.G1.SigmaCKK[i] + for j := range s { + scale(&s[j]) + } + scale(&p.Parameters.G2.Sigma[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + for i := range p.Parameters.G1.Z { + scale(&p.Parameters.G1.Z[i]) + } + for i := range p.Parameters.G1.PKK { + scale(&p.Parameters.G1.PKK[i]) + } } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta, delta = 
newValueUpdate(p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) +} + +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +215,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
- internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. 
+ // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +259,101 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Set δ public key - 
c.PublicKey = newPublicKey(delta, c.Hash, 1) + evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]valueUpdate, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) - - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on 
previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 for circuit described by r1cs +// using parameters from commons +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return pk, vk, nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { + l := 0 + for _, s := range s { + l += len(s) + } + res := make([]curve.G1Affine, 0, l) + for _, s := range s { + res = append(res, s...) 
+ } + return res +} diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index f826561cec..6f30012d96 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -10,15 +10,14 @@ import ( "crypto/sha256" "errors" "fmt" + curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" + "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bn254" "math/big" "slices" - - curve "github.com/consensys/gnark-crypto/ecc/bn254" - "github.com/consensys/gnark-crypto/ecc/bn254/fr" - "github.com/consensys/gnark/constraint" ) // Phase2Evaluations components of the circuit keys diff --git a/backend/groth16/bw6-633/mpcsetup/phase2.go b/backend/groth16/bw6-633/mpcsetup/phase2.go index 10160fc8ea..530dbcfd55 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase2.go +++ b/backend/groth16/bw6-633/mpcsetup/phase2.go @@ -6,48 +6,174 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bw6-633" + "math/big" + "slices" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and 
commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []valueUpdate + Delta valueUpdate + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + + verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { + g1Num := linearCombination(g1Numerator, r) + g1Denom := linearCombination(g1Denominator, r) + + return 
update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { + return errors.New("commitment proving key subgroup check failed") + } + + if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { + return errors.New("derived values 𝔾₁ subgroup check failed") + } + + denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) + num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) + if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + + for i := range sigma { + sigma[i].BigInt(&I) + s := p.Parameters.G1.SigmaCKK[i] + for j := range s { + scale(&s[j]) + } + scale(&p.Parameters.G2.Sigma[i]) + } + + 
delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + for i := range p.Parameters.G1.Z { + scale(&p.Parameters.G1.Z[i]) + } + for i := range p.Parameters.G1.PKK { + scale(&p.Parameters.G1.PKK[i]) + } } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta, delta = newValueUpdate(p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) +} + +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +215,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... 
where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... - internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. 
+ // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +259,101 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Set δ public key - 
c.PublicKey = newPublicKey(delta, c.Hash, 1) + evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]valueUpdate, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) - - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on 
previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 for circuit described by r1cs +// using parameters from commons +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return pk, vk, nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { + l := 0 + for _, s := range s { + l += len(s) + } + res := make([]curve.G1Affine, 0, l) + for _, s := range s { + res = append(res, s...) 
+ } + return res +} diff --git a/backend/groth16/bw6-761/mpcsetup/phase2.go b/backend/groth16/bw6-761/mpcsetup/phase2.go index ef9d99afc2..c5cfad1b23 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase2.go +++ b/backend/groth16/bw6-761/mpcsetup/phase2.go @@ -6,48 +6,174 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bw6-761" + "math/big" + "slices" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []valueUpdate + Delta valueUpdate + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + + verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { + g1Num := linearCombination(g1Numerator, r) + g1Denom := linearCombination(g1Denominator, r) + + return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { + return errors.New("commitment proving key subgroup check failed") + } + + if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], 
&p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { + return errors.New("derived values 𝔾₁ subgroup check failed") + } + + denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) + num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) + if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + + for i := range sigma { + sigma[i].BigInt(&I) + s := p.Parameters.G1.SigmaCKK[i] + for j := range s { + scale(&s[j]) + } + scale(&p.Parameters.G2.Sigma[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + for i := range p.Parameters.G1.Z { + scale(&p.Parameters.G1.Z[i]) + } + for i := range p.Parameters.G1.PKK { + scale(&p.Parameters.G1.PKK[i]) + } } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta, delta = 
newValueUpdate(p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) +} + +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +215,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
- internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. 
+ // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +259,101 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Set δ public key - 
c.PublicKey = newPublicKey(delta, c.Hash, 1) + evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]valueUpdate, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) - - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on 
previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 for circuit described by r1cs +// using parameters from commons +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return pk, vk, nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { + l := 0 + for _, s := range s { + l += len(s) + } + res := make([]curve.G1Affine, 0, l) + for _, s := range s { + res = append(res, s...) 
+ } + return res +} diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl index 0afb32db79..2bf6316a5f 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl @@ -1,48 +1,173 @@ import ( + "bytes" "crypto/sha256" "errors" "math/big" - + "fmt" + "slices" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" - {{- template "import_fr" . }} {{- template "import_curve" . }} {{- template "import_backend_cs" . }} ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []valueUpdate + Delta valueUpdate + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + + verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { + g1Num := linearCombination(g1Numerator, r) + g1Denom := linearCombination(g1Denominator, r) + + return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { + return errors.New("commitment proving key subgroup check failed") + } + + if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], 
&p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) + } + } + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { + return errors.New("derived values 𝔾₁ subgroup check failed") + } + + denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) + num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) + if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") } } - PublicKey PublicKey - Hash []byte + + for i := range sigma { + sigma[i].BigInt(&I) + s := p.Parameters.G1.SigmaCKK[i] + for j := range s { + scale(&s[j]) + } + scale(&p.Parameters.G2.Sigma[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + for i := range p.Parameters.G1.Z { + scale(&p.Parameters.G1.Z[i]) + } + for i := range p.Parameters.G1.PKK { + scale(&p.Parameters.G1.PKK[i]) + } } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta, delta = 
newValueUpdate(p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) +} + +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -84,26 +209,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
- internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() - for c := it.Next(); c!=nil; c = it.Next() { + for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. 
+ // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -121,128 +250,104 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } i++ } - + // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Set δ 
public key - c.PublicKey = newPublicKey(delta, c.Hash, 1) + evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]valueUpdate, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) - - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} - -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on 
previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 for circuit described by r1cs +// using parameters from commons +// beaconChallenge is the output of the random beacon +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + for i := range c { + if err := prev.Verify(c[i]); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return pk, vk, nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + p.WriteTo(sha) + sha.Write(p.Challenge) return sha.Sum(nil) } + +func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { + l := 0 + for _, s := range s { + l += len(s) + } + res := make([]curve.G1Affine, 0, l) + for _, s := range s { + res = append(res, s...) 
+ } + return res +} \ No newline at end of file From bfe1254e058cb9cff73e217a1dec9ab481dd2fc8 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:34:24 -0600 Subject: [PATCH 064/105] build generify setup --- backend/groth16/bls12-377/mpcsetup/setup.go | 62 +++++++++++++----- backend/groth16/bls12-381/mpcsetup/setup.go | 62 +++++++++++++----- backend/groth16/bls24-315/mpcsetup/setup.go | 62 +++++++++++++----- backend/groth16/bls24-317/mpcsetup/setup.go | 62 +++++++++++++----- backend/groth16/bw6-633/mpcsetup/setup.go | 62 +++++++++++++----- backend/groth16/bw6-761/mpcsetup/setup.go | 62 +++++++++++++----- .../zkpschemes/groth16/mpcsetup/setup.go.tmpl | 64 ++++++++++++++----- 7 files changed, 323 insertions(+), 113 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/setup.go b/backend/groth16/bls12-377/mpcsetup/setup.go index 1796c00203..0b1a5afd62 100644 --- a/backend/groth16/bls12-377/mpcsetup/setup.go +++ b/backend/groth16/bls12-377/mpcsetup/setup.go @@ -8,23 +8,42 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/fft" - groth16 "github.com/consensys/gnark/backend/groth16/bls12-377" + "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/pedersen" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bls12-377" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. 
+// The result will be an INVALID Phase1 object, since no proof of correctness is produced. +func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +88,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = 
make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 + vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bls12-381/mpcsetup/setup.go b/backend/groth16/bls12-381/mpcsetup/setup.go index 64e42f4889..48708f63fb 100644 --- a/backend/groth16/bls12-381/mpcsetup/setup.go +++ b/backend/groth16/bls12-381/mpcsetup/setup.go @@ -8,23 +8,42 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/fft" - groth16 "github.com/consensys/gnark/backend/groth16/bls12-381" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/pedersen" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bls12-381" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +88,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 
+ vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bls24-315/mpcsetup/setup.go b/backend/groth16/bls24-315/mpcsetup/setup.go index 26ca9fcc78..be7e152ccd 100644 --- a/backend/groth16/bls24-315/mpcsetup/setup.go +++ b/backend/groth16/bls24-315/mpcsetup/setup.go @@ -8,23 +8,42 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/fft" - groth16 "github.com/consensys/gnark/backend/groth16/bls24-315" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/pedersen" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bls24-315" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +88,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 
+ vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bls24-317/mpcsetup/setup.go b/backend/groth16/bls24-317/mpcsetup/setup.go index c4620fb3c8..42b6c9e4b7 100644 --- a/backend/groth16/bls24-317/mpcsetup/setup.go +++ b/backend/groth16/bls24-317/mpcsetup/setup.go @@ -8,23 +8,42 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/fft" - groth16 "github.com/consensys/gnark/backend/groth16/bls24-317" + "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/pedersen" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bls24-317" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +88,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 
+ vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bw6-633/mpcsetup/setup.go b/backend/groth16/bw6-633/mpcsetup/setup.go index 811e75da67..c5036ffa21 100644 --- a/backend/groth16/bw6-633/mpcsetup/setup.go +++ b/backend/groth16/bw6-633/mpcsetup/setup.go @@ -8,23 +8,42 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/fft" - groth16 "github.com/consensys/gnark/backend/groth16/bw6-633" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/pedersen" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bw6-633" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +88,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 
+ vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bw6-761/mpcsetup/setup.go b/backend/groth16/bw6-761/mpcsetup/setup.go index c16abc93b2..bebfdca2dc 100644 --- a/backend/groth16/bw6-761/mpcsetup/setup.go +++ b/backend/groth16/bw6-761/mpcsetup/setup.go @@ -8,23 +8,42 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/fft" - groth16 "github.com/consensys/gnark/backend/groth16/bw6-761" + "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/pedersen" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bw6-761" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +88,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 
+ vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl index e60410b467..89645c1154 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl @@ -1,24 +1,43 @@ import ( - groth16 "github.com/consensys/gnark/backend/groth16/{{toLower .Curve}}" + groth16Impl "github.com/consensys/gnark/backend/groth16/{{toLower .Curve}}" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/fr/pedersen" {{- template "import_curve" . }} {{- template "import_fft" . }} ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// The inner workings of the random beacon are out of scope. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -63,18 +82,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 
+ vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk -} + return &pk, &vk +} \ No newline at end of file From 198da47ae2bfc1d6ddf14b1d333f0c0e46c22bc0 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:38:13 -0600 Subject: [PATCH 065/105] fix benchmarking --- backend/groth16/bn254/mpcsetup/setup_test.go | 18 ++++++++++-------- .../groth16/mpcsetup/setup_test.go.tmpl | 5 +++++ 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index f5ddda2c2e..16fe9b265d 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -104,7 +104,6 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { proveVerifyCircuit(t, pk, vk) } -/* func BenchmarkPhase1(b *testing.B) { const power = 14 @@ -129,9 +128,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - var srs1 Phase1 - srs1.Initialize(1 << power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -142,22 +142,24 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < 
b.N; i++ { - srs2.Contribute() + p.Contribute() } }) } -*/ + // Circuit defines a pre-image knowledge proof // mimc(secret preImage) = public hash type Circuit struct { diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl index a36c0c1b9d..20c749a4f1 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl @@ -1,10 +1,15 @@ import ( "testing" + "bytes" + "github.com/consensys/gnark-crypto/ecc" {{- template "import_fr" . }} {{- template "import_curve" . }} {{- template "import_backend_cs" . }} + "io" + "slices" + "sync" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/frontend" "github.com/consensys/gnark/frontend/cs/r1cs" From 757df84eaff582d1ecf8a5133bdbaf4c4a84c98b Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:40:03 -0600 Subject: [PATCH 066/105] build generify setup_test --- .../groth16/bls12-377/mpcsetup/setup_test.go | 187 ++++++++++------- .../groth16/bls12-381/mpcsetup/setup_test.go | 187 ++++++++++------- .../groth16/bls24-315/mpcsetup/setup_test.go | 187 ++++++++++------- .../groth16/bls24-317/mpcsetup/setup_test.go | 187 ++++++++++------- .../groth16/bw6-633/mpcsetup/setup_test.go | 187 ++++++++++------- .../groth16/bw6-761/mpcsetup/setup_test.go | 187 ++++++++++------- .../groth16/mpcsetup/setup_test.go.tmpl | 192 ++++++++++-------- 7 files changed, 769 insertions(+), 545 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/setup_test.go b/backend/groth16/bls12-377/mpcsetup/setup_test.go index 24204f31ca..9db22ddaca 100644 --- a/backend/groth16/bls12-377/mpcsetup/setup_test.go +++ b/backend/groth16/bls12-377/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + 
"github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" cs "github.com/consensys/gnark/constraint/bls12-377" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,83 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit(t) - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs2.clone() + // Make contributions for serialized + for i := range phase1 { + if i == 0 { // no "predecessor" to the first contribution + p1.Initialize(domainSize) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + // Prepare for phase-2 + for i := range phase2 { + if i == 0 { + p2.Initialize(ccs, &srsCommons) + } + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +109,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +128,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +142,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +177,45 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) 
+func assignCircuit() frontend.Circuit { - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) + return &Circuit{PreImage: preImage, Hash: hash} + +} - return r +func getTestCircuit(t *testing.T) *cs.R1CS { + return sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + require.NoError(t, err) + return ccs.(*cs.R1CS) + })() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bls12-381/mpcsetup/setup_test.go b/backend/groth16/bls12-381/mpcsetup/setup_test.go index 37a20a1d8f..23ac6ce164 100644 --- a/backend/groth16/bls12-381/mpcsetup/setup_test.go +++ b/backend/groth16/bls12-381/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" cs "github.com/consensys/gnark/constraint/bls12-381" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,83 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit(t) - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs2.clone() + // Make contributions for serialized + for i := range phase1 { + if i == 0 { // no "predecessor" to the first contribution + p1.Initialize(domainSize) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + // Prepare for phase-2 + for i := range phase2 { + if i == 0 { + p2.Initialize(ccs, &srsCommons) + } + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +109,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +128,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +142,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +177,45 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) 
+func assignCircuit() frontend.Circuit { - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) + return &Circuit{PreImage: preImage, Hash: hash} + +} - return r +func getTestCircuit(t *testing.T) *cs.R1CS { + return sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + require.NoError(t, err) + return ccs.(*cs.R1CS) + })() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bls24-315/mpcsetup/setup_test.go b/backend/groth16/bls24-315/mpcsetup/setup_test.go index 5a8b6d63de..37ceac87cb 100644 --- a/backend/groth16/bls24-315/mpcsetup/setup_test.go +++ b/backend/groth16/bls24-315/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" cs "github.com/consensys/gnark/constraint/bls24-315" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,83 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit(t) - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs2.clone() + // Make contributions for serialized + for i := range phase1 { + if i == 0 { // no "predecessor" to the first contribution + p1.Initialize(domainSize) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + // Prepare for phase-2 + for i := range phase2 { + if i == 0 { + p2.Initialize(ccs, &srsCommons) + } + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +109,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +128,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +142,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +177,45 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) 
+func assignCircuit() frontend.Circuit { - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) + return &Circuit{PreImage: preImage, Hash: hash} + +} - return r +func getTestCircuit(t *testing.T) *cs.R1CS { + return sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + require.NoError(t, err) + return ccs.(*cs.R1CS) + })() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bls24-317/mpcsetup/setup_test.go b/backend/groth16/bls24-317/mpcsetup/setup_test.go index 98e466aa90..acaa7f854f 100644 --- a/backend/groth16/bls24-317/mpcsetup/setup_test.go +++ b/backend/groth16/bls24-317/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" cs "github.com/consensys/gnark/constraint/bls24-317" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,83 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit(t) - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs2.clone() + // Make contributions for serialized + for i := range phase1 { + if i == 0 { // no "predecessor" to the first contribution + p1.Initialize(domainSize) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + // Prepare for phase-2 + for i := range phase2 { + if i == 0 { + p2.Initialize(ccs, &srsCommons) + } + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +109,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +128,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +142,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +177,45 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) 
+func assignCircuit() frontend.Circuit { - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) + return &Circuit{PreImage: preImage, Hash: hash} + +} - return r +func getTestCircuit(t *testing.T) *cs.R1CS { + return sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + require.NoError(t, err) + return ccs.(*cs.R1CS) + })() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bw6-633/mpcsetup/setup_test.go b/backend/groth16/bw6-633/mpcsetup/setup_test.go index 18c1e27454..c7c44b2184 100644 --- a/backend/groth16/bw6-633/mpcsetup/setup_test.go +++ b/backend/groth16/bw6-633/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" cs "github.com/consensys/gnark/constraint/bw6-633" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,83 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit(t) - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs2.clone() + // Make contributions for serialized + for i := range phase1 { + if i == 0 { // no "predecessor" to the first contribution + p1.Initialize(domainSize) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + // Prepare for phase-2 + for i := range phase2 { + if i == 0 { + p2.Initialize(ccs, &srsCommons) + } + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +109,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +128,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +142,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +177,45 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) 
+func assignCircuit() frontend.Circuit { - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) + return &Circuit{PreImage: preImage, Hash: hash} + +} - return r +func getTestCircuit(t *testing.T) *cs.R1CS { + return sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + require.NoError(t, err) + return ccs.(*cs.R1CS) + })() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bw6-761/mpcsetup/setup_test.go b/backend/groth16/bw6-761/mpcsetup/setup_test.go index 4156903579..3431b53160 100644 --- a/backend/groth16/bw6-761/mpcsetup/setup_test.go +++ b/backend/groth16/bw6-761/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" cs "github.com/consensys/gnark/constraint/bw6-761" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,83 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit(t) - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. 
- prev := srs2.clone() + // Make contributions for serialized + for i := range phase1 { + if i == 0 { // no "predecessor" to the first contribution + p1.Initialize(domainSize) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + // Prepare for phase-2 + for i := range phase2 { + if i == 0 { + p2.Initialize(ccs, &srsCommons) + } + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +109,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +128,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +142,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +177,45 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) 
+func assignCircuit() frontend.Circuit { - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) + return &Circuit{PreImage: preImage, Hash: hash} + +} - return r +func getTestCircuit(t *testing.T) *cs.R1CS { + return sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + require.NoError(t, err) + return ccs.(*cs.R1CS) + })() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl index 20c749a4f1..708f4b4df8 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl @@ -2,14 +2,14 @@ import ( "testing" "bytes" "github.com/consensys/gnark-crypto/ecc" + "io" + "slices" + "sync" {{- template "import_fr" . }} {{- template "import_curve" . }} {{- template "import_backend_cs" . 
}} - "io" - "slices" - "sync" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/frontend" "github.com/consensys/gnark/frontend/cs/r1cs" @@ -19,76 +19,83 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - {{- if ne (toLower .Curve) "bn254" }} - if testing.Short() { - t.Skip() - } - {{- end}} - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit(t) - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } 
- // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs2.clone() + // Make contributions for serialized + for i := range phase1 { + if i == 0 { // no "predecessor" to the first contribution + p1.Initialize(domainSize) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) - pubWitness, err := witness.Public() - assert.NoError(err) + // Prepare for phase-2 + for i := range phase2 { + if i == 0 { + p2.Initialize(ccs, &srsCommons) + } + p2.Contribute() + serialized[i] = serialize(&p2) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - err = groth16.Verify(proof, &vk, pubWitness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) + + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -96,13 +103,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -113,8 +122,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -125,17 +136,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -158,32 +171,45 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) 
+func assignCircuit() frontend.Circuit { + + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta + return &Circuit{PreImage: preImage, Hash: hash} - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) +} - return r +func getTestCircuit(t *testing.T) *cs.R1CS { + return sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + require.NoError(t, err) + return ccs.(*cs.R1CS) + })() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } From 77b727ad08656890550d9b9ce02c1fd9d9b5e4ac Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:41:51 -0600 Subject: [PATCH 067/105] refactor algo_utils -> utils --- std/gkr/api.go | 4 ++-- std/gkr/compile.go | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/std/gkr/api.go b/std/gkr/api.go index b46cfad8e5..03d8d8f71a 100644 --- a/std/gkr/api.go +++ b/std/gkr/api.go @@ -2,7 +2,7 @@ package gkr import ( "github.com/consensys/gnark/constraint" - "github.com/consensys/gnark/internal/algo_utils" + "github.com/consensys/gnark/internal/utils" ) func frontendVarToInt(a constraint.GkrVariable) int { @@ -12,7 +12,7 @@ func frontendVarToInt(a constraint.GkrVariable) int { func (api *API) NamedGate(gate string, in ...constraint.GkrVariable) constraint.GkrVariable { api.toStore.Circuit = append(api.toStore.Circuit, constraint.GkrWire{ Gate: gate, - Inputs: algo_utils.Map(in, frontendVarToInt), + Inputs: utils.Map(in, frontendVarToInt), }) api.assignments = append(api.assignments, nil) return constraint.GkrVariable(len(api.toStore.Circuit) - 1) diff --git a/std/gkr/compile.go b/std/gkr/compile.go index 31a790c56d..265f0fb607 100644 --- a/std/gkr/compile.go +++ b/std/gkr/compile.go @@ -7,7 +7,7 @@ import ( "github.com/consensys/gnark/constraint" "github.com/consensys/gnark/constraint/solver" "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/internal/algo_utils" + 
"github.com/consensys/gnark/internal/utils" fiatshamir "github.com/consensys/gnark/std/fiat-shamir" "github.com/consensys/gnark/std/hash" ) @@ -153,7 +153,7 @@ func (api *API) Solve(parentApi frontend.API) (Solution, error) { // Export returns the values of an output variable across all instances func (s Solution) Export(v frontend.Variable) []frontend.Variable { - return algo_utils.Map(s.permutations.SortedInstances, algo_utils.SliceAt(s.assignments[v.(constraint.GkrVariable)])) + return utils.Map(s.permutations.SortedInstances, utils.SliceAt(s.assignments[v.(constraint.GkrVariable)])) } // Verify encodes the verification circuitry for the GKR circuit @@ -183,7 +183,7 @@ func (s Solution) Verify(hashName string, initialChallenges ...frontend.Variable } s.toStore.ProveHintID = solver.GetHintID(proveHintPlaceholder) - forSnarkSorted := algo_utils.MapRange(0, len(s.toStore.Circuit), slicePtrAt(forSnark.circuit)) + forSnarkSorted := utils.MapRange(0, len(s.toStore.Circuit), slicePtrAt(forSnark.circuit)) if proof, err = DeserializeProof(forSnarkSorted, proofSerialized); err != nil { return err @@ -224,7 +224,7 @@ func newCircuitDataForSnark(info constraint.GkrInfo, assignment assignment) circ w := info.Circuit[i] circuit[i] = Wire{ Gate: ite(w.IsInput(), Gates[w.Gate], Gate(IdentityGate{})), - Inputs: algo_utils.Map(w.Inputs, circuitAt), + Inputs: utils.Map(w.Inputs, circuitAt), nbUniqueOutputs: w.NbUniqueOutputs, } snarkAssignment[&circuit[i]] = assignment[i] @@ -247,10 +247,10 @@ func (a assignment) NbInstances() int { } func (a assignment) Permute(p constraint.GkrPermutations) { - algo_utils.Permute(a, p.WiresPermutation) + utils.Permute(a, p.WiresPermutation) for i := range a { if a[i] != nil { - algo_utils.Permute(a[i], p.InstancesPermutation) + utils.Permute(a[i], p.InstancesPermutation) } } } From ca40c18407d20ddafe06f02bba5f2e5a2c2ab4b9 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:43:07 -0600 
Subject: [PATCH 068/105] build regenerate tinyfield --- internal/tinyfield/element_ops_purego.go | 116 ----------------------- 1 file changed, 116 deletions(-) delete mode 100644 internal/tinyfield/element_ops_purego.go diff --git a/internal/tinyfield/element_ops_purego.go b/internal/tinyfield/element_ops_purego.go deleted file mode 100644 index 609d9cbd78..0000000000 --- a/internal/tinyfield/element_ops_purego.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2020-2024 ConsenSys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. - -// Code generated by consensys/gnark-crypto DO NOT EDIT - -package tinyfield - -import "math/bits" - -// MulBy3 x *= 3 (mod q) -func MulBy3(x *Element) { - var y Element - y.SetUint64(3) - x.Mul(x, &y) -} - -// MulBy5 x *= 5 (mod q) -func MulBy5(x *Element) { - var y Element - y.SetUint64(5) - x.Mul(x, &y) -} - -// MulBy13 x *= 13 (mod q) -func MulBy13(x *Element) { - var y Element - y.SetUint64(13) - x.Mul(x, &y) -} - -// Butterfly sets -// -// a = a + b (mod q) -// b = a - b (mod q) -func Butterfly(a, b *Element) { - _butterflyGeneric(a, b) -} - -func fromMont(z *Element) { - _fromMontGeneric(z) -} - -func reduce(z *Element) { - _reduceGeneric(z) -} - -// Mul z = x * y (mod q) -// -// x and y must be less than q -func (z *Element) Mul(x, y *Element) *Element { - - // In fact, since the modulus R fits on one register, the CIOS algorithm gets reduced to standard REDC (textbook Montgomery reduction): - // hi, lo := x * y - // m := (lo * qInvNeg) mod R - // (*) r := (hi * R + lo + m * q) / R - // reduce r if necessary - - // On the emphasized line, we get r = hi + (lo + m * q) / R - // If we write hi2, lo2 = m * q then R | m * q - lo2 ⇒ R | (lo * qInvNeg) q - lo2 = -lo - lo2 - // This shows lo + lo2 = 0 mod R. i.e. lo + lo2 = 0 if lo = 0 and R otherwise. 
- // Which finally gives (lo + m * q) / R = (lo + lo2 + R hi2) / R = hi2 + (lo+lo2) / R = hi2 + (lo != 0) - // This "optimization" lets us do away with one MUL instruction on ARM architectures and is available for all q < R. - - var r uint64 - hi, lo := bits.Mul64(x[0], y[0]) - if lo != 0 { - hi++ // x[0] * y[0] ≤ 2¹²⁸ - 2⁶⁵ + 1, meaning hi ≤ 2⁶⁴ - 2 so no need to worry about overflow - } - m := lo * qInvNeg - hi2, _ := bits.Mul64(m, q) - r, carry := bits.Add64(hi2, hi, 0) - - if carry != 0 || r >= q { - // we need to reduce - r -= q - } - z[0] = r - - return z -} - -// Square z = x * x (mod q) -// -// x must be less than q -func (z *Element) Square(x *Element) *Element { - // see Mul for algorithm documentation - - // In fact, since the modulus R fits on one register, the CIOS algorithm gets reduced to standard REDC (textbook Montgomery reduction): - // hi, lo := x * y - // m := (lo * qInvNeg) mod R - // (*) r := (hi * R + lo + m * q) / R - // reduce r if necessary - - // On the emphasized line, we get r = hi + (lo + m * q) / R - // If we write hi2, lo2 = m * q then R | m * q - lo2 ⇒ R | (lo * qInvNeg) q - lo2 = -lo - lo2 - // This shows lo + lo2 = 0 mod R. i.e. lo + lo2 = 0 if lo = 0 and R otherwise. - // Which finally gives (lo + m * q) / R = (lo + lo2 + R hi2) / R = hi2 + (lo+lo2) / R = hi2 + (lo != 0) - // This "optimization" lets us do away with one MUL instruction on ARM architectures and is available for all q < R. 
- - var r uint64 - hi, lo := bits.Mul64(x[0], x[0]) - if lo != 0 { - hi++ // x[0] * y[0] ≤ 2¹²⁸ - 2⁶⁵ + 1, meaning hi ≤ 2⁶⁴ - 2 so no need to worry about overflow - } - m := lo * qInvNeg - hi2, _ := bits.Mul64(m, q) - r, carry := bits.Add64(hi2, hi, 0) - - if carry != 0 || r >= q { - // we need to reduce - r -= q - } - z[0] = r - - return z -} From 76cb260b8bc470d0570ac2cfaf4921cac6922274 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:46:46 -0600 Subject: [PATCH 069/105] build gnark-crypto dependency --- go.mod | 6 ++---- go.sum | 7 +------ 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/go.mod b/go.mod index f5b5106d52..ae4b023251 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/blang/semver/v4 v4.0.0 github.com/consensys/bavard v0.1.24 github.com/consensys/compress v0.2.5 - github.com/consensys/gnark-crypto v0.14.1-0.20241122181107-03e007d865c0 + github.com/consensys/gnark-crypto v0.14.1-0.20241218224600-9f5ee8568149 github.com/fxamacker/cbor/v2 v2.7.0 github.com/google/go-cmp v0.6.0 github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 @@ -36,6 +36,4 @@ require ( golang.org/x/sys v0.28.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect rsc.io/tmplfunc v0.0.3 // indirect -) - -replace github.com/consensys/gnark-crypto => /Users/arya/gnark-crypto +) \ No newline at end of file diff --git a/go.sum b/go.sum index 51a0a31e84..834bb9da21 100644 --- a/go.sum +++ b/go.sum @@ -61,8 +61,7 @@ github.com/consensys/bavard v0.1.24 h1:Lfe+bjYbpaoT7K5JTFoMi5wo9V4REGLvQQbHmatoN github.com/consensys/bavard v0.1.24/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs= github.com/consensys/compress v0.2.5 h1:gJr1hKzbOD36JFsF1AN8lfXz1yevnJi1YolffY19Ntk= github.com/consensys/compress v0.2.5/go.mod h1:pyM+ZXiNUh7/0+AUjUf9RKUM6vSH7T/fsn5LLS0j1Tk= -github.com/consensys/gnark-crypto v0.14.1-0.20241122181107-03e007d865c0 h1:uFZaZWG0FOoiFN3fAQzH2JXDuybdNwiJzBujy81YtU4= 
-github.com/consensys/gnark-crypto v0.14.1-0.20241122181107-03e007d865c0/go.mod h1:F/hJyWBcTr1sWeifAKfEN3aVb3G4U5zheEC8IbWQun4= +github.com/consensys/gnark-crypto v0.14.1-0.20241218224600-9f5ee8568149/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= @@ -306,8 +305,6 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= -golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -466,8 +463,6 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg= -golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= golang.org/x/sys v0.28.0/go.mod 
h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= From c146e7df99ebadd58b7324bf128f9125537e9ddb Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 17:47:42 -0600 Subject: [PATCH 070/105] build go mod too tidy --- go.sum | 1 + 1 file changed, 1 insertion(+) diff --git a/go.sum b/go.sum index 834bb9da21..1d89b277a4 100644 --- a/go.sum +++ b/go.sum @@ -61,6 +61,7 @@ github.com/consensys/bavard v0.1.24 h1:Lfe+bjYbpaoT7K5JTFoMi5wo9V4REGLvQQbHmatoN github.com/consensys/bavard v0.1.24/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs= github.com/consensys/compress v0.2.5 h1:gJr1hKzbOD36JFsF1AN8lfXz1yevnJi1YolffY19Ntk= github.com/consensys/compress v0.2.5/go.mod h1:pyM+ZXiNUh7/0+AUjUf9RKUM6vSH7T/fsn5LLS0j1Tk= +github.com/consensys/gnark-crypto v0.14.1-0.20241218224600-9f5ee8568149 h1:afTz+KUgD2+uD7ElmkNyaeR3jCG3PdBFj5mEAbdcUe4= github.com/consensys/gnark-crypto v0.14.1-0.20241218224600-9f5ee8568149/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= From 965b7139888b5ef154d2d9bf154bc6d9b8d3d4ba Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:08:34 -0600 Subject: [PATCH 071/105] docs remove inane comments --- backend/groth16/bls12-377/mpcsetup/phase1.go | 4 ++-- backend/groth16/bls12-381/mpcsetup/phase1.go | 4 ++-- backend/groth16/bls24-315/mpcsetup/phase1.go | 4 ++-- backend/groth16/bls24-317/mpcsetup/phase1.go | 4 ++-- backend/groth16/bn254/mpcsetup/phase1.go | 4 ++-- backend/groth16/bw6-633/mpcsetup/phase1.go | 4 ++-- backend/groth16/bw6-761/mpcsetup/phase1.go | 4 ++-- .../template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl | 4 ++-- 8 files changed, 16 
insertions(+), 16 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase1.go b/backend/groth16/bls12-377/mpcsetup/phase1.go index b6215462ac..8517015f78 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase1.go +++ b/backend/groth16/bls12-377/mpcsetup/phase1.go @@ -36,7 +36,7 @@ type SrsCommons struct { // // Also known as "Powers of Tau" type Phase1 struct { - proofs struct { // "main" contributions + proofs struct { Tau, Alpha, Beta valueUpdate } parameters SrsCommons @@ -97,7 +97,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // TODO @gbotrel working with jacobian points here will help with perf. tauUpdates := powers(tauUpdate, len(c.G1.Tau)) - // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. + scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) diff --git a/backend/groth16/bls12-381/mpcsetup/phase1.go b/backend/groth16/bls12-381/mpcsetup/phase1.go index 213744f7bc..1ddcd2dda0 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase1.go +++ b/backend/groth16/bls12-381/mpcsetup/phase1.go @@ -36,7 +36,7 @@ type SrsCommons struct { // // Also known as "Powers of Tau" type Phase1 struct { - proofs struct { // "main" contributions + proofs struct { Tau, Alpha, Beta valueUpdate } parameters SrsCommons @@ -97,7 +97,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // TODO @gbotrel working with jacobian points here will help with perf. tauUpdates := powers(tauUpdate, len(c.G1.Tau)) - // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) diff --git a/backend/groth16/bls24-315/mpcsetup/phase1.go b/backend/groth16/bls24-315/mpcsetup/phase1.go index e96928265d..a169040374 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase1.go +++ b/backend/groth16/bls24-315/mpcsetup/phase1.go @@ -36,7 +36,7 @@ type SrsCommons struct { // // Also known as "Powers of Tau" type Phase1 struct { - proofs struct { // "main" contributions + proofs struct { Tau, Alpha, Beta valueUpdate } parameters SrsCommons @@ -97,7 +97,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // TODO @gbotrel working with jacobian points here will help with perf. tauUpdates := powers(tauUpdate, len(c.G1.Tau)) - // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. + scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) diff --git a/backend/groth16/bls24-317/mpcsetup/phase1.go b/backend/groth16/bls24-317/mpcsetup/phase1.go index c339bf3412..dd978450fc 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase1.go +++ b/backend/groth16/bls24-317/mpcsetup/phase1.go @@ -36,7 +36,7 @@ type SrsCommons struct { // // Also known as "Powers of Tau" type Phase1 struct { - proofs struct { // "main" contributions + proofs struct { Tau, Alpha, Beta valueUpdate } parameters SrsCommons @@ -97,7 +97,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // TODO @gbotrel working with jacobian points here will help with perf. tauUpdates := powers(tauUpdate, len(c.G1.Tau)) - // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index a9c5935df1..90cab8339c 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -36,7 +36,7 @@ type SrsCommons struct { // // Also known as "Powers of Tau" type Phase1 struct { - proofs struct { // "main" contributions + proofs struct { Tau, Alpha, Beta valueUpdate } parameters SrsCommons @@ -97,7 +97,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // TODO @gbotrel working with jacobian points here will help with perf. tauUpdates := powers(tauUpdate, len(c.G1.Tau)) - // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. + scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) diff --git a/backend/groth16/bw6-633/mpcsetup/phase1.go b/backend/groth16/bw6-633/mpcsetup/phase1.go index 08b774b136..ebe0c93248 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase1.go +++ b/backend/groth16/bw6-633/mpcsetup/phase1.go @@ -36,7 +36,7 @@ type SrsCommons struct { // // Also known as "Powers of Tau" type Phase1 struct { - proofs struct { // "main" contributions + proofs struct { Tau, Alpha, Beta valueUpdate } parameters SrsCommons @@ -97,7 +97,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // TODO @gbotrel working with jacobian points here will help with perf. tauUpdates := powers(tauUpdate, len(c.G1.Tau)) - // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) diff --git a/backend/groth16/bw6-761/mpcsetup/phase1.go b/backend/groth16/bw6-761/mpcsetup/phase1.go index 88754b5067..d71fe8e088 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase1.go +++ b/backend/groth16/bw6-761/mpcsetup/phase1.go @@ -36,7 +36,7 @@ type SrsCommons struct { // // Also known as "Powers of Tau" type Phase1 struct { - proofs struct { // "main" contributions + proofs struct { Tau, Alpha, Beta valueUpdate } parameters SrsCommons @@ -97,7 +97,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // TODO @gbotrel working with jacobian points here will help with perf. tauUpdates := powers(tauUpdate, len(c.G1.Tau)) - // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. + scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl index 36234ee881..7cca93d256 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl @@ -31,7 +31,7 @@ type SrsCommons struct { // // Also known as "Powers of Tau" type Phase1 struct { - proofs struct { // "main" contributions + proofs struct { Tau, Alpha, Beta valueUpdate } parameters SrsCommons @@ -92,7 +92,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // TODO @gbotrel working with jacobian points here will help with perf. tauUpdates := powers(tauUpdate, len(c.G1.Tau)) - // saving 3 exactly scalar muls among millions. Not a significant gain but might as well. 
+ scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) From dd5cac21eee7e49a22428c7ff5519e6ce0015826 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:18:06 -0600 Subject: [PATCH 072/105] perf parallelize phase 2 updates --- backend/groth16/bn254/mpcsetup/phase2.go | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 6f30012d96..3d50ef818c 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -16,6 +16,7 @@ import ( "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bn254" + "github.com/consensys/gnark/internal/utils" "math/big" "slices" ) @@ -121,14 +122,18 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { panic("unknown type") } } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } for i := range sigma { sigma[i].BigInt(&I) - s := p.Parameters.G1.SigmaCKK[i] - for j := range s { - scale(&s[j]) - } scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) } delta.BigInt(&I) @@ -137,12 +142,8 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { delta.Inverse(delta) delta.BigInt(&I) - for i := range p.Parameters.G1.Z { - scale(&p.Parameters.G1.Z[i]) - } - for i := range p.Parameters.G1.PKK { - scale(&p.Parameters.G1.PKK[i]) - } + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) } func (p *Phase2) Contribute() { From be02fb6c83365e29afc6b0a1308911f50c2843b3 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:19:42 -0600 Subject: 
[PATCH 073/105] build generify parallelization --- backend/groth16/bls12-377/mpcsetup/phase2.go | 21 ++++++++++--------- backend/groth16/bls12-381/mpcsetup/phase2.go | 21 ++++++++++--------- backend/groth16/bls24-315/mpcsetup/phase2.go | 21 ++++++++++--------- backend/groth16/bls24-317/mpcsetup/phase2.go | 21 ++++++++++--------- backend/groth16/bw6-633/mpcsetup/phase2.go | 21 ++++++++++--------- backend/groth16/bw6-761/mpcsetup/phase2.go | 21 ++++++++++--------- .../groth16/mpcsetup/phase2.go.tmpl | 21 ++++++++++--------- 7 files changed, 77 insertions(+), 70 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase2.go b/backend/groth16/bls12-377/mpcsetup/phase2.go index b5647d47a7..825dd65a3b 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase2.go +++ b/backend/groth16/bls12-377/mpcsetup/phase2.go @@ -16,6 +16,7 @@ import ( "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls12-377" + "github.com/consensys/gnark/internal/utils" "math/big" "slices" ) @@ -121,14 +122,18 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { panic("unknown type") } } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } for i := range sigma { sigma[i].BigInt(&I) - s := p.Parameters.G1.SigmaCKK[i] - for j := range s { - scale(&s[j]) - } scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) } delta.BigInt(&I) @@ -137,12 +142,8 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { delta.Inverse(delta) delta.BigInt(&I) - for i := range p.Parameters.G1.Z { - scale(&p.Parameters.G1.Z[i]) - } - for i := range p.Parameters.G1.PKK { - scale(&p.Parameters.G1.PKK[i]) - } + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) } func (p *Phase2) Contribute() { diff --git 
a/backend/groth16/bls12-381/mpcsetup/phase2.go b/backend/groth16/bls12-381/mpcsetup/phase2.go index b9af189768..692b079880 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase2.go +++ b/backend/groth16/bls12-381/mpcsetup/phase2.go @@ -16,6 +16,7 @@ import ( "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls12-381" + "github.com/consensys/gnark/internal/utils" "math/big" "slices" ) @@ -121,14 +122,18 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { panic("unknown type") } } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } for i := range sigma { sigma[i].BigInt(&I) - s := p.Parameters.G1.SigmaCKK[i] - for j := range s { - scale(&s[j]) - } scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) } delta.BigInt(&I) @@ -137,12 +142,8 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { delta.Inverse(delta) delta.BigInt(&I) - for i := range p.Parameters.G1.Z { - scale(&p.Parameters.G1.Z[i]) - } - for i := range p.Parameters.G1.PKK { - scale(&p.Parameters.G1.PKK[i]) - } + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) } func (p *Phase2) Contribute() { diff --git a/backend/groth16/bls24-315/mpcsetup/phase2.go b/backend/groth16/bls24-315/mpcsetup/phase2.go index 6fb1c91dee..35fb5020b1 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase2.go +++ b/backend/groth16/bls24-315/mpcsetup/phase2.go @@ -16,6 +16,7 @@ import ( "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls24-315" + "github.com/consensys/gnark/internal/utils" "math/big" "slices" ) @@ -121,14 +122,18 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { panic("unknown type") } } + scaleG1Slice := func(s []curve.G1Affine) 
{ + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } for i := range sigma { sigma[i].BigInt(&I) - s := p.Parameters.G1.SigmaCKK[i] - for j := range s { - scale(&s[j]) - } scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) } delta.BigInt(&I) @@ -137,12 +142,8 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { delta.Inverse(delta) delta.BigInt(&I) - for i := range p.Parameters.G1.Z { - scale(&p.Parameters.G1.Z[i]) - } - for i := range p.Parameters.G1.PKK { - scale(&p.Parameters.G1.PKK[i]) - } + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) } func (p *Phase2) Contribute() { diff --git a/backend/groth16/bls24-317/mpcsetup/phase2.go b/backend/groth16/bls24-317/mpcsetup/phase2.go index 305154621c..f57126c678 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase2.go +++ b/backend/groth16/bls24-317/mpcsetup/phase2.go @@ -16,6 +16,7 @@ import ( "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls24-317" + "github.com/consensys/gnark/internal/utils" "math/big" "slices" ) @@ -121,14 +122,18 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { panic("unknown type") } } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } for i := range sigma { sigma[i].BigInt(&I) - s := p.Parameters.G1.SigmaCKK[i] - for j := range s { - scale(&s[j]) - } scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) } delta.BigInt(&I) @@ -137,12 +142,8 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { delta.Inverse(delta) delta.BigInt(&I) - for i := range p.Parameters.G1.Z { - scale(&p.Parameters.G1.Z[i]) - } - for i := range p.Parameters.G1.PKK { - scale(&p.Parameters.G1.PKK[i]) - } + 
scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) } func (p *Phase2) Contribute() { diff --git a/backend/groth16/bw6-633/mpcsetup/phase2.go b/backend/groth16/bw6-633/mpcsetup/phase2.go index 530dbcfd55..7e18270ff4 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase2.go +++ b/backend/groth16/bw6-633/mpcsetup/phase2.go @@ -16,6 +16,7 @@ import ( "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bw6-633" + "github.com/consensys/gnark/internal/utils" "math/big" "slices" ) @@ -121,14 +122,18 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { panic("unknown type") } } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } for i := range sigma { sigma[i].BigInt(&I) - s := p.Parameters.G1.SigmaCKK[i] - for j := range s { - scale(&s[j]) - } scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) } delta.BigInt(&I) @@ -137,12 +142,8 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { delta.Inverse(delta) delta.BigInt(&I) - for i := range p.Parameters.G1.Z { - scale(&p.Parameters.G1.Z[i]) - } - for i := range p.Parameters.G1.PKK { - scale(&p.Parameters.G1.PKK[i]) - } + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) } func (p *Phase2) Contribute() { diff --git a/backend/groth16/bw6-761/mpcsetup/phase2.go b/backend/groth16/bw6-761/mpcsetup/phase2.go index c5cfad1b23..75435b0455 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase2.go +++ b/backend/groth16/bw6-761/mpcsetup/phase2.go @@ -16,6 +16,7 @@ import ( "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bw6-761" + "github.com/consensys/gnark/internal/utils" "math/big" "slices" ) @@ -121,14 +122,18 @@ func (p *Phase2) update(delta 
*fr.Element, sigma []fr.Element) { panic("unknown type") } } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } for i := range sigma { sigma[i].BigInt(&I) - s := p.Parameters.G1.SigmaCKK[i] - for j := range s { - scale(&s[j]) - } scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) } delta.BigInt(&I) @@ -137,12 +142,8 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { delta.Inverse(delta) delta.BigInt(&I) - for i := range p.Parameters.G1.Z { - scale(&p.Parameters.G1.Z[i]) - } - for i := range p.Parameters.G1.PKK { - scale(&p.Parameters.G1.PKK[i]) - } + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) } func (p *Phase2) Contribute() { diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl index 2bf6316a5f..12876f53cb 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl @@ -2,6 +2,7 @@ import ( "bytes" "crypto/sha256" "errors" + "github.com/consensys/gnark/internal/utils" "math/big" "fmt" "slices" @@ -115,14 +116,18 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { panic("unknown type") } } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } for i := range sigma { sigma[i].BigInt(&I) - s := p.Parameters.G1.SigmaCKK[i] - for j := range s { - scale(&s[j]) - } scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) } delta.BigInt(&I) @@ -131,12 +136,8 @@ func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { delta.Inverse(delta) delta.BigInt(&I) - for i := range 
p.Parameters.G1.Z { - scale(&p.Parameters.G1.Z[i]) - } - for i := range p.Parameters.G1.PKK { - scale(&p.Parameters.G1.PKK[i]) - } + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) } func (p *Phase2) Contribute() { From 3b1982412e19bddd9b32d0a631b01fbcf849de30 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:25:39 -0600 Subject: [PATCH 074/105] refactor sameRatio"Unsafe" -> sameRatio --- backend/groth16/bn254/mpcsetup/phase1.go | 2 +- backend/groth16/bn254/mpcsetup/utils.go | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 90cab8339c..381749980b 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -246,7 +246,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) - if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") } diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 18d628a9fe..05bb3cf892 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -82,7 +82,7 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { } // Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. -func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { +func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { var nd1 curve.G1Affine nd1.Neg(&d1) res, err := curve.PairingCheck( @@ -266,18 +266,18 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error // verify commitment proof of knowledge. 
CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() - if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r return errors.New("contribution proof of knowledge verification failed") } // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. - if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { return errors.New("g2 update inconsistent") } // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { return errors.New("g1 update inconsistent") } From a3145970bd4e5fa6243253d9025ce4180fe6b240 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:26:40 -0600 Subject: [PATCH 075/105] build generify refactor --- backend/groth16/bls12-377/mpcsetup/phase1.go | 2 +- backend/groth16/bls12-377/mpcsetup/utils.go | 8 ++++---- backend/groth16/bls12-381/mpcsetup/phase1.go | 2 +- backend/groth16/bls12-381/mpcsetup/utils.go | 8 ++++---- backend/groth16/bls24-315/mpcsetup/phase1.go | 2 +- backend/groth16/bls24-315/mpcsetup/utils.go | 8 ++++---- backend/groth16/bls24-317/mpcsetup/phase1.go | 2 +- backend/groth16/bls24-317/mpcsetup/utils.go | 8 ++++---- backend/groth16/bw6-633/mpcsetup/phase1.go | 2 +- backend/groth16/bw6-633/mpcsetup/utils.go | 8 ++++---- backend/groth16/bw6-761/mpcsetup/phase1.go | 2 +- backend/groth16/bw6-761/mpcsetup/utils.go | 8 ++++---- 
.../template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl | 2 +- .../template/zkpschemes/groth16/mpcsetup/utils.go.tmpl | 8 ++++---- 14 files changed, 35 insertions(+), 35 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase1.go b/backend/groth16/bls12-377/mpcsetup/phase1.go index 8517015f78..bd7b9a4047 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase1.go +++ b/backend/groth16/bls12-377/mpcsetup/phase1.go @@ -246,7 +246,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) - if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") } diff --git a/backend/groth16/bls12-377/mpcsetup/utils.go b/backend/groth16/bls12-377/mpcsetup/utils.go index 7d94f0e4ae..bbd047b2a8 100644 --- a/backend/groth16/bls12-377/mpcsetup/utils.go +++ b/backend/groth16/bls12-377/mpcsetup/utils.go @@ -82,7 +82,7 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { } // Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. -func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { +func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { var nd1 curve.G1Affine nd1.Neg(&d1) res, err := curve.PairingCheck( @@ -266,18 +266,18 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() - if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? 
π/r return errors.New("contribution proof of knowledge verification failed") } // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. - if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { return errors.New("g2 update inconsistent") } // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { return errors.New("g1 update inconsistent") } diff --git a/backend/groth16/bls12-381/mpcsetup/phase1.go b/backend/groth16/bls12-381/mpcsetup/phase1.go index 1ddcd2dda0..364a626270 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase1.go +++ b/backend/groth16/bls12-381/mpcsetup/phase1.go @@ -246,7 +246,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) - if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") } diff --git a/backend/groth16/bls12-381/mpcsetup/utils.go b/backend/groth16/bls12-381/mpcsetup/utils.go index 07ba042c23..38cb18a019 100644 --- a/backend/groth16/bls12-381/mpcsetup/utils.go +++ b/backend/groth16/bls12-381/mpcsetup/utils.go @@ -82,7 +82,7 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { } // Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
-func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { +func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { var nd1 curve.G1Affine nd1.Neg(&d1) res, err := curve.PairingCheck( @@ -266,18 +266,18 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() - if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r return errors.New("contribution proof of knowledge verification failed") } // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. - if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { return errors.New("g2 update inconsistent") } // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { return errors.New("g1 update inconsistent") } diff --git a/backend/groth16/bls24-315/mpcsetup/phase1.go b/backend/groth16/bls24-315/mpcsetup/phase1.go index a169040374..a9a90fa1e6 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase1.go +++ b/backend/groth16/bls24-315/mpcsetup/phase1.go @@ -246,7 +246,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. 
g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) - if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") } diff --git a/backend/groth16/bls24-315/mpcsetup/utils.go b/backend/groth16/bls24-315/mpcsetup/utils.go index 0cd37ad76e..89bee165bf 100644 --- a/backend/groth16/bls24-315/mpcsetup/utils.go +++ b/backend/groth16/bls24-315/mpcsetup/utils.go @@ -82,7 +82,7 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { } // Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. -func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { +func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { var nd1 curve.G1Affine nd1.Neg(&d1) res, err := curve.PairingCheck( @@ -266,18 +266,18 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() - if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r return errors.New("contribution proof of knowledge verification failed") } // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { return errors.New("g2 update inconsistent") } // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { return errors.New("g1 update inconsistent") } diff --git a/backend/groth16/bls24-317/mpcsetup/phase1.go b/backend/groth16/bls24-317/mpcsetup/phase1.go index dd978450fc..00d5079904 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase1.go +++ b/backend/groth16/bls24-317/mpcsetup/phase1.go @@ -246,7 +246,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) - if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") } diff --git a/backend/groth16/bls24-317/mpcsetup/utils.go b/backend/groth16/bls24-317/mpcsetup/utils.go index e36f24d274..2a88e87477 100644 --- a/backend/groth16/bls24-317/mpcsetup/utils.go +++ b/backend/groth16/bls24-317/mpcsetup/utils.go @@ -82,7 +82,7 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { } // Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. -func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { +func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { var nd1 curve.G1Affine nd1.Neg(&d1) res, err := curve.PairingCheck( @@ -266,18 +266,18 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error // verify commitment proof of knowledge. 
CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() - if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r return errors.New("contribution proof of knowledge verification failed") } // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. - if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { return errors.New("g2 update inconsistent") } // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { return errors.New("g1 update inconsistent") } diff --git a/backend/groth16/bw6-633/mpcsetup/phase1.go b/backend/groth16/bw6-633/mpcsetup/phase1.go index ebe0c93248..4db33250ab 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase1.go +++ b/backend/groth16/bw6-633/mpcsetup/phase1.go @@ -246,7 +246,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. 
g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) - if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") } diff --git a/backend/groth16/bw6-633/mpcsetup/utils.go b/backend/groth16/bw6-633/mpcsetup/utils.go index 7fb5a5f554..e9a3a17aec 100644 --- a/backend/groth16/bw6-633/mpcsetup/utils.go +++ b/backend/groth16/bw6-633/mpcsetup/utils.go @@ -82,7 +82,7 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { } // Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. -func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { +func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { var nd1 curve.G1Affine nd1.Neg(&d1) res, err := curve.PairingCheck( @@ -266,18 +266,18 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() - if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r return errors.New("contribution proof of knowledge verification failed") } // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { return errors.New("g2 update inconsistent") } // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { return errors.New("g1 update inconsistent") } diff --git a/backend/groth16/bw6-761/mpcsetup/phase1.go b/backend/groth16/bw6-761/mpcsetup/phase1.go index d71fe8e088..5420b27de6 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase1.go +++ b/backend/groth16/bw6-761/mpcsetup/phase1.go @@ -246,7 +246,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) - if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") } diff --git a/backend/groth16/bw6-761/mpcsetup/utils.go b/backend/groth16/bw6-761/mpcsetup/utils.go index f2b73774e3..fd36104b66 100644 --- a/backend/groth16/bw6-761/mpcsetup/utils.go +++ b/backend/groth16/bw6-761/mpcsetup/utils.go @@ -82,7 +82,7 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { } // Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. -func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { +func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { var nd1 curve.G1Affine nd1.Neg(&d1) res, err := curve.PairingCheck( @@ -266,18 +266,18 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error // verify commitment proof of knowledge. 
CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() - if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r return errors.New("contribution proof of knowledge verification failed") } // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. - if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { return errors.New("g2 update inconsistent") } // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { return errors.New("g1 update inconsistent") } diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl index 7cca93d256..3c09baec49 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl @@ -241,7 +241,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. 
g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) - if !sameRatioUnsafe(g1Num, g1Denom, g2Num, g2Denom) { + if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") } diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl index 30ee2d42b0..0831161075 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl @@ -75,7 +75,7 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { } // Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. -func sameRatioUnsafe(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { +func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { var nd1 curve.G1Affine nd1.Neg(&d1) res, err := curve.PairingCheck( @@ -259,18 +259,18 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base _, _, g1, _ := curve.Generators() - if !sameRatioUnsafe(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r + if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r return errors.New("contribution proof of knowledge verification failed") } // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatioUnsafe(num.g1, denom.g1, *num.g2, *denom.g2) { + if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { return errors.New("g2 update inconsistent") } // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatioUnsafe(num.g1, denom.g1, x.contributionPok, r) { + if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { return errors.New("g1 update inconsistent") } From 7eeb48adc07d6ef14a2147c43218246c314514ae Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:32:29 -0600 Subject: [PATCH 076/105] refactor linearCombCoeffs -> randomMonomials --- backend/groth16/bn254/mpcsetup/phase1.go | 2 +- backend/groth16/bn254/mpcsetup/phase2.go | 2 +- backend/groth16/bn254/mpcsetup/utils.go | 15 ++++++++------- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 381749980b..5710430518 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -244,7 +244,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1s = append(g1s, d...) 
g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 3d50ef818c..b5007e0737 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -72,7 +72,7 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { g1Num := linearCombination(g1Numerator, r) diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 05bb3cf892..14384c7bf4 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -29,21 +29,22 @@ func bitReverse[T any](a []T) { } } -func linearCombCoeffs(n int) []fr.Element { - return bivariateRandomMonomials(n) +// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a +func randomMonomials(N int) []fr.Element { + return bivariateRandomMonomials(N) } // Returns [1, a, a², ..., aᴺ⁻¹ ] -func powers(a *fr.Element, n int) []fr.Element { +func powers(a *fr.Element, N int) []fr.Element { - result := make([]fr.Element, n) - if n >= 1 { + result := make([]fr.Element, N) + if N >= 1 { result[0].SetOne() } - if n >= 2 { + if N >= 2 { result[1].Set(a) } - for i := 2; i < n; i++ { + for i := 2; i < N; i++ { result[i].Mul(&result[i-1], a) } return result From 1e99b581a3faf262fb0d546c76b51200db725945 Mon Sep 17 00:00:00 2001 
From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:34:39 -0600 Subject: [PATCH 077/105] build generify refactor --- backend/groth16/bls12-377/mpcsetup/phase1.go | 2 +- backend/groth16/bls12-377/mpcsetup/phase2.go | 2 +- backend/groth16/bls12-377/mpcsetup/utils.go | 15 ++++++++------- backend/groth16/bls12-381/mpcsetup/phase1.go | 2 +- backend/groth16/bls12-381/mpcsetup/phase2.go | 2 +- backend/groth16/bls12-381/mpcsetup/utils.go | 15 ++++++++------- backend/groth16/bls24-315/mpcsetup/phase1.go | 2 +- backend/groth16/bls24-315/mpcsetup/phase2.go | 2 +- backend/groth16/bls24-315/mpcsetup/utils.go | 15 ++++++++------- backend/groth16/bls24-317/mpcsetup/phase1.go | 2 +- backend/groth16/bls24-317/mpcsetup/phase2.go | 2 +- backend/groth16/bls24-317/mpcsetup/utils.go | 15 ++++++++------- backend/groth16/bw6-633/mpcsetup/phase1.go | 2 +- backend/groth16/bw6-633/mpcsetup/phase2.go | 2 +- backend/groth16/bw6-633/mpcsetup/utils.go | 15 ++++++++------- backend/groth16/bw6-761/mpcsetup/phase1.go | 2 +- backend/groth16/bw6-761/mpcsetup/phase2.go | 2 +- backend/groth16/bw6-761/mpcsetup/utils.go | 15 ++++++++------- .../zkpschemes/groth16/mpcsetup/phase1.go.tmpl | 4 ++-- .../zkpschemes/groth16/mpcsetup/phase2.go.tmpl | 2 +- .../zkpschemes/groth16/mpcsetup/utils.go.tmpl | 15 ++++++++------- 21 files changed, 71 insertions(+), 64 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase1.go b/backend/groth16/bls12-377/mpcsetup/phase1.go index bd7b9a4047..40f1cf4554 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase1.go +++ b/backend/groth16/bls12-377/mpcsetup/phase1.go @@ -244,7 +244,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1s = append(g1s, d...) 
g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") diff --git a/backend/groth16/bls12-377/mpcsetup/phase2.go b/backend/groth16/bls12-377/mpcsetup/phase2.go index 825dd65a3b..a5687a8ebd 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase2.go +++ b/backend/groth16/bls12-377/mpcsetup/phase2.go @@ -72,7 +72,7 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { g1Num := linearCombination(g1Numerator, r) diff --git a/backend/groth16/bls12-377/mpcsetup/utils.go b/backend/groth16/bls12-377/mpcsetup/utils.go index bbd047b2a8..fa5a2c50a5 100644 --- a/backend/groth16/bls12-377/mpcsetup/utils.go +++ b/backend/groth16/bls12-377/mpcsetup/utils.go @@ -29,21 +29,22 @@ func bitReverse[T any](a []T) { } } -func linearCombCoeffs(n int) []fr.Element { - return bivariateRandomMonomials(n) +// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a +func randomMonomials(N int) []fr.Element { + return bivariateRandomMonomials(N) } // Returns [1, a, a², ..., aᴺ⁻¹ ] -func powers(a *fr.Element, n int) []fr.Element { +func powers(a *fr.Element, N int) []fr.Element { - result := make([]fr.Element, n) - if n >= 1 { + result := make([]fr.Element, N) + if N >= 1 { result[0].SetOne() } - if n >= 2 { + if N >= 2 { result[1].Set(a) } - for i := 2; i < n; i++ { + for i := 2; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git 
a/backend/groth16/bls12-381/mpcsetup/phase1.go b/backend/groth16/bls12-381/mpcsetup/phase1.go index 364a626270..877889ec8a 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase1.go +++ b/backend/groth16/bls12-381/mpcsetup/phase1.go @@ -244,7 +244,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1s = append(g1s, d...) g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") diff --git a/backend/groth16/bls12-381/mpcsetup/phase2.go b/backend/groth16/bls12-381/mpcsetup/phase2.go index 692b079880..ea89bb40e5 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase2.go +++ b/backend/groth16/bls12-381/mpcsetup/phase2.go @@ -72,7 +72,7 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { g1Num := linearCombination(g1Numerator, r) diff --git a/backend/groth16/bls12-381/mpcsetup/utils.go b/backend/groth16/bls12-381/mpcsetup/utils.go index 38cb18a019..3b85fa6be7 100644 --- a/backend/groth16/bls12-381/mpcsetup/utils.go +++ b/backend/groth16/bls12-381/mpcsetup/utils.go @@ -29,21 +29,22 @@ func bitReverse[T any](a []T) { } } -func linearCombCoeffs(n int) []fr.Element { - return bivariateRandomMonomials(n) +// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a +func randomMonomials(N int) []fr.Element { + return bivariateRandomMonomials(N) } // Returns [1, a, a², ..., aᴺ⁻¹ ] -func powers(a *fr.Element, n int) []fr.Element { 
+func powers(a *fr.Element, N int) []fr.Element { - result := make([]fr.Element, n) - if n >= 1 { + result := make([]fr.Element, N) + if N >= 1 { result[0].SetOne() } - if n >= 2 { + if N >= 2 { result[1].Set(a) } - for i := 2; i < n; i++ { + for i := 2; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/backend/groth16/bls24-315/mpcsetup/phase1.go b/backend/groth16/bls24-315/mpcsetup/phase1.go index a9a90fa1e6..c3b127a942 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase1.go +++ b/backend/groth16/bls24-315/mpcsetup/phase1.go @@ -244,7 +244,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1s = append(g1s, d...) g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") diff --git a/backend/groth16/bls24-315/mpcsetup/phase2.go b/backend/groth16/bls24-315/mpcsetup/phase2.go index 35fb5020b1..54d178b3c5 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase2.go +++ b/backend/groth16/bls24-315/mpcsetup/phase2.go @@ -72,7 +72,7 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { g1Num := linearCombination(g1Numerator, r) diff --git a/backend/groth16/bls24-315/mpcsetup/utils.go b/backend/groth16/bls24-315/mpcsetup/utils.go index 89bee165bf..86e8dbf9af 100644 --- a/backend/groth16/bls24-315/mpcsetup/utils.go +++ b/backend/groth16/bls24-315/mpcsetup/utils.go @@ -29,21 +29,22 @@ func 
bitReverse[T any](a []T) { } } -func linearCombCoeffs(n int) []fr.Element { - return bivariateRandomMonomials(n) +// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a +func randomMonomials(N int) []fr.Element { + return bivariateRandomMonomials(N) } // Returns [1, a, a², ..., aᴺ⁻¹ ] -func powers(a *fr.Element, n int) []fr.Element { +func powers(a *fr.Element, N int) []fr.Element { - result := make([]fr.Element, n) - if n >= 1 { + result := make([]fr.Element, N) + if N >= 1 { result[0].SetOne() } - if n >= 2 { + if N >= 2 { result[1].Set(a) } - for i := 2; i < n; i++ { + for i := 2; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/backend/groth16/bls24-317/mpcsetup/phase1.go b/backend/groth16/bls24-317/mpcsetup/phase1.go index 00d5079904..7af53f2105 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase1.go +++ b/backend/groth16/bls24-317/mpcsetup/phase1.go @@ -244,7 +244,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1s = append(g1s, d...) 
g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") diff --git a/backend/groth16/bls24-317/mpcsetup/phase2.go b/backend/groth16/bls24-317/mpcsetup/phase2.go index f57126c678..1714d037fd 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase2.go +++ b/backend/groth16/bls24-317/mpcsetup/phase2.go @@ -72,7 +72,7 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { g1Num := linearCombination(g1Numerator, r) diff --git a/backend/groth16/bls24-317/mpcsetup/utils.go b/backend/groth16/bls24-317/mpcsetup/utils.go index 2a88e87477..e1df7db049 100644 --- a/backend/groth16/bls24-317/mpcsetup/utils.go +++ b/backend/groth16/bls24-317/mpcsetup/utils.go @@ -29,21 +29,22 @@ func bitReverse[T any](a []T) { } } -func linearCombCoeffs(n int) []fr.Element { - return bivariateRandomMonomials(n) +// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a +func randomMonomials(N int) []fr.Element { + return bivariateRandomMonomials(N) } // Returns [1, a, a², ..., aᴺ⁻¹ ] -func powers(a *fr.Element, n int) []fr.Element { +func powers(a *fr.Element, N int) []fr.Element { - result := make([]fr.Element, n) - if n >= 1 { + result := make([]fr.Element, N) + if N >= 1 { result[0].SetOne() } - if n >= 2 { + if N >= 2 { result[1].Set(a) } - for i := 2; i < n; i++ { + for i := 2; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git 
a/backend/groth16/bw6-633/mpcsetup/phase1.go b/backend/groth16/bw6-633/mpcsetup/phase1.go index 4db33250ab..79a072d644 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase1.go +++ b/backend/groth16/bw6-633/mpcsetup/phase1.go @@ -244,7 +244,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1s = append(g1s, d...) g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") diff --git a/backend/groth16/bw6-633/mpcsetup/phase2.go b/backend/groth16/bw6-633/mpcsetup/phase2.go index 7e18270ff4..5918ec044c 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase2.go +++ b/backend/groth16/bw6-633/mpcsetup/phase2.go @@ -72,7 +72,7 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { g1Num := linearCombination(g1Numerator, r) diff --git a/backend/groth16/bw6-633/mpcsetup/utils.go b/backend/groth16/bw6-633/mpcsetup/utils.go index e9a3a17aec..24ed45df5b 100644 --- a/backend/groth16/bw6-633/mpcsetup/utils.go +++ b/backend/groth16/bw6-633/mpcsetup/utils.go @@ -29,21 +29,22 @@ func bitReverse[T any](a []T) { } } -func linearCombCoeffs(n int) []fr.Element { - return bivariateRandomMonomials(n) +// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a +func randomMonomials(N int) []fr.Element { + return bivariateRandomMonomials(N) } // Returns [1, a, a², ..., aᴺ⁻¹ ] -func powers(a *fr.Element, n int) []fr.Element { +func powers(a 
*fr.Element, N int) []fr.Element { - result := make([]fr.Element, n) - if n >= 1 { + result := make([]fr.Element, N) + if N >= 1 { result[0].SetOne() } - if n >= 2 { + if N >= 2 { result[1].Set(a) } - for i := 2; i < n; i++ { + for i := 2; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/backend/groth16/bw6-761/mpcsetup/phase1.go b/backend/groth16/bw6-761/mpcsetup/phase1.go index 5420b27de6..518703f993 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase1.go +++ b/backend/groth16/bw6-761/mpcsetup/phase1.go @@ -244,7 +244,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1s = append(g1s, d...) g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") diff --git a/backend/groth16/bw6-761/mpcsetup/phase2.go b/backend/groth16/bw6-761/mpcsetup/phase2.go index 75435b0455..53743ccdba 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase2.go +++ b/backend/groth16/bw6-761/mpcsetup/phase2.go @@ -72,7 +72,7 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { g1Num := linearCombination(g1Numerator, r) diff --git a/backend/groth16/bw6-761/mpcsetup/utils.go b/backend/groth16/bw6-761/mpcsetup/utils.go index fd36104b66..e97ebb84af 100644 --- a/backend/groth16/bw6-761/mpcsetup/utils.go +++ b/backend/groth16/bw6-761/mpcsetup/utils.go @@ -29,21 +29,22 @@ func bitReverse[T any](a []T) { } } -func 
linearCombCoeffs(n int) []fr.Element { - return bivariateRandomMonomials(n) +// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a +func randomMonomials(N int) []fr.Element { + return bivariateRandomMonomials(N) } // Returns [1, a, a², ..., aᴺ⁻¹ ] -func powers(a *fr.Element, n int) []fr.Element { +func powers(a *fr.Element, N int) []fr.Element { - result := make([]fr.Element, n) - if n >= 1 { + result := make([]fr.Element, N) + if N >= 1 { result[0].SetOne() } - if n >= 2 { + if N >= 2 { result[1].Set(a) } - for i := 2; i < n; i++ { + for i := 2; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl index 3c09baec49..88228e79a9 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl @@ -239,7 +239,7 @@ func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve. g1s = append(g1s, d...) 
g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, linearCombCoeffs(len(b))) + g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { return errors.New("multi-value update check failed") @@ -273,4 +273,4 @@ func NewPhase1(N uint64) *Phase1 { res := new(Phase1) res.Initialize(N) return res -} +} \ No newline at end of file diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl index 12876f53cb..8db729dbec 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl @@ -66,7 +66,7 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := linearCombCoeffs(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) + r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { g1Num := linearCombination(g1Numerator, r) diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl index 0831161075..63a30fccc3 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl @@ -22,21 +22,22 @@ func bitReverse[T any](a []T) { } } -func linearCombCoeffs(n int) []fr.Element { - return bivariateRandomMonomials(n) +// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a +func randomMonomials(N int) []fr.Element { + return bivariateRandomMonomials(N) } // Returns [1, a, a², ..., aᴺ⁻¹ ] 
-func powers(a *fr.Element, n int) []fr.Element { +func powers(a *fr.Element, N int) []fr.Element { - result := make([]fr.Element, n) - if n >= 1 { + result := make([]fr.Element, N) + if N >= 1 { result[0].SetOne() } - if n >= 2 { + if N >= 2 { result[1].Set(a) } - for i := 2; i < n; i++ { + for i := 2; i < N; i++ { result[i].Mul(&result[i-1], a) } return result From 605f2359ec4888e96a5b7b7f8df80d0c026d85fb Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:37:21 -0600 Subject: [PATCH 078/105] docs scalar goes on the left --- backend/groth16/bls12-377/mpcsetup/phase2.go | 2 +- backend/groth16/bls12-381/mpcsetup/phase2.go | 2 +- backend/groth16/bls24-315/mpcsetup/phase2.go | 2 +- backend/groth16/bls24-317/mpcsetup/phase2.go | 2 +- backend/groth16/bn254/mpcsetup/phase2.go | 2 +- backend/groth16/bw6-633/mpcsetup/phase2.go | 2 +- backend/groth16/bw6-761/mpcsetup/phase2.go | 2 +- .../backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase2.go b/backend/groth16/bls12-377/mpcsetup/phase2.go index a5687a8ebd..845f74a824 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase2.go +++ b/backend/groth16/bls12-377/mpcsetup/phase2.go @@ -42,7 +42,7 @@ type Phase2 struct { Delta curve.G1Affine Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ - SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine diff --git a/backend/groth16/bls12-381/mpcsetup/phase2.go b/backend/groth16/bls12-381/mpcsetup/phase2.go index ea89bb40e5..64824e1d5a 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase2.go +++ b/backend/groth16/bls12-381/mpcsetup/phase2.go @@ -42,7 +42,7 @@ type Phase2 struct { Delta curve.G1Affine Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. They have a denominator of δ - SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine diff --git a/backend/groth16/bls24-315/mpcsetup/phase2.go b/backend/groth16/bls24-315/mpcsetup/phase2.go index 54d178b3c5..7702610cd8 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase2.go +++ b/backend/groth16/bls24-315/mpcsetup/phase2.go @@ -42,7 +42,7 @@ type Phase2 struct { Delta curve.G1Affine Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ - SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine diff --git a/backend/groth16/bls24-317/mpcsetup/phase2.go b/backend/groth16/bls24-317/mpcsetup/phase2.go index 1714d037fd..ce6c6c23f7 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase2.go +++ b/backend/groth16/bls24-317/mpcsetup/phase2.go @@ -42,7 +42,7 @@ type Phase2 struct { Delta curve.G1Affine Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. They have a denominator of δ - SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index b5007e0737..de20b8206a 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -42,7 +42,7 @@ type Phase2 struct { Delta curve.G1Affine Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ - SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine diff --git a/backend/groth16/bw6-633/mpcsetup/phase2.go b/backend/groth16/bw6-633/mpcsetup/phase2.go index 5918ec044c..2746d07b94 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase2.go +++ b/backend/groth16/bw6-633/mpcsetup/phase2.go @@ -42,7 +42,7 @@ type Phase2 struct { Delta curve.G1Affine Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. They have a denominator of δ - SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine diff --git a/backend/groth16/bw6-761/mpcsetup/phase2.go b/backend/groth16/bw6-761/mpcsetup/phase2.go index 53743ccdba..23d61bfd30 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase2.go +++ b/backend/groth16/bw6-761/mpcsetup/phase2.go @@ -42,7 +42,7 @@ type Phase2 struct { Delta curve.G1Affine Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ - SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl index 8db729dbec..1f2792d447 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl @@ -36,7 +36,7 @@ type Phase2 struct { Delta curve.G1Affine Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ - SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = Cᵢⱼσᵢ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine From 8495c2c164b704935fa0724bd8e5c8a2c7e87672 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 18:50:46 -0600 Subject: [PATCH 079/105] style: remove `if i==0` at the beginning of loop --- backend/groth16/bls12-377/mpcsetup/setup_test.go | 11 ++--------- backend/groth16/bls12-381/mpcsetup/setup_test.go | 11 ++--------- backend/groth16/bls24-315/mpcsetup/setup_test.go | 11 ++--------- backend/groth16/bls24-317/mpcsetup/setup_test.go | 11 ++--------- backend/groth16/bn254/mpcsetup/setup_test.go | 11 ++--------- backend/groth16/bw6-633/mpcsetup/setup_test.go | 11 ++--------- backend/groth16/bw6-761/mpcsetup/setup_test.go | 11 ++--------- .../zkpschemes/groth16/mpcsetup/setup_test.go.tmpl | 11 ++--------- 8 files changed, 16 insertions(+), 72 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/setup_test.go b/backend/groth16/bls12-377/mpcsetup/setup_test.go index 9db22ddaca..ed1817de88 100644 --- a/backend/groth16/bls12-377/mpcsetup/setup_test.go +++ b/backend/groth16/bls12-377/mpcsetup/setup_test.go @@ -59,12 +59,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert.Equal(len(b), int(n)) } - // Make contributions for serialized + p1.Initialize(domainSize) for i := range phase1 { - if i == 0 { // no "predecessor" to the first contribution - p1.Initialize(domainSize) - } - p1.Contribute() serialized[i] = serialize(&p1) } @@ -84,11 +80,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { srsCommons = commonsRead } - // Prepare for phase-2 + 
p2.Initialize(ccs, &srsCommons) for i := range phase2 { - if i == 0 { - p2.Initialize(ccs, &srsCommons) - } p2.Contribute() serialized[i] = serialize(&p2) } diff --git a/backend/groth16/bls12-381/mpcsetup/setup_test.go b/backend/groth16/bls12-381/mpcsetup/setup_test.go index 23ac6ce164..71f5b1f44d 100644 --- a/backend/groth16/bls12-381/mpcsetup/setup_test.go +++ b/backend/groth16/bls12-381/mpcsetup/setup_test.go @@ -59,12 +59,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert.Equal(len(b), int(n)) } - // Make contributions for serialized + p1.Initialize(domainSize) for i := range phase1 { - if i == 0 { // no "predecessor" to the first contribution - p1.Initialize(domainSize) - } - p1.Contribute() serialized[i] = serialize(&p1) } @@ -84,11 +80,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { srsCommons = commonsRead } - // Prepare for phase-2 + p2.Initialize(ccs, &srsCommons) for i := range phase2 { - if i == 0 { - p2.Initialize(ccs, &srsCommons) - } p2.Contribute() serialized[i] = serialize(&p2) } diff --git a/backend/groth16/bls24-315/mpcsetup/setup_test.go b/backend/groth16/bls24-315/mpcsetup/setup_test.go index 37ceac87cb..efe24b7334 100644 --- a/backend/groth16/bls24-315/mpcsetup/setup_test.go +++ b/backend/groth16/bls24-315/mpcsetup/setup_test.go @@ -59,12 +59,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert.Equal(len(b), int(n)) } - // Make contributions for serialized + p1.Initialize(domainSize) for i := range phase1 { - if i == 0 { // no "predecessor" to the first contribution - p1.Initialize(domainSize) - } - p1.Contribute() serialized[i] = serialize(&p1) } @@ -84,11 +80,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { srsCommons = commonsRead } - // Prepare for phase-2 + p2.Initialize(ccs, &srsCommons) for i := range phase2 { - if i == 0 { - p2.Initialize(ccs, &srsCommons) - } p2.Contribute() serialized[i] 
= serialize(&p2) } diff --git a/backend/groth16/bls24-317/mpcsetup/setup_test.go b/backend/groth16/bls24-317/mpcsetup/setup_test.go index acaa7f854f..e621f3e838 100644 --- a/backend/groth16/bls24-317/mpcsetup/setup_test.go +++ b/backend/groth16/bls24-317/mpcsetup/setup_test.go @@ -59,12 +59,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert.Equal(len(b), int(n)) } - // Make contributions for serialized + p1.Initialize(domainSize) for i := range phase1 { - if i == 0 { // no "predecessor" to the first contribution - p1.Initialize(domainSize) - } - p1.Contribute() serialized[i] = serialize(&p1) } @@ -84,11 +80,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { srsCommons = commonsRead } - // Prepare for phase-2 + p2.Initialize(ccs, &srsCommons) for i := range phase2 { - if i == 0 { - p2.Initialize(ccs, &srsCommons) - } p2.Contribute() serialized[i] = serialize(&p2) } diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 16fe9b265d..fc399f453a 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -59,12 +59,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert.Equal(len(b), int(n)) } - // Make contributions for serialized + p1.Initialize(domainSize) for i := range phase1 { - if i == 0 { // no "predecessor" to the first contribution - p1.Initialize(domainSize) - } - p1.Contribute() serialized[i] = serialize(&p1) } @@ -84,11 +80,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { srsCommons = commonsRead } - // Prepare for phase-2 + p2.Initialize(ccs, &srsCommons) for i := range phase2 { - if i == 0 { - p2.Initialize(ccs, &srsCommons) - } p2.Contribute() serialized[i] = serialize(&p2) } diff --git a/backend/groth16/bw6-633/mpcsetup/setup_test.go b/backend/groth16/bw6-633/mpcsetup/setup_test.go index 
c7c44b2184..b23e35051b 100644 --- a/backend/groth16/bw6-633/mpcsetup/setup_test.go +++ b/backend/groth16/bw6-633/mpcsetup/setup_test.go @@ -59,12 +59,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert.Equal(len(b), int(n)) } - // Make contributions for serialized + p1.Initialize(domainSize) for i := range phase1 { - if i == 0 { // no "predecessor" to the first contribution - p1.Initialize(domainSize) - } - p1.Contribute() serialized[i] = serialize(&p1) } @@ -84,11 +80,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { srsCommons = commonsRead } - // Prepare for phase-2 + p2.Initialize(ccs, &srsCommons) for i := range phase2 { - if i == 0 { - p2.Initialize(ccs, &srsCommons) - } p2.Contribute() serialized[i] = serialize(&p2) } diff --git a/backend/groth16/bw6-761/mpcsetup/setup_test.go b/backend/groth16/bw6-761/mpcsetup/setup_test.go index 3431b53160..70004301d4 100644 --- a/backend/groth16/bw6-761/mpcsetup/setup_test.go +++ b/backend/groth16/bw6-761/mpcsetup/setup_test.go @@ -59,12 +59,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert.Equal(len(b), int(n)) } - // Make contributions for serialized + p1.Initialize(domainSize) for i := range phase1 { - if i == 0 { // no "predecessor" to the first contribution - p1.Initialize(domainSize) - } - p1.Contribute() serialized[i] = serialize(&p1) } @@ -84,11 +80,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { srsCommons = commonsRead } - // Prepare for phase-2 + p2.Initialize(ccs, &srsCommons) for i := range phase2 { - if i == 0 { - p2.Initialize(ccs, &srsCommons) - } p2.Contribute() serialized[i] = serialize(&p2) } diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl index 708f4b4df8..64f52ed3cc 100644 --- 
a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl @@ -53,12 +53,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert.Equal(len(b), int(n)) } - // Make contributions for serialized + p1.Initialize(domainSize) for i := range phase1 { - if i == 0 { // no "predecessor" to the first contribution - p1.Initialize(domainSize) - } - p1.Contribute() serialized[i] = serialize(&p1) } @@ -78,11 +74,8 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { srsCommons = commonsRead } - // Prepare for phase-2 + p2.Initialize(ccs, &srsCommons) for i := range phase2 { - if i == 0 { - p2.Initialize(ccs, &srsCommons) - } p2.Contribute() serialized[i] = serialize(&p2) } From e6f8915b76d1fd909a42294415bc7c476ea48ed5 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 19:04:18 -0600 Subject: [PATCH 080/105] chore minor improvements --- backend/groth16/bls12-377/mpcsetup/utils.go | 23 ++++++---------- backend/groth16/bls12-381/mpcsetup/utils.go | 23 ++++++---------- backend/groth16/bls24-315/mpcsetup/utils.go | 23 ++++++---------- backend/groth16/bls24-317/mpcsetup/utils.go | 23 ++++++---------- backend/groth16/bn254/mpcsetup/utils.go | 23 ++++++---------- backend/groth16/bw6-633/mpcsetup/utils.go | 23 ++++++---------- backend/groth16/bw6-761/mpcsetup/utils.go | 23 ++++++---------- .../zkpschemes/groth16/mpcsetup/utils.go.tmpl | 27 +++++++------------ 8 files changed, 66 insertions(+), 122 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/utils.go b/backend/groth16/bls12-377/mpcsetup/utils.go index fa5a2c50a5..f35078fd09 100644 --- a/backend/groth16/bls12-377/mpcsetup/utils.go +++ b/backend/groth16/bls12-377/mpcsetup/utils.go @@ -201,7 +201,7 @@ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, // π ≔ x.r; 
e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) + buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) buf.Write(sG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) @@ -253,7 +253,7 @@ func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contribution // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. -// prevCommitment is assumed to be valid. No subgroup check and the like. +// denom, num are assumed to be valid. No subgroup check and the like. func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { noG2 := denom.g2 == nil if noG2 != (num.g2 == nil) { @@ -285,15 +285,16 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error return nil } -func toRefs[T any](s []T) []*T { - res := make([]*T, len(s)) +func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { - res[i] = &s[i] + if !s[i].IsInSubGroup() { + return false + } } - return res + return true } -func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { +func areInSubGroupG2(s []curve.G2Affine) bool { for i := range s { if !s[i].IsInSubGroup() { return false @@ -302,14 +303,6 @@ func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { return true } -func areInSubGroupG1(s []curve.G1Affine) bool { - return areInSubGroup(toRefs(s)) -} - -func areInSubGroupG2(s []curve.G2Affine) bool { - return areInSubGroup(toRefs(s)) -} - // bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
// all concatenated in the same slice func bivariateRandomMonomials(ends ...int) []fr.Element { diff --git a/backend/groth16/bls12-381/mpcsetup/utils.go b/backend/groth16/bls12-381/mpcsetup/utils.go index 3b85fa6be7..4e964a9958 100644 --- a/backend/groth16/bls12-381/mpcsetup/utils.go +++ b/backend/groth16/bls12-381/mpcsetup/utils.go @@ -201,7 +201,7 @@ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) + buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) buf.Write(sG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) @@ -253,7 +253,7 @@ func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contribution // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. -// prevCommitment is assumed to be valid. No subgroup check and the like. +// denom, num are assumed to be valid. No subgroup check and the like. 
func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { noG2 := denom.g2 == nil if noG2 != (num.g2 == nil) { @@ -285,15 +285,16 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error return nil } -func toRefs[T any](s []T) []*T { - res := make([]*T, len(s)) +func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { - res[i] = &s[i] + if !s[i].IsInSubGroup() { + return false + } } - return res + return true } -func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { +func areInSubGroupG2(s []curve.G2Affine) bool { for i := range s { if !s[i].IsInSubGroup() { return false @@ -302,14 +303,6 @@ func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { return true } -func areInSubGroupG1(s []curve.G1Affine) bool { - return areInSubGroup(toRefs(s)) -} - -func areInSubGroupG2(s []curve.G2Affine) bool { - return areInSubGroup(toRefs(s)) -} - // bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
// all concatenated in the same slice func bivariateRandomMonomials(ends ...int) []fr.Element { diff --git a/backend/groth16/bls24-315/mpcsetup/utils.go b/backend/groth16/bls24-315/mpcsetup/utils.go index 86e8dbf9af..4af8de9463 100644 --- a/backend/groth16/bls24-315/mpcsetup/utils.go +++ b/backend/groth16/bls24-315/mpcsetup/utils.go @@ -201,7 +201,7 @@ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) + buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) buf.Write(sG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) @@ -253,7 +253,7 @@ func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contribution // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. -// prevCommitment is assumed to be valid. No subgroup check and the like. +// denom, num are assumed to be valid. No subgroup check and the like. 
func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { noG2 := denom.g2 == nil if noG2 != (num.g2 == nil) { @@ -285,15 +285,16 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error return nil } -func toRefs[T any](s []T) []*T { - res := make([]*T, len(s)) +func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { - res[i] = &s[i] + if !s[i].IsInSubGroup() { + return false + } } - return res + return true } -func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { +func areInSubGroupG2(s []curve.G2Affine) bool { for i := range s { if !s[i].IsInSubGroup() { return false @@ -302,14 +303,6 @@ func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { return true } -func areInSubGroupG1(s []curve.G1Affine) bool { - return areInSubGroup(toRefs(s)) -} - -func areInSubGroupG2(s []curve.G2Affine) bool { - return areInSubGroup(toRefs(s)) -} - // bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
// all concatenated in the same slice func bivariateRandomMonomials(ends ...int) []fr.Element { diff --git a/backend/groth16/bls24-317/mpcsetup/utils.go b/backend/groth16/bls24-317/mpcsetup/utils.go index e1df7db049..85e237b212 100644 --- a/backend/groth16/bls24-317/mpcsetup/utils.go +++ b/backend/groth16/bls24-317/mpcsetup/utils.go @@ -201,7 +201,7 @@ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) + buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) buf.Write(sG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) @@ -253,7 +253,7 @@ func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contribution // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. -// prevCommitment is assumed to be valid. No subgroup check and the like. +// denom, num are assumed to be valid. No subgroup check and the like. 
func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { noG2 := denom.g2 == nil if noG2 != (num.g2 == nil) { @@ -285,15 +285,16 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error return nil } -func toRefs[T any](s []T) []*T { - res := make([]*T, len(s)) +func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { - res[i] = &s[i] + if !s[i].IsInSubGroup() { + return false + } } - return res + return true } -func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { +func areInSubGroupG2(s []curve.G2Affine) bool { for i := range s { if !s[i].IsInSubGroup() { return false @@ -302,14 +303,6 @@ func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { return true } -func areInSubGroupG1(s []curve.G1Affine) bool { - return areInSubGroup(toRefs(s)) -} - -func areInSubGroupG2(s []curve.G2Affine) bool { - return areInSubGroup(toRefs(s)) -} - // bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
// all concatenated in the same slice func bivariateRandomMonomials(ends ...int) []fr.Element { diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 14384c7bf4..f4b287732c 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -201,7 +201,7 @@ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) + buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) buf.Write(sG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) @@ -253,7 +253,7 @@ func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contribution // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. -// prevCommitment is assumed to be valid. No subgroup check and the like. +// denom, num are assumed to be valid. No subgroup check and the like. 
func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { noG2 := denom.g2 == nil if noG2 != (num.g2 == nil) { @@ -285,15 +285,16 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error return nil } -func toRefs[T any](s []T) []*T { - res := make([]*T, len(s)) +func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { - res[i] = &s[i] + if !s[i].IsInSubGroup() { + return false + } } - return res + return true } -func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { +func areInSubGroupG2(s []curve.G2Affine) bool { for i := range s { if !s[i].IsInSubGroup() { return false @@ -302,14 +303,6 @@ func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { return true } -func areInSubGroupG1(s []curve.G1Affine) bool { - return areInSubGroup(toRefs(s)) -} - -func areInSubGroupG2(s []curve.G2Affine) bool { - return areInSubGroup(toRefs(s)) -} - // bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
// all concatenated in the same slice func bivariateRandomMonomials(ends ...int) []fr.Element { diff --git a/backend/groth16/bw6-633/mpcsetup/utils.go b/backend/groth16/bw6-633/mpcsetup/utils.go index 24ed45df5b..342bc9c429 100644 --- a/backend/groth16/bw6-633/mpcsetup/utils.go +++ b/backend/groth16/bw6-633/mpcsetup/utils.go @@ -201,7 +201,7 @@ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) + buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) buf.Write(sG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) @@ -253,7 +253,7 @@ func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contribution // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. -// prevCommitment is assumed to be valid. No subgroup check and the like. +// denom, num are assumed to be valid. No subgroup check and the like. 
func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { noG2 := denom.g2 == nil if noG2 != (num.g2 == nil) { @@ -285,15 +285,16 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error return nil } -func toRefs[T any](s []T) []*T { - res := make([]*T, len(s)) +func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { - res[i] = &s[i] + if !s[i].IsInSubGroup() { + return false + } } - return res + return true } -func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { +func areInSubGroupG2(s []curve.G2Affine) bool { for i := range s { if !s[i].IsInSubGroup() { return false @@ -302,14 +303,6 @@ func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { return true } -func areInSubGroupG1(s []curve.G1Affine) bool { - return areInSubGroup(toRefs(s)) -} - -func areInSubGroupG2(s []curve.G2Affine) bool { - return areInSubGroup(toRefs(s)) -} - // bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
// all concatenated in the same slice func bivariateRandomMonomials(ends ...int) []fr.Element { diff --git a/backend/groth16/bw6-761/mpcsetup/utils.go b/backend/groth16/bw6-761/mpcsetup/utils.go index e97ebb84af..88d7aeb797 100644 --- a/backend/groth16/bw6-761/mpcsetup/utils.go +++ b/backend/groth16/bw6-761/mpcsetup/utils.go @@ -201,7 +201,7 @@ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) + buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) buf.Write(sG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) @@ -253,7 +253,7 @@ func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contribution // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. -// prevCommitment is assumed to be valid. No subgroup check and the like. +// denom, num are assumed to be valid. No subgroup check and the like. 
func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { noG2 := denom.g2 == nil if noG2 != (num.g2 == nil) { @@ -285,15 +285,16 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error return nil } -func toRefs[T any](s []T) []*T { - res := make([]*T, len(s)) +func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { - res[i] = &s[i] + if !s[i].IsInSubGroup() { + return false + } } - return res + return true } -func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { +func areInSubGroupG2(s []curve.G2Affine) bool { for i := range s { if !s[i].IsInSubGroup() { return false @@ -302,14 +303,6 @@ func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { return true } -func areInSubGroupG1(s []curve.G1Affine) bool { - return areInSubGroup(toRefs(s)) -} - -func areInSubGroupG2(s []curve.G2Affine) bool { - return areInSubGroup(toRefs(s)) -} - // bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
// all concatenated in the same slice func bivariateRandomMonomials(ends ...int) []fr.Element { diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl index 63a30fccc3..1d37386c54 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl @@ -194,7 +194,7 @@ func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, // π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) + buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) buf.Write(sG1.Marshal()) buf.Write(challenge) spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) @@ -246,7 +246,7 @@ func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contribution // verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 // it checks the proof of knowledge of the contribution, and the fact that the product of the contribution // and previous commitment makes the new commitment. -// prevCommitment is assumed to be valid. No subgroup check and the like. +// denom, num are assumed to be valid. No subgroup check and the like. 
func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { noG2 := denom.g2 == nil if noG2 != (num.g2 == nil) { @@ -278,16 +278,8 @@ func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error return nil } -func toRefs[T any](s []T) []*T { - res := make([]*T, len(s)) - for i := range s { - res[i] = &s[i] - } - return res -} - -func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { - for i := range s { +func areInSubGroupG1(s []curve.G1Affine) bool { + for i := range s { if !s[i].IsInSubGroup() { return false } @@ -295,12 +287,13 @@ func areInSubGroup[T interface{ IsInSubGroup() bool }](s []T) bool { return true } -func areInSubGroupG1(s []curve.G1Affine) bool { - return areInSubGroup(toRefs(s)) -} - func areInSubGroupG2(s []curve.G2Affine) bool { - return areInSubGroup(toRefs(s)) + for i := range s { + if !s[i].IsInSubGroup() { + return false + } + } + return true } // bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... From d60efa47ba05a1decbb12e1c19fb24c3dab17090 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 18 Dec 2024 19:08:13 -0600 Subject: [PATCH 081/105] style remove deadcode and newline --- .../backend/template/zkpschemes/groth16/groth16.setup.go.tmpl | 1 - io/roundtrip.go | 2 -- std/gkr/api_test.go | 1 - 3 files changed, 4 deletions(-) diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl index 7698931372..712e5be68c 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl @@ -12,7 +12,6 @@ import ( "math/bits" ) - // ProvingKey is used by a Groth16 prover to encode a proof of a statement // Notation follows Figure 4. 
in DIZK paper https://eprint.iacr.org/2018/691.pdf type ProvingKey struct { diff --git a/io/roundtrip.go b/io/roundtrip.go index 66d38aa7c8..c598806a87 100644 --- a/io/roundtrip.go +++ b/io/roundtrip.go @@ -53,8 +53,6 @@ func RoundTripCheck(from any, to func() any) error { return err } - //fmt.Println(base64.StdEncoding.EncodeToString(buf.Bytes()[:written])) - if err := reconstruct(written); err != nil { return err } diff --git a/std/gkr/api_test.go b/std/gkr/api_test.go index 25fc3c3c1e..1dec782f71 100644 --- a/std/gkr/api_test.go +++ b/std/gkr/api_test.go @@ -268,7 +268,6 @@ func TestApiMul(t *testing.T) { func BenchmarkMiMCMerkleTree(b *testing.B) { depth := 14 - //fmt.Println("start") bottom := make([]frontend.Variable, 1< Date: Wed, 18 Dec 2024 19:09:43 -0600 Subject: [PATCH 082/105] fix staticcheck --- backend/groth16/internal/utils.go | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/groth16/internal/utils.go b/backend/groth16/internal/utils.go index be4bcf34fa..0694b18497 100644 --- a/backend/groth16/internal/utils.go +++ b/backend/groth16/internal/utils.go @@ -51,7 +51,6 @@ func (i *MergeIterator) findLeast() { i.leastIndex = j } } - return } // Peek returns the next smallest value and the index of the slice it came from From 4ecbbc157c2e78d99e7b12e818fc1a72c4a801dc Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Fri, 20 Dec 2024 14:34:49 -0600 Subject: [PATCH 083/105] build: update gnark-crypto dep --- go.mod | 4 ++-- go.sum | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/go.mod b/go.mod index 4357be7b08..d9c5bb70a4 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/blang/semver/v4 v4.0.0 github.com/consensys/bavard v0.1.24 github.com/consensys/compress v0.2.5 - github.com/consensys/gnark-crypto v0.14.1-0.20241218224600-9f5ee8568149 + github.com/consensys/gnark-crypto v0.14.1-0.20241220202637-e12162d89a97 github.com/fxamacker/cbor/v2 v2.7.0 
github.com/google/go-cmp v0.6.0 github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 @@ -34,4 +34,4 @@ require ( golang.org/x/sys v0.28.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect rsc.io/tmplfunc v0.0.3 // indirect -) \ No newline at end of file +) diff --git a/go.sum b/go.sum index 358f290d97..fa1694daaf 100644 --- a/go.sum +++ b/go.sum @@ -61,8 +61,8 @@ github.com/consensys/bavard v0.1.24 h1:Lfe+bjYbpaoT7K5JTFoMi5wo9V4REGLvQQbHmatoN github.com/consensys/bavard v0.1.24/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs= github.com/consensys/compress v0.2.5 h1:gJr1hKzbOD36JFsF1AN8lfXz1yevnJi1YolffY19Ntk= github.com/consensys/compress v0.2.5/go.mod h1:pyM+ZXiNUh7/0+AUjUf9RKUM6vSH7T/fsn5LLS0j1Tk= -github.com/consensys/gnark-crypto v0.14.1-0.20241218224600-9f5ee8568149 h1:afTz+KUgD2+uD7ElmkNyaeR3jCG3PdBFj5mEAbdcUe4= -github.com/consensys/gnark-crypto v0.14.1-0.20241218224600-9f5ee8568149/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc= +github.com/consensys/gnark-crypto v0.14.1-0.20241220202637-e12162d89a97 h1:he7lvcVw/9jbbVKOEBL8w8wMzWTiBUCHUWTgDJLUgU0= +github.com/consensys/gnark-crypto v0.14.1-0.20241220202637-e12162d89a97/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= From b83a044d298048af7ebbfffcbc6606f944c49686 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Sat, 21 Dec 2024 08:51:27 -0600 Subject: [PATCH 084/105] build: update gnark-crypto dep --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index d9c5bb70a4..81af06ca79 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/blang/semver/v4 v4.0.0 github.com/consensys/bavard v0.1.24 
github.com/consensys/compress v0.2.5 - github.com/consensys/gnark-crypto v0.14.1-0.20241220202637-e12162d89a97 + github.com/consensys/gnark-crypto v0.14.1-0.20241221144950-f08f759bd65b github.com/fxamacker/cbor/v2 v2.7.0 github.com/google/go-cmp v0.6.0 github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 diff --git a/go.sum b/go.sum index fa1694daaf..b11293f5ce 100644 --- a/go.sum +++ b/go.sum @@ -61,8 +61,8 @@ github.com/consensys/bavard v0.1.24 h1:Lfe+bjYbpaoT7K5JTFoMi5wo9V4REGLvQQbHmatoN github.com/consensys/bavard v0.1.24/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs= github.com/consensys/compress v0.2.5 h1:gJr1hKzbOD36JFsF1AN8lfXz1yevnJi1YolffY19Ntk= github.com/consensys/compress v0.2.5/go.mod h1:pyM+ZXiNUh7/0+AUjUf9RKUM6vSH7T/fsn5LLS0j1Tk= -github.com/consensys/gnark-crypto v0.14.1-0.20241220202637-e12162d89a97 h1:he7lvcVw/9jbbVKOEBL8w8wMzWTiBUCHUWTgDJLUgU0= -github.com/consensys/gnark-crypto v0.14.1-0.20241220202637-e12162d89a97/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc= +github.com/consensys/gnark-crypto v0.14.1-0.20241221144950-f08f759bd65b h1:OHVIrLobpH31u9k2F5RmqFPkasbXcTMZ9E2Jzimu+cM= +github.com/consensys/gnark-crypto v0.14.1-0.20241221144950-f08f759bd65b/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= From b994b506048ccbddca3a6e5d83e40ae3ab4d616d Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Tue, 24 Dec 2024 16:39:17 -0600 Subject: [PATCH 085/105] refac: remove tests for utils --- backend/groth16/bn254/mpcsetup/unit_test.go | 245 -------------------- 1 file changed, 245 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index 
b7d24bb800..f96b2ec2dd 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -1,7 +1,6 @@ package mpcsetup import ( - "bytes" "fmt" "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" @@ -20,48 +19,6 @@ import ( "testing" ) -// small tests for sub-functionalities of the mpc setup -// this file is not autogenerated, and not generified for other curves - -func TestContributionPok(t *testing.T) { - const ( - pokChallenge = "challenge" - pokDst = 1 - ) - x0, err := curve.HashToG1([]byte("contribution test"), nil) - require.NoError(t, err) - proof, d := newValueUpdate([]byte(pokChallenge), pokDst) - var ( - x1 curve.G1Affine - dI big.Int - ) - d.BigInt(&dI) - x1.ScalarMultiplication(&x0, &dI) - - // verify proof - no G2 - require.NoError(t, proof.verify(pair{x0, nil}, pair{x1, nil}, []byte(pokChallenge), pokDst)) - - // verify proof - with G2 - y0, err := curve.RandomOnG2() - require.NoError(t, err) - var y1 curve.G2Affine - y1.ScalarMultiplication(&y0, &dI) - - require.NoError(t, proof.verify(pair{x0, &y0}, pair{x1, &y1}, []byte(pokChallenge), pokDst)) - - // read/write round-trip - var bb bytes.Buffer - n0, err := proof.WriteTo(&bb) - require.NoError(t, err) - var proofBack valueUpdate - n1, err := proofBack.ReadFrom(&bb) - require.NoError(t, err) - require.Equal(t, n0, n1) - - require.NoError(t, proofBack.verify(pair{x0, nil}, pair{x1, nil}, []byte(pokChallenge), pokDst)) - require.NoError(t, proofBack.verify(pair{x0, &y0}, pair{x1, &y1}, []byte(pokChallenge), pokDst)) -} - // TestSetupBeaconOnly tests the setup/key extraction // as well as the random beacon contribution // without any untrusted contributors @@ -115,13 +72,6 @@ func TestOnePhase1Contribute(t *testing.T) { testAll(t, 2, 0) } -func TestUpdateCheck(t *testing.T) { - _, _, g1, g2 := curve.Generators() - g1Slice := []curve.G1Affine{g1, g1, g1} - g2Slice := []curve.G2Affine{g2, g2} - require.NoError(t, 
multiValueUpdateCheck(g1Slice, g2Slice, g1Slice, g1Slice)) -} - func commonsSmallValues(N, tau, alpha, beta uint64) SrsCommons { var ( res SrsCommons @@ -244,201 +194,6 @@ func TestPedersen(t *testing.T) { } } -func TestBivariateRandomMonomials(t *testing.T) { - xDeg := []int{3, 2, 3} - ends := partialSums(xDeg...) - values := bivariateRandomMonomials(ends...) - //extract the variables - x := make([]fr.Element, slices.Max(xDeg)) - y := make([]fr.Element, len(ends)) - x[1].Div(&values[1], &values[0]) - y[1].Div(&values[xDeg[0]], &values[0]) - - x[0].SetOne() - y[0].SetOne() - - for i := range x[:len(x)-1] { - x[i+1].Mul(&x[i], &x[1]) - } - - for i := range y[:len(x)-1] { - y[i+1].Mul(&y[i], &y[1]) - } - - prevEnd := 0 - for i := range ends { - for j := range xDeg[i] { - var z fr.Element - z.Mul(&y[i], &x[j]) - require.Equal(t, z.String(), values[prevEnd+j].String(), "X^%d Y^%d: expected %s, encountered %s", j, i) - } - prevEnd = ends[i] - } -} - -func TestLinearCombinationsG1(t *testing.T) { - - test := func(ends []int, powers, truncatedPowers, shiftedPowers []fr.Element, A ...curve.G1Affine) { - - multiExpConfig := ecc.MultiExpConfig{ - NbTasks: 1, - } - - if len(A) == 0 { - A = make([]curve.G1Affine, ends[len(ends)-1]) - var err error - for i := range A { - A[i], err = curve.HashToG1([]byte{byte(i)}, nil) - require.NoError(t, err) - } - } - - truncated, shifted := linearCombinationsG1(slices.Clone(A), powers, ends) - - var res curve.G1Affine - - _, err := res.MultiExp(A, truncatedPowers, multiExpConfig) - require.NoError(t, err) - require.Equal(t, res, truncated, "truncated") - - _, err = res.MultiExp(A, shiftedPowers, multiExpConfig) - require.NoError(t, err) - require.Equal(t, res, shifted, "shifted") - } - - _, _, g, _ := curve.Generators() - var infty curve.G1Affine - - test( - []int{3}, - frs(1, -1, 1), - frs(1, -1, 0), - frs(0, 1, -1), - infty, g, infty, - ) - - test( - []int{3}, - frs(1, 1, 1), - frs(1, 1, 0), - frs(0, 1, 1), - infty, g, infty, - ) - - 
test( - []int{3}, - frs(1, 1, 1), - frs(1, 1, 0), - frs(0, 1, 1), - infty, infty, g, - ) - - test( - []int{3}, - frs(1, 1, 1), - frs(1, 1, 0), - frs(0, 1, 1), - g, infty, infty, - ) - - test( - []int{3}, - frs(1, 2, 4), - frs(1, 2, 0), - frs(0, 1, 2), - ) - - test( - []int{3, 6}, - frs(1, 1, 1, 1, 1, 1), - frs(1, 1, 0, 1, 1, 0), - frs(0, 1, 1, 0, 1, 1), - g, infty, infty, infty, infty, infty, - ) - - test( - []int{3, 6}, - frs(1, -1, 1, 1, -1, 1), - frs(1, -1, 0, 1, -1, 0), - frs(0, 1, -1, 0, 1, -1), - g, infty, infty, infty, infty, infty, - ) - - test( - []int{4, 7}, - frs(1, 2, 4, 8, 3, 6, 12), - frs(1, 2, 4, 0, 3, 6, 0), - frs(0, 1, 2, 4, 0, 3, 6), - ) -} - -func TestLinearCombinationsG2(t *testing.T) { - test := func(powers []fr.Element, A ...curve.G2Affine) { - - multiExpConfig := ecc.MultiExpConfig{ - NbTasks: 1, - } - - if len(A) == 0 { - A = make([]curve.G2Affine, len(powers)) - var err error - for i := range A { - A[i], err = curve.RandomOnG2() - require.NoError(t, err) - } - } - - truncated, shifted := linearCombinationsG2(slices.Clone(A), powers) - - truncatedPowers := make([]fr.Element, len(powers)) - copy(truncatedPowers[:len(truncatedPowers)-1], powers) - shiftedPowers := make([]fr.Element, len(powers)) - copy(shiftedPowers[1:], powers) - - var res curve.G2Affine - - _, err := res.MultiExp(A, truncatedPowers, multiExpConfig) - require.NoError(t, err) - require.Equal(t, res, truncated, "truncated") - - _, err = res.MultiExp(A, shiftedPowers, multiExpConfig) - require.NoError(t, err) - require.Equal(t, res, shifted, "shifted") - } - - _, _, _, g := curve.Generators() - var infty curve.G2Affine - - test( - frs(1, 2, 4), - infty, infty, g, - ) - - test( - frs(1, -1, 1), - ) - - test( - frs(1, 3, 9, 27, 81), - ) -} - -func ones(N int) []fr.Element { - res := make([]fr.Element, N) - for i := range res { - res[i].SetOne() - } - return res -} - -func frs(x ...int) []fr.Element { - res := make([]fr.Element, len(x)) - for i := range res { - 
res[i].SetInt64(int64(x[i])) - } - return res -} - func TestPhase2Serialization(t *testing.T) { testRoundtrip := func(_cs constraint.ConstraintSystem) { From bf2d2b3f16a8435332d9be86eb90a631335c827b Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 13:58:43 -0600 Subject: [PATCH 086/105] refactor: trim down mpcsetup "utils" --- backend/groth16/bn254/mpcsetup/utils.go | 291 ------------------------ 1 file changed, 291 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index f4b287732c..0a94dbe366 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -6,15 +6,11 @@ package mpcsetup import ( - "bytes" - "errors" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" - "runtime" ) func bitReverse[T any](a []T) { @@ -29,11 +25,6 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a -func randomMonomials(N int) []fr.Element { - return bivariateRandomMonomials(N) -} - // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { @@ -82,209 +73,6 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
-func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { - var nd1 curve.G1Affine - nd1.Neg(&d1) - res, err := curve.PairingCheck( - []curve.G1Affine{n1, nd1}, - []curve.G2Affine{d2, n2}) - if err != nil { - panic(err) - } - return res -} - -// returns ∑ rᵢAᵢ -func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { - nc := runtime.NumCPU() - var res curve.G1Affine - if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { - panic(err) - } - return res -} - -// linearCombinationsG1 returns -// -// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] -// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] -// .... (truncated) -// -// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] -// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] -// .... (shifted) -// -// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. -// Also assumed that powers[0] = 1. -// The slices powers and A will be modified -func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { - if ends[len(ends)-1] != len(A) || len(A) != len(powers) { - panic("lengths mismatch") - } - - // zero out the large coefficients - for i := range ends { - powers[ends[i]-1].SetZero() - } - - msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} - - if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { - panic(err) - } - - var rInvNeg fr.Element - rInvNeg.Inverse(&powers[1]) - rInvNeg.Neg(&rInvNeg) - prevEnd := 0 - - // r⁻¹.truncated = - // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] - // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] - // ... - // - // compute shifted as - // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... 
- // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... - // + r⁻¹.truncated - for i := range ends { - powers[2*i].Mul(&powers[prevEnd], &rInvNeg) - powers[2*i+1] = powers[ends[i]-2] - A[2*i] = A[prevEnd] - A[2*i+1] = A[ends[i]-1] - prevEnd = ends[i] - } - powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated - A[2*len(ends)] = truncated - - // TODO @Tabaie O(1) MSM worth it? - if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { - panic(err) - } - - return -} - -// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i -// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). -// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ -func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { - - N := len(A) - - if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { - panic(err) - } - - // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] - var ( - x fr.Element - i big.Int - ) - x.Neg(&rPowers[N-2]) - x.BigInt(&i) - truncated. - ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] - Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] - - rPowers[1].BigInt(&i) - truncated. - ScalarMultiplication(&truncated, &i). 
// r.shifted - rᴺ⁻¹.A[N-1] - Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] - - return -} - -// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) -// it is to be used as a challenge for generating a proof of knowledge to x -// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) -func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) - buf.Write(sG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) - } - return spG2 -} - -type pair struct { - g1 curve.G1Affine - g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. -} - -// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero -func (p *pair) validUpdate() bool { - // if the contribution is 0 the product is doomed to be 0. - // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail - return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) -} - -type valueUpdate struct { - contributionCommitment curve.G1Affine // x or [Xⱼ]₁ - contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ -} - -// newValueUpdate produces values associated with contribution to an existing value. -// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { - if _, err := contributionValue.SetRandom(); err != nil { - panic(err) - } - var contributionValueI big.Int - contributionValue.BigInt(&contributionValueI) - - _, _, gen1, _ := curve.Generators() - proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) - - // proof of knowledge to commitment. 
Algorithm 3 from section 3.7 - pokBase := genR(proof.contributionCommitment, challenge, dst) // r - proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) - - return -} - -// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) -// option for linear combination vector - -// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 -// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution -// and previous commitment makes the new commitment. -// denom, num are assumed to be valid. No subgroup check and the like. -func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { - noG2 := denom.g2 == nil - if noG2 != (num.g2 == nil) { - return errors.New("erasing or creating g2 values") - } - - if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { - return errors.New("contribution values subgroup check failed") - } - - // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base - _, _, g1, _ := curve.Generators() - if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r - return errors.New("contribution proof of knowledge verification failed") - } - - // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { - return errors.New("g2 update inconsistent") - } - - // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) - // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { - return errors.New("g1 update inconsistent") - } - - return nil -} - func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { if !s[i].IsInSubGroup() { @@ -303,54 +91,6 @@ func areInSubGroupG2(s []curve.G2Affine) bool { return true } -// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... -// all concatenated in the same slice -func bivariateRandomMonomials(ends ...int) []fr.Element { - if len(ends) == 0 { - return nil - } - - res := make([]fr.Element, ends[len(ends)-1]) - if _, err := res[1].SetRandom(); err != nil { - panic(err) - } - setPowers(res[:ends[0]]) - - if len(ends) == 1 { - return res - } - - y := make([]fr.Element, len(ends)) - if _, err := y[1].SetRandom(); err != nil { - panic(err) - } - setPowers(y) - - for d := 1; d < len(ends); d++ { - xdeg := ends[d] - ends[d-1] - if xdeg > ends[0] { - panic("impl detail: first maximum degree for x must be the greatest") - } - - for i := range xdeg { - res[ends[d-1]+i].Mul(&res[i], &y[d]) - } - } - - return res -} - -// sets x[i] = x[1]ⁱ -func setPowers(x []fr.Element) { - if len(x) == 0 { - return - } - x[0].SetOne() - for i := 2; i < len(x); i++ { - x[i].Mul(&x[i-1], &x[1]) - } -} - func partialSums(s ...int) []int { if len(s) == 0 { return nil @@ -362,34 +102,3 @@ func partialSums(s ...int) []int { } return sums } - -func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { - var ( - bb bytes.Buffer - err error - ) - bb.Grow(len(hash) + len(beaconChallenge)) - bb.Write(hash) - bb.Write(beaconChallenge) - - res := make([]fr.Element, 1) - - allNonZero := func() bool { - for i := range res { - if res[i].IsZero() { - 
return false - } - } - return true - } - - // cryptographically unlikely for this to be run more than once - for !allNonZero() { - if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { - panic(err) - } - bb.WriteByte('=') // padding just so that the hash is different next time - } - - return res -} From b3dc4015cb0ff1d641e621dbf357ba7fc0f66084 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 14:44:15 -0600 Subject: [PATCH 087/105] refactor: groth16 mpcsetup to use gnark-crypto tools --- backend/groth16/bn254/mpcsetup/phase1.go | 83 ++++-------------------- backend/groth16/bn254/mpcsetup/phase2.go | 33 +++++----- 2 files changed, 28 insertions(+), 88 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 5710430518..e09d4dbcf8 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -13,6 +13,7 @@ import ( "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/mpcsetup" "math/big" ) @@ -37,7 +38,7 @@ type SrsCommons struct { // Also known as "Powers of Tau" type Phase1 struct { proofs struct { - Tau, Alpha, Beta valueUpdate + Tau, Alpha, Beta mpcsetup.UpdateProof } parameters SrsCommons Challenge []byte // Hash of the transcript PRIOR to this participant @@ -53,9 +54,9 @@ func (p *Phase1) Contribute() { tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) - p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) - p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = 
mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } @@ -130,7 +131,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { - newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) return p.parameters } @@ -168,13 +169,16 @@ func (p *Phase1) Verify(next *Phase1) error { } // verify updates to τ, α, β - if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, + {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } @@ 
-185,7 +189,7 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } - return multiValueUpdateCheck( + return mpcsetup.SameRatioMany( p.parameters.G1.Tau, p.parameters.G2.Tau, p.parameters.G1.AlphaTau, @@ -193,67 +197,6 @@ func (p *Phase1) Verify(next *Phase1) error { ) } -// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l -// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ -func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { - // lemma: let K be a field and - // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ - // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ - // polynomials in K[X,Y,Z]. - // if F/F' = G/G' - // then F/F' = G/G' ∈ K - // - // view our polynomials in K[X,Y,Z] - // By multiplying out the polynomials we get - // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ - // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 - // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ - // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ - // fᵢⱼ = x f'ᵢⱼ - // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ - // gᵢ = x g'ᵢ - - // now we use this to check that: - // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ - // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ - // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ - // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ - - // construct the polynomials - // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² - // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² - // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² - // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² - - // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: - // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ - // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ - - // from previous checks we already know: - // 1. a₀ = 1 - // 2. b₀ = 1 - // 3. c₀ = α - // 4. 
d₀ = β - // and so the desired results follow - - ends := partialSums(len(a), len(c), len(d)) - - g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) - g1s = append(g1s, a...) - g1s = append(g1s, c...) - g1s = append(g1s, d...) - - g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) - - if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { - return errors.New("multi-value update check failed") - } - - return nil - -} - func (p *Phase1) hash() []byte { sha := sha256.New() p.WriteTo(sha) diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index de20b8206a..87421e7bfd 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -12,6 +12,7 @@ import ( "fmt" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark-crypto/ecc/bn254/mpcsetup" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -51,8 +52,8 @@ type Phase2 struct { } // Proofs of update correctness - Sigmas []valueUpdate - Delta valueUpdate + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof // Challenge is the hash of the PREVIOUS contribution Challenge []byte @@ -72,15 +73,6 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) - - verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { - g1Num := linearCombination(g1Numerator, r) - g1Denom := linearCombination(g1Denominator, r) - - return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) - } - // verify proof of knowledge of contributions to the 
σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment @@ -88,7 +80,9 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("commitment proving key subgroup check failed") } - if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } @@ -99,9 +93,12 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("derived values 𝔾₁ subgroup check failed") } - denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) - num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) - if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, + {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, + {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } @@ -151,14 +148,14 @@ func (p *Phase2) Contribute() { // sample value contributions and provide correctness proofs var delta fr.Element - p.Delta, delta = newValueUpdate(p.Challenge, 1) + p.Delta = mpcsetup.UpdateValues(&delta, 
p.Challenge, 1) sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) if len(sigma) > 255 { panic("too many commitments") // DST collision } for i := range sigma { - p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) } p.update(&delta, sigma) @@ -276,7 +273,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) - p.Sigmas = make([]valueUpdate, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) From 70fc9488a7463513e5de5a9215edfcc628f6d087 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 14:45:33 -0600 Subject: [PATCH 088/105] refactor: use max instead of utils.Max --- backend/groth16/bls12-377/verify.go | 3 +-- backend/groth16/bls12-381/verify.go | 3 +-- backend/groth16/bls24-315/verify.go | 3 +-- backend/groth16/bls24-317/verify.go | 3 +-- backend/groth16/bn254/verify.go | 3 +-- backend/groth16/bw6-633/verify.go | 3 +-- backend/groth16/bw6-761/verify.go | 3 +-- .../backend/template/zkpschemes/groth16/groth16.verify.go.tmpl | 3 +-- 8 files changed, 8 insertions(+), 16 deletions(-) diff --git a/backend/groth16/bls12-377/verify.go b/backend/groth16/bls12-377/verify.go index ff95c7ec6b..48a44c8bd6 100644 --- a/backend/groth16/bls12-377/verify.go +++ b/backend/groth16/bls12-377/verify.go @@ -16,7 +16,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" 
"github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/backend/groth16/bls12-381/verify.go b/backend/groth16/bls12-381/verify.go index 798c093bde..e0b88ce047 100644 --- a/backend/groth16/bls12-381/verify.go +++ b/backend/groth16/bls12-381/verify.go @@ -16,7 +16,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/backend/groth16/bls24-315/verify.go b/backend/groth16/bls24-315/verify.go index 580ae2031f..8e5aeb859e 100644 --- a/backend/groth16/bls24-315/verify.go +++ b/backend/groth16/bls24-315/verify.go @@ -16,7 +16,6 @@ import ( 
"github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/backend/groth16/bls24-317/verify.go b/backend/groth16/bls24-317/verify.go index 75ab20a555..9673ae5832 100644 --- a/backend/groth16/bls24-317/verify.go +++ b/backend/groth16/bls24-317/verify.go @@ -16,7 +16,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git 
a/backend/groth16/bn254/verify.go b/backend/groth16/bn254/verify.go index eadaedd522..318d6228f2 100644 --- a/backend/groth16/bn254/verify.go +++ b/backend/groth16/bn254/verify.go @@ -22,7 +22,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -70,7 +69,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/backend/groth16/bw6-633/verify.go b/backend/groth16/bw6-633/verify.go index 24d2f97a45..48781fe8cd 100644 --- a/backend/groth16/bw6-633/verify.go +++ b/backend/groth16/bw6-633/verify.go @@ -16,7 +16,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := 
make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/backend/groth16/bw6-761/verify.go b/backend/groth16/bw6-761/verify.go index 81b54ece39..0e6af71d53 100644 --- a/backend/groth16/bw6-761/verify.go +++ b/backend/groth16/bw6-761/verify.go @@ -16,7 +16,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl index 06abd155e7..24682c2b74 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl @@ -17,7 +17,6 @@ import ( {{- template "import_fr" . }} {{- template "import_pedersen" .}} {{- template "import_hash_to_field" . 
}} - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -65,7 +64,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) From 8d06fc5fa75f2e05d9917504f8b54db79e3a002d Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 14:48:22 -0600 Subject: [PATCH 089/105] fix: hash() panic on error --- backend/groth16/bn254/mpcsetup/phase1.go | 4 +++- backend/groth16/bn254/mpcsetup/phase2.go | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index e09d4dbcf8..d64db75e47 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -199,7 +199,9 @@ func (p *Phase1) Verify(next *Phase1) error { func (p *Phase1) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 87421e7bfd..920f0bfce5 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -339,7 +339,9 @@ func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c func (p *Phase2) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } 
sha.Write(p.Challenge) return sha.Sum(nil) } From f63da809c9cdaf782143a554f75c03dc6f34a6d3 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 14:52:15 -0600 Subject: [PATCH 090/105] refactor: clean up unused funcs --- backend/groth16/bn254/mpcsetup/phase2.go | 12 ------------ backend/groth16/bn254/mpcsetup/utils.go | 12 ------------ 2 files changed, 24 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 920f0bfce5..01bf93d4f6 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -345,15 +345,3 @@ func (p *Phase2) hash() []byte { sha.Write(p.Challenge) return sha.Sum(nil) } - -func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { - l := 0 - for _, s := range s { - l += len(s) - } - res := make([]curve.G1Affine, 0, l) - for _, s := range s { - res = append(res, s...) - } - return res -} diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 0a94dbe366..6e14556802 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -90,15 +90,3 @@ func areInSubGroupG2(s []curve.G2Affine) bool { } return true } - -func partialSums(s ...int) []int { - if len(s) == 0 { - return nil - } - sums := make([]int, len(s)) - sums[0] = s[0] - for i := 1; i < len(s); i++ { - sums[i] = sums[i-1] + s[i] - } - return sums -} From 8f5e9824f88b6dad1b5d7deab3004bf3ccf2d2c4 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 14:58:12 -0600 Subject: [PATCH 091/105] chore: generify changes --- backend/groth16/bls12-377/mpcsetup/phase1.go | 87 +---- backend/groth16/bls12-377/mpcsetup/phase2.go | 49 ++- backend/groth16/bls12-377/mpcsetup/utils.go | 299 ----------------- backend/groth16/bls12-381/mpcsetup/phase1.go | 87 +---- backend/groth16/bls12-381/mpcsetup/phase2.go | 49 ++- 
backend/groth16/bls12-381/mpcsetup/utils.go | 299 ----------------- backend/groth16/bls24-315/mpcsetup/phase1.go | 87 +---- backend/groth16/bls24-315/mpcsetup/phase2.go | 49 ++- backend/groth16/bls24-315/mpcsetup/utils.go | 299 ----------------- backend/groth16/bls24-317/mpcsetup/phase1.go | 87 +---- backend/groth16/bls24-317/mpcsetup/phase2.go | 49 ++- backend/groth16/bls24-317/mpcsetup/utils.go | 299 ----------------- backend/groth16/bw6-633/mpcsetup/phase1.go | 87 +---- backend/groth16/bw6-633/mpcsetup/phase2.go | 49 ++- backend/groth16/bw6-633/mpcsetup/utils.go | 299 ----------------- backend/groth16/bw6-761/mpcsetup/phase1.go | 87 +---- backend/groth16/bw6-761/mpcsetup/phase2.go | 49 ++- backend/groth16/bw6-761/mpcsetup/utils.go | 299 ----------------- go.mod | 2 +- go.sum | 4 +- .../groth16/mpcsetup/phase1.go.tmpl | 87 +---- .../groth16/mpcsetup/phase2.go.tmpl | 49 ++- .../zkpschemes/groth16/mpcsetup/utils.go.tmpl | 303 +----------------- 23 files changed, 243 insertions(+), 2812 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase1.go b/backend/groth16/bls12-377/mpcsetup/phase1.go index 40f1cf4554..b28840c9dc 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase1.go +++ b/backend/groth16/bls12-377/mpcsetup/phase1.go @@ -13,6 +13,7 @@ import ( "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/mpcsetup" "math/big" ) @@ -37,7 +38,7 @@ type SrsCommons struct { // Also known as "Powers of Tau" type Phase1 struct { proofs struct { - Tau, Alpha, Beta valueUpdate + Tau, Alpha, Beta mpcsetup.UpdateProof } parameters SrsCommons Challenge []byte // Hash of the transcript PRIOR to this participant @@ -53,9 +54,9 @@ func (p *Phase1) Contribute() { tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) - p.proofs.Alpha, alphaContrib = 
newValueUpdate(p.Challenge, 2) - p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } @@ -130,7 +131,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { - newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) return p.parameters } @@ -168,13 +169,16 @@ func (p *Phase1) Verify(next *Phase1) error { } // verify updates to τ, α, β - if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + if err := 
next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, + {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } @@ -185,7 +189,7 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } - return multiValueUpdateCheck( + return mpcsetup.SameRatioMany( p.parameters.G1.Tau, p.parameters.G2.Tau, p.parameters.G1.AlphaTau, @@ -193,70 +197,11 @@ func (p *Phase1) Verify(next *Phase1) error { ) } -// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l -// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ -func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { - // lemma: let K be a field and - // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ - // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ - // polynomials in K[X,Y,Z]. - // if F/F' = G/G' - // then F/F' = G/G' ∈ K - // - // view our polynomials in K[X,Y,Z] - // By multiplying out the polynomials we get - // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ - // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 - // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ - // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ - // fᵢⱼ = x f'ᵢⱼ - // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ - // gᵢ = x g'ᵢ - - // now we use this to check that: - // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ - // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ - // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ - // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ - - // construct the polynomials - // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² - // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² - // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² - // G' ≔ b₁ + b₂Z + ... 
+ bₙ₋₁Zᴺ⁻² - - // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: - // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ - // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ - - // from previous checks we already know: - // 1. a₀ = 1 - // 2. b₀ = 1 - // 3. c₀ = α - // 4. d₀ = β - // and so the desired results follow - - ends := partialSums(len(a), len(c), len(d)) - - g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) - g1s = append(g1s, a...) - g1s = append(g1s, c...) - g1s = append(g1s, d...) - - g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) - - if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { - return errors.New("multi-value update check failed") - } - - return nil - -} - func (p *Phase1) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bls12-377/mpcsetup/phase2.go b/backend/groth16/bls12-377/mpcsetup/phase2.go index 845f74a824..d348ed4bdb 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase2.go +++ b/backend/groth16/bls12-377/mpcsetup/phase2.go @@ -12,6 +12,7 @@ import ( "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-377/mpcsetup" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -51,8 +52,8 @@ type Phase2 struct { } // Proofs of update correctness - Sigmas []valueUpdate - Delta valueUpdate + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof // Challenge is the hash of the PREVIOUS contribution Challenge []byte @@ -72,15 +73,6 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 
1) - - verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { - g1Num := linearCombination(g1Numerator, r) - g1Denom := linearCombination(g1Denominator, r) - - return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) - } - // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment @@ -88,7 +80,9 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("commitment proving key subgroup check failed") } - if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } @@ -99,9 +93,12 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("derived values 𝔾₁ subgroup check failed") } - denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) - num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) - if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, + {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, + {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their 
denominator, we will do it "backwards" + {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } @@ -151,14 +148,14 @@ func (p *Phase2) Contribute() { // sample value contributions and provide correctness proofs var delta fr.Element - p.Delta, delta = newValueUpdate(p.Challenge, 1) + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) if len(sigma) > 255 { panic("too many commitments") // DST collision } for i := range sigma { - p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) } p.update(&delta, sigma) @@ -276,7 +273,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) - p.Sigmas = make([]valueUpdate, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) @@ -342,19 +339,9 @@ func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c func (p *Phase2) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } - -func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { - l := 0 - for _, s := range s { - l += len(s) - } - res := make([]curve.G1Affine, 0, l) - for _, s := range s { - res = append(res, s...) 
- } - return res -} diff --git a/backend/groth16/bls12-377/mpcsetup/utils.go b/backend/groth16/bls12-377/mpcsetup/utils.go index f35078fd09..48bba02914 100644 --- a/backend/groth16/bls12-377/mpcsetup/utils.go +++ b/backend/groth16/bls12-377/mpcsetup/utils.go @@ -29,11 +29,6 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a -func randomMonomials(N int) []fr.Element { - return bivariateRandomMonomials(N) -} - // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { @@ -82,209 +77,6 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. -func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { - var nd1 curve.G1Affine - nd1.Neg(&d1) - res, err := curve.PairingCheck( - []curve.G1Affine{n1, nd1}, - []curve.G2Affine{d2, n2}) - if err != nil { - panic(err) - } - return res -} - -// returns ∑ rᵢAᵢ -func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { - nc := runtime.NumCPU() - var res curve.G1Affine - if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { - panic(err) - } - return res -} - -// linearCombinationsG1 returns -// -// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] -// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] -// .... (truncated) -// -// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] -// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] -// .... (shifted) -// -// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. -// Also assumed that powers[0] = 1. 
-// The slices powers and A will be modified -func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { - if ends[len(ends)-1] != len(A) || len(A) != len(powers) { - panic("lengths mismatch") - } - - // zero out the large coefficients - for i := range ends { - powers[ends[i]-1].SetZero() - } - - msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} - - if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { - panic(err) - } - - var rInvNeg fr.Element - rInvNeg.Inverse(&powers[1]) - rInvNeg.Neg(&rInvNeg) - prevEnd := 0 - - // r⁻¹.truncated = - // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] - // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] - // ... - // - // compute shifted as - // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... - // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... - // + r⁻¹.truncated - for i := range ends { - powers[2*i].Mul(&powers[prevEnd], &rInvNeg) - powers[2*i+1] = powers[ends[i]-2] - A[2*i] = A[prevEnd] - A[2*i+1] = A[ends[i]-1] - prevEnd = ends[i] - } - powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated - A[2*len(ends)] = truncated - - // TODO @Tabaie O(1) MSM worth it? - if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { - panic(err) - } - - return -} - -// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i -// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). 
-// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ -func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { - - N := len(A) - - if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { - panic(err) - } - - // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] - var ( - x fr.Element - i big.Int - ) - x.Neg(&rPowers[N-2]) - x.BigInt(&i) - truncated. - ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] - Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] - - rPowers[1].BigInt(&i) - truncated. - ScalarMultiplication(&truncated, &i). // r.shifted - rᴺ⁻¹.A[N-1] - Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] - - return -} - -// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) -// it is to be used as a challenge for generating a proof of knowledge to x -// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) -func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) - buf.Write(sG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) - } - return spG2 -} - -type pair struct { - g1 curve.G1Affine - g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. -} - -// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero -func (p *pair) validUpdate() bool { - // if the contribution is 0 the product is doomed to be 0. 
- // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail - return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) -} - -type valueUpdate struct { - contributionCommitment curve.G1Affine // x or [Xⱼ]₁ - contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ -} - -// newValueUpdate produces values associated with contribution to an existing value. -// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { - if _, err := contributionValue.SetRandom(); err != nil { - panic(err) - } - var contributionValueI big.Int - contributionValue.BigInt(&contributionValueI) - - _, _, gen1, _ := curve.Generators() - proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) - - // proof of knowledge to commitment. Algorithm 3 from section 3.7 - pokBase := genR(proof.contributionCommitment, challenge, dst) // r - proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) - - return -} - -// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) -// option for linear combination vector - -// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 -// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution -// and previous commitment makes the new commitment. -// denom, num are assumed to be valid. No subgroup check and the like. 
-func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { - noG2 := denom.g2 == nil - if noG2 != (num.g2 == nil) { - return errors.New("erasing or creating g2 values") - } - - if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { - return errors.New("contribution values subgroup check failed") - } - - // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base - _, _, g1, _ := curve.Generators() - if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r - return errors.New("contribution proof of knowledge verification failed") - } - - // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. - if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { - return errors.New("g2 update inconsistent") - } - - // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) - // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { - return errors.New("g1 update inconsistent") - } - - return nil -} - func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { if !s[i].IsInSubGroup() { @@ -302,94 +94,3 @@ func areInSubGroupG2(s []curve.G2Affine) bool { } return true } - -// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
-// all concatenated in the same slice -func bivariateRandomMonomials(ends ...int) []fr.Element { - if len(ends) == 0 { - return nil - } - - res := make([]fr.Element, ends[len(ends)-1]) - if _, err := res[1].SetRandom(); err != nil { - panic(err) - } - setPowers(res[:ends[0]]) - - if len(ends) == 1 { - return res - } - - y := make([]fr.Element, len(ends)) - if _, err := y[1].SetRandom(); err != nil { - panic(err) - } - setPowers(y) - - for d := 1; d < len(ends); d++ { - xdeg := ends[d] - ends[d-1] - if xdeg > ends[0] { - panic("impl detail: first maximum degree for x must be the greatest") - } - - for i := range xdeg { - res[ends[d-1]+i].Mul(&res[i], &y[d]) - } - } - - return res -} - -// sets x[i] = x[1]ⁱ -func setPowers(x []fr.Element) { - if len(x) == 0 { - return - } - x[0].SetOne() - for i := 2; i < len(x); i++ { - x[i].Mul(&x[i-1], &x[1]) - } -} - -func partialSums(s ...int) []int { - if len(s) == 0 { - return nil - } - sums := make([]int, len(s)) - sums[0] = s[0] - for i := 1; i < len(s); i++ { - sums[i] = sums[i-1] + s[i] - } - return sums -} - -func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { - var ( - bb bytes.Buffer - err error - ) - bb.Grow(len(hash) + len(beaconChallenge)) - bb.Write(hash) - bb.Write(beaconChallenge) - - res := make([]fr.Element, 1) - - allNonZero := func() bool { - for i := range res { - if res[i].IsZero() { - return false - } - } - return true - } - - // cryptographically unlikely for this to be run more than once - for !allNonZero() { - if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { - panic(err) - } - bb.WriteByte('=') // padding just so that the hash is different next time - } - - return res -} diff --git a/backend/groth16/bls12-381/mpcsetup/phase1.go b/backend/groth16/bls12-381/mpcsetup/phase1.go index 877889ec8a..e81427db70 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase1.go +++ b/backend/groth16/bls12-381/mpcsetup/phase1.go @@ 
-13,6 +13,7 @@ import ( "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/mpcsetup" "math/big" ) @@ -37,7 +38,7 @@ type SrsCommons struct { // Also known as "Powers of Tau" type Phase1 struct { proofs struct { - Tau, Alpha, Beta valueUpdate + Tau, Alpha, Beta mpcsetup.UpdateProof } parameters SrsCommons Challenge []byte // Hash of the transcript PRIOR to this participant @@ -53,9 +54,9 @@ func (p *Phase1) Contribute() { tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) - p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) - p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } @@ -130,7 +131,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { - newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) return p.parameters } @@ -168,13 +169,16 @@ func (p *Phase1) Verify(next *Phase1) error { } // verify updates to τ, α, β - if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, + {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } @@ -185,7 +189,7 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } - return multiValueUpdateCheck( + return mpcsetup.SameRatioMany( p.parameters.G1.Tau, p.parameters.G2.Tau, p.parameters.G1.AlphaTau, @@ -193,70 +197,11 @@ func (p *Phase1) Verify(next *Phase1) error { ) } -// multiValueUpdateCheck 
checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l -// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ -func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { - // lemma: let K be a field and - // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ - // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ - // polynomials in K[X,Y,Z]. - // if F/F' = G/G' - // then F/F' = G/G' ∈ K - // - // view our polynomials in K[X,Y,Z] - // By multiplying out the polynomials we get - // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ - // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 - // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ - // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ - // fᵢⱼ = x f'ᵢⱼ - // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ - // gᵢ = x g'ᵢ - - // now we use this to check that: - // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ - // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ - // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ - // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ - - // construct the polynomials - // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² - // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² - // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² - // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² - - // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: - // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ - // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ - - // from previous checks we already know: - // 1. a₀ = 1 - // 2. b₀ = 1 - // 3. c₀ = α - // 4. d₀ = β - // and so the desired results follow - - ends := partialSums(len(a), len(c), len(d)) - - g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) - g1s = append(g1s, a...) - g1s = append(g1s, c...) - g1s = append(g1s, d...) 
- - g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) - - if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { - return errors.New("multi-value update check failed") - } - - return nil - -} - func (p *Phase1) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bls12-381/mpcsetup/phase2.go b/backend/groth16/bls12-381/mpcsetup/phase2.go index 64824e1d5a..9daf356394 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase2.go +++ b/backend/groth16/bls12-381/mpcsetup/phase2.go @@ -12,6 +12,7 @@ import ( "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/mpcsetup" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -51,8 +52,8 @@ type Phase2 struct { } // Proofs of update correctness - Sigmas []valueUpdate - Delta valueUpdate + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof // Challenge is the hash of the PREVIOUS contribution Challenge []byte @@ -72,15 +73,6 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) - - verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { - g1Num := linearCombination(g1Numerator, r) - g1Denom := linearCombination(g1Denominator, r) - - return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) - } - // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the 
Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment @@ -88,7 +80,9 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("commitment proving key subgroup check failed") } - if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } @@ -99,9 +93,12 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("derived values 𝔾₁ subgroup check failed") } - denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) - num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) - if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, + {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, + {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } @@ -151,14 +148,14 @@ func (p *Phase2) Contribute() { // sample value contributions and provide correctness proofs var delta fr.Element - p.Delta, delta = newValueUpdate(p.Challenge, 1) + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) if 
len(sigma) > 255 { panic("too many commitments") // DST collision } for i := range sigma { - p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) } p.update(&delta, sigma) @@ -276,7 +273,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) - p.Sigmas = make([]valueUpdate, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) @@ -342,19 +339,9 @@ func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c func (p *Phase2) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } - -func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { - l := 0 - for _, s := range s { - l += len(s) - } - res := make([]curve.G1Affine, 0, l) - for _, s := range s { - res = append(res, s...) - } - return res -} diff --git a/backend/groth16/bls12-381/mpcsetup/utils.go b/backend/groth16/bls12-381/mpcsetup/utils.go index 4e964a9958..6dc9ad7ec0 100644 --- a/backend/groth16/bls12-381/mpcsetup/utils.go +++ b/backend/groth16/bls12-381/mpcsetup/utils.go @@ -29,11 +29,6 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a -func randomMonomials(N int) []fr.Element { - return bivariateRandomMonomials(N) -} - // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { @@ -82,209 +77,6 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
-func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { - var nd1 curve.G1Affine - nd1.Neg(&d1) - res, err := curve.PairingCheck( - []curve.G1Affine{n1, nd1}, - []curve.G2Affine{d2, n2}) - if err != nil { - panic(err) - } - return res -} - -// returns ∑ rᵢAᵢ -func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { - nc := runtime.NumCPU() - var res curve.G1Affine - if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { - panic(err) - } - return res -} - -// linearCombinationsG1 returns -// -// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] -// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] -// .... (truncated) -// -// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] -// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] -// .... (shifted) -// -// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. -// Also assumed that powers[0] = 1. -// The slices powers and A will be modified -func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { - if ends[len(ends)-1] != len(A) || len(A) != len(powers) { - panic("lengths mismatch") - } - - // zero out the large coefficients - for i := range ends { - powers[ends[i]-1].SetZero() - } - - msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} - - if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { - panic(err) - } - - var rInvNeg fr.Element - rInvNeg.Inverse(&powers[1]) - rInvNeg.Neg(&rInvNeg) - prevEnd := 0 - - // r⁻¹.truncated = - // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] - // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] - // ... - // - // compute shifted as - // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... 
- // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... - // + r⁻¹.truncated - for i := range ends { - powers[2*i].Mul(&powers[prevEnd], &rInvNeg) - powers[2*i+1] = powers[ends[i]-2] - A[2*i] = A[prevEnd] - A[2*i+1] = A[ends[i]-1] - prevEnd = ends[i] - } - powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated - A[2*len(ends)] = truncated - - // TODO @Tabaie O(1) MSM worth it? - if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { - panic(err) - } - - return -} - -// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i -// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). -// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ -func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { - - N := len(A) - - if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { - panic(err) - } - - // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] - var ( - x fr.Element - i big.Int - ) - x.Neg(&rPowers[N-2]) - x.BigInt(&i) - truncated. - ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] - Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] - - rPowers[1].BigInt(&i) - truncated. - ScalarMultiplication(&truncated, &i). 
// r.shifted - rᴺ⁻¹.A[N-1] - Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] - - return -} - -// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) -// it is to be used as a challenge for generating a proof of knowledge to x -// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) -func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) - buf.Write(sG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) - } - return spG2 -} - -type pair struct { - g1 curve.G1Affine - g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. -} - -// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero -func (p *pair) validUpdate() bool { - // if the contribution is 0 the product is doomed to be 0. - // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail - return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) -} - -type valueUpdate struct { - contributionCommitment curve.G1Affine // x or [Xⱼ]₁ - contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ -} - -// newValueUpdate produces values associated with contribution to an existing value. -// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { - if _, err := contributionValue.SetRandom(); err != nil { - panic(err) - } - var contributionValueI big.Int - contributionValue.BigInt(&contributionValueI) - - _, _, gen1, _ := curve.Generators() - proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) - - // proof of knowledge to commitment. 
Algorithm 3 from section 3.7 - pokBase := genR(proof.contributionCommitment, challenge, dst) // r - proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) - - return -} - -// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) -// option for linear combination vector - -// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 -// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution -// and previous commitment makes the new commitment. -// denom, num are assumed to be valid. No subgroup check and the like. -func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { - noG2 := denom.g2 == nil - if noG2 != (num.g2 == nil) { - return errors.New("erasing or creating g2 values") - } - - if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { - return errors.New("contribution values subgroup check failed") - } - - // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base - _, _, g1, _ := curve.Generators() - if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r - return errors.New("contribution proof of knowledge verification failed") - } - - // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { - return errors.New("g2 update inconsistent") - } - - // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) - // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { - return errors.New("g1 update inconsistent") - } - - return nil -} - func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { if !s[i].IsInSubGroup() { @@ -302,94 +94,3 @@ func areInSubGroupG2(s []curve.G2Affine) bool { } return true } - -// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... -// all concatenated in the same slice -func bivariateRandomMonomials(ends ...int) []fr.Element { - if len(ends) == 0 { - return nil - } - - res := make([]fr.Element, ends[len(ends)-1]) - if _, err := res[1].SetRandom(); err != nil { - panic(err) - } - setPowers(res[:ends[0]]) - - if len(ends) == 1 { - return res - } - - y := make([]fr.Element, len(ends)) - if _, err := y[1].SetRandom(); err != nil { - panic(err) - } - setPowers(y) - - for d := 1; d < len(ends); d++ { - xdeg := ends[d] - ends[d-1] - if xdeg > ends[0] { - panic("impl detail: first maximum degree for x must be the greatest") - } - - for i := range xdeg { - res[ends[d-1]+i].Mul(&res[i], &y[d]) - } - } - - return res -} - -// sets x[i] = x[1]ⁱ -func setPowers(x []fr.Element) { - if len(x) == 0 { - return - } - x[0].SetOne() - for i := 2; i < len(x); i++ { - x[i].Mul(&x[i-1], &x[1]) - } -} - -func partialSums(s ...int) []int { - if len(s) == 0 { - return nil - } - sums := make([]int, len(s)) - sums[0] = s[0] - for i := 1; i < len(s); i++ { - sums[i] = sums[i-1] + s[i] - } - return sums -} - -func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { - var ( - bb bytes.Buffer - err error - ) - bb.Grow(len(hash) + len(beaconChallenge)) - bb.Write(hash) - bb.Write(beaconChallenge) - - res := make([]fr.Element, 1) - - 
allNonZero := func() bool { - for i := range res { - if res[i].IsZero() { - return false - } - } - return true - } - - // cryptographically unlikely for this to be run more than once - for !allNonZero() { - if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { - panic(err) - } - bb.WriteByte('=') // padding just so that the hash is different next time - } - - return res -} diff --git a/backend/groth16/bls24-315/mpcsetup/phase1.go b/backend/groth16/bls24-315/mpcsetup/phase1.go index c3b127a942..25170f8fab 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase1.go +++ b/backend/groth16/bls24-315/mpcsetup/phase1.go @@ -13,6 +13,7 @@ import ( "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/mpcsetup" "math/big" ) @@ -37,7 +38,7 @@ type SrsCommons struct { // Also known as "Powers of Tau" type Phase1 struct { proofs struct { - Tau, Alpha, Beta valueUpdate + Tau, Alpha, Beta mpcsetup.UpdateProof } parameters SrsCommons Challenge []byte // Hash of the transcript PRIOR to this participant @@ -53,9 +54,9 @@ func (p *Phase1) Contribute() { tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) - p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) - p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } @@ -130,7 +131,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // WARNING: Seal modifies p, just as Contribute does. 
// The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { - newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) return p.parameters } @@ -168,13 +169,16 @@ func (p *Phase1) Verify(next *Phase1) error { } // verify updates to τ, α, β - if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, + {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } @@ -185,7 +189,7 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } - return multiValueUpdateCheck( + return mpcsetup.SameRatioMany( p.parameters.G1.Tau, p.parameters.G2.Tau, p.parameters.G1.AlphaTau, @@ -193,70 
+197,11 @@ func (p *Phase1) Verify(next *Phase1) error { ) } -// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l -// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ -func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { - // lemma: let K be a field and - // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ - // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ - // polynomials in K[X,Y,Z]. - // if F/F' = G/G' - // then F/F' = G/G' ∈ K - // - // view our polynomials in K[X,Y,Z] - // By multiplying out the polynomials we get - // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ - // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 - // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ - // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ - // fᵢⱼ = x f'ᵢⱼ - // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ - // gᵢ = x g'ᵢ - - // now we use this to check that: - // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ - // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ - // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ - // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ - - // construct the polynomials - // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² - // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² - // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² - // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² - - // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: - // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ - // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ - - // from previous checks we already know: - // 1. a₀ = 1 - // 2. b₀ = 1 - // 3. c₀ = α - // 4. d₀ = β - // and so the desired results follow - - ends := partialSums(len(a), len(c), len(d)) - - g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) - g1s = append(g1s, a...) - g1s = append(g1s, c...) - g1s = append(g1s, d...) 
- - g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) - - if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { - return errors.New("multi-value update check failed") - } - - return nil - -} - func (p *Phase1) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bls24-315/mpcsetup/phase2.go b/backend/groth16/bls24-315/mpcsetup/phase2.go index 7702610cd8..5543ecd6ba 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase2.go +++ b/backend/groth16/bls24-315/mpcsetup/phase2.go @@ -12,6 +12,7 @@ import ( "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/mpcsetup" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -51,8 +52,8 @@ type Phase2 struct { } // Proofs of update correctness - Sigmas []valueUpdate - Delta valueUpdate + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof // Challenge is the hash of the PREVIOUS contribution Challenge []byte @@ -72,15 +73,6 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) - - verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { - g1Num := linearCombination(g1Numerator, r) - g1Denom := linearCombination(g1Denominator, r) - - return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) - } - // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the 
Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment @@ -88,7 +80,9 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("commitment proving key subgroup check failed") } - if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } @@ -99,9 +93,12 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("derived values 𝔾₁ subgroup check failed") } - denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) - num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) - if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, + {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, + {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } @@ -151,14 +148,14 @@ func (p *Phase2) Contribute() { // sample value contributions and provide correctness proofs var delta fr.Element - p.Delta, delta = newValueUpdate(p.Challenge, 1) + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) if 
len(sigma) > 255 { panic("too many commitments") // DST collision } for i := range sigma { - p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) } p.update(&delta, sigma) @@ -276,7 +273,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) - p.Sigmas = make([]valueUpdate, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) @@ -342,19 +339,9 @@ func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c func (p *Phase2) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } - -func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { - l := 0 - for _, s := range s { - l += len(s) - } - res := make([]curve.G1Affine, 0, l) - for _, s := range s { - res = append(res, s...) - } - return res -} diff --git a/backend/groth16/bls24-315/mpcsetup/utils.go b/backend/groth16/bls24-315/mpcsetup/utils.go index 4af8de9463..6a84f08932 100644 --- a/backend/groth16/bls24-315/mpcsetup/utils.go +++ b/backend/groth16/bls24-315/mpcsetup/utils.go @@ -29,11 +29,6 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a -func randomMonomials(N int) []fr.Element { - return bivariateRandomMonomials(N) -} - // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { @@ -82,209 +77,6 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
-func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { - var nd1 curve.G1Affine - nd1.Neg(&d1) - res, err := curve.PairingCheck( - []curve.G1Affine{n1, nd1}, - []curve.G2Affine{d2, n2}) - if err != nil { - panic(err) - } - return res -} - -// returns ∑ rᵢAᵢ -func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { - nc := runtime.NumCPU() - var res curve.G1Affine - if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { - panic(err) - } - return res -} - -// linearCombinationsG1 returns -// -// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] -// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] -// .... (truncated) -// -// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] -// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] -// .... (shifted) -// -// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. -// Also assumed that powers[0] = 1. -// The slices powers and A will be modified -func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { - if ends[len(ends)-1] != len(A) || len(A) != len(powers) { - panic("lengths mismatch") - } - - // zero out the large coefficients - for i := range ends { - powers[ends[i]-1].SetZero() - } - - msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} - - if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { - panic(err) - } - - var rInvNeg fr.Element - rInvNeg.Inverse(&powers[1]) - rInvNeg.Neg(&rInvNeg) - prevEnd := 0 - - // r⁻¹.truncated = - // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] - // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] - // ... - // - // compute shifted as - // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... 
- // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... - // + r⁻¹.truncated - for i := range ends { - powers[2*i].Mul(&powers[prevEnd], &rInvNeg) - powers[2*i+1] = powers[ends[i]-2] - A[2*i] = A[prevEnd] - A[2*i+1] = A[ends[i]-1] - prevEnd = ends[i] - } - powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated - A[2*len(ends)] = truncated - - // TODO @Tabaie O(1) MSM worth it? - if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { - panic(err) - } - - return -} - -// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i -// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). -// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ -func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { - - N := len(A) - - if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { - panic(err) - } - - // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] - var ( - x fr.Element - i big.Int - ) - x.Neg(&rPowers[N-2]) - x.BigInt(&i) - truncated. - ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] - Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] - - rPowers[1].BigInt(&i) - truncated. - ScalarMultiplication(&truncated, &i). 
// r.shifted - rᴺ⁻¹.A[N-1] - Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] - - return -} - -// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) -// it is to be used as a challenge for generating a proof of knowledge to x -// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) -func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) - buf.Write(sG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) - } - return spG2 -} - -type pair struct { - g1 curve.G1Affine - g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. -} - -// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero -func (p *pair) validUpdate() bool { - // if the contribution is 0 the product is doomed to be 0. - // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail - return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) -} - -type valueUpdate struct { - contributionCommitment curve.G1Affine // x or [Xⱼ]₁ - contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ -} - -// newValueUpdate produces values associated with contribution to an existing value. -// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { - if _, err := contributionValue.SetRandom(); err != nil { - panic(err) - } - var contributionValueI big.Int - contributionValue.BigInt(&contributionValueI) - - _, _, gen1, _ := curve.Generators() - proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) - - // proof of knowledge to commitment. 
Algorithm 3 from section 3.7 - pokBase := genR(proof.contributionCommitment, challenge, dst) // r - proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) - - return -} - -// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) -// option for linear combination vector - -// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 -// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution -// and previous commitment makes the new commitment. -// denom, num are assumed to be valid. No subgroup check and the like. -func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { - noG2 := denom.g2 == nil - if noG2 != (num.g2 == nil) { - return errors.New("erasing or creating g2 values") - } - - if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { - return errors.New("contribution values subgroup check failed") - } - - // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base - _, _, g1, _ := curve.Generators() - if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r - return errors.New("contribution proof of knowledge verification failed") - } - - // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { - return errors.New("g2 update inconsistent") - } - - // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) - // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { - return errors.New("g1 update inconsistent") - } - - return nil -} - func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { if !s[i].IsInSubGroup() { @@ -302,94 +94,3 @@ func areInSubGroupG2(s []curve.G2Affine) bool { } return true } - -// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... -// all concatenated in the same slice -func bivariateRandomMonomials(ends ...int) []fr.Element { - if len(ends) == 0 { - return nil - } - - res := make([]fr.Element, ends[len(ends)-1]) - if _, err := res[1].SetRandom(); err != nil { - panic(err) - } - setPowers(res[:ends[0]]) - - if len(ends) == 1 { - return res - } - - y := make([]fr.Element, len(ends)) - if _, err := y[1].SetRandom(); err != nil { - panic(err) - } - setPowers(y) - - for d := 1; d < len(ends); d++ { - xdeg := ends[d] - ends[d-1] - if xdeg > ends[0] { - panic("impl detail: first maximum degree for x must be the greatest") - } - - for i := range xdeg { - res[ends[d-1]+i].Mul(&res[i], &y[d]) - } - } - - return res -} - -// sets x[i] = x[1]ⁱ -func setPowers(x []fr.Element) { - if len(x) == 0 { - return - } - x[0].SetOne() - for i := 2; i < len(x); i++ { - x[i].Mul(&x[i-1], &x[1]) - } -} - -func partialSums(s ...int) []int { - if len(s) == 0 { - return nil - } - sums := make([]int, len(s)) - sums[0] = s[0] - for i := 1; i < len(s); i++ { - sums[i] = sums[i-1] + s[i] - } - return sums -} - -func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { - var ( - bb bytes.Buffer - err error - ) - bb.Grow(len(hash) + len(beaconChallenge)) - bb.Write(hash) - bb.Write(beaconChallenge) - - res := make([]fr.Element, 1) - - 
allNonZero := func() bool { - for i := range res { - if res[i].IsZero() { - return false - } - } - return true - } - - // cryptographically unlikely for this to be run more than once - for !allNonZero() { - if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { - panic(err) - } - bb.WriteByte('=') // padding just so that the hash is different next time - } - - return res -} diff --git a/backend/groth16/bls24-317/mpcsetup/phase1.go b/backend/groth16/bls24-317/mpcsetup/phase1.go index 7af53f2105..f115e0fb1c 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase1.go +++ b/backend/groth16/bls24-317/mpcsetup/phase1.go @@ -13,6 +13,7 @@ import ( "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/mpcsetup" "math/big" ) @@ -37,7 +38,7 @@ type SrsCommons struct { // Also known as "Powers of Tau" type Phase1 struct { proofs struct { - Tau, Alpha, Beta valueUpdate + Tau, Alpha, Beta mpcsetup.UpdateProof } parameters SrsCommons Challenge []byte // Hash of the transcript PRIOR to this participant @@ -53,9 +54,9 @@ func (p *Phase1) Contribute() { tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) - p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) - p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } @@ -130,7 +131,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // WARNING: Seal modifies p, just as Contribute does. 
// The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { - newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) return p.parameters } @@ -168,13 +169,16 @@ func (p *Phase1) Verify(next *Phase1) error { } // verify updates to τ, α, β - if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, + {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } @@ -185,7 +189,7 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } - return multiValueUpdateCheck( + return mpcsetup.SameRatioMany( p.parameters.G1.Tau, p.parameters.G2.Tau, p.parameters.G1.AlphaTau, @@ -193,70 
+197,11 @@ func (p *Phase1) Verify(next *Phase1) error { ) } -// multiValueUpdateCheck checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l -// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ -func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { - // lemma: let K be a field and - // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ - // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ - // polynomials in K[X,Y,Z]. - // if F/F' = G/G' - // then F/F' = G/G' ∈ K - // - // view our polynomials in K[X,Y,Z] - // By multiplying out the polynomials we get - // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ - // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 - // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ - // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ - // fᵢⱼ = x f'ᵢⱼ - // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ - // gᵢ = x g'ᵢ - - // now we use this to check that: - // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ - // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ - // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ - // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ - - // construct the polynomials - // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² - // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² - // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² - // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² - - // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: - // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ - // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ - - // from previous checks we already know: - // 1. a₀ = 1 - // 2. b₀ = 1 - // 3. c₀ = α - // 4. d₀ = β - // and so the desired results follow - - ends := partialSums(len(a), len(c), len(d)) - - g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) - g1s = append(g1s, a...) - g1s = append(g1s, c...) - g1s = append(g1s, d...) 
- - g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) - - if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { - return errors.New("multi-value update check failed") - } - - return nil - -} - func (p *Phase1) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bls24-317/mpcsetup/phase2.go b/backend/groth16/bls24-317/mpcsetup/phase2.go index ce6c6c23f7..8b4f903384 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase2.go +++ b/backend/groth16/bls24-317/mpcsetup/phase2.go @@ -12,6 +12,7 @@ import ( "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/mpcsetup" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -51,8 +52,8 @@ type Phase2 struct { } // Proofs of update correctness - Sigmas []valueUpdate - Delta valueUpdate + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof // Challenge is the hash of the PREVIOUS contribution Challenge []byte @@ -72,15 +73,6 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) - - verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { - g1Num := linearCombination(g1Numerator, r) - g1Denom := linearCombination(g1Denominator, r) - - return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) - } - // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the 
Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment @@ -88,7 +80,9 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("commitment proving key subgroup check failed") } - if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } @@ -99,9 +93,12 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("derived values 𝔾₁ subgroup check failed") } - denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) - num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) - if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, + {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, + {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } @@ -151,14 +148,14 @@ func (p *Phase2) Contribute() { // sample value contributions and provide correctness proofs var delta fr.Element - p.Delta, delta = newValueUpdate(p.Challenge, 1) + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) if 
len(sigma) > 255 { panic("too many commitments") // DST collision } for i := range sigma { - p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) } p.update(&delta, sigma) @@ -276,7 +273,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) - p.Sigmas = make([]valueUpdate, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) @@ -342,19 +339,9 @@ func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c func (p *Phase2) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } - -func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { - l := 0 - for _, s := range s { - l += len(s) - } - res := make([]curve.G1Affine, 0, l) - for _, s := range s { - res = append(res, s...) - } - return res -} diff --git a/backend/groth16/bls24-317/mpcsetup/utils.go b/backend/groth16/bls24-317/mpcsetup/utils.go index 85e237b212..03bcd6aa78 100644 --- a/backend/groth16/bls24-317/mpcsetup/utils.go +++ b/backend/groth16/bls24-317/mpcsetup/utils.go @@ -29,11 +29,6 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a -func randomMonomials(N int) []fr.Element { - return bivariateRandomMonomials(N) -} - // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { @@ -82,209 +77,6 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
-func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { - var nd1 curve.G1Affine - nd1.Neg(&d1) - res, err := curve.PairingCheck( - []curve.G1Affine{n1, nd1}, - []curve.G2Affine{d2, n2}) - if err != nil { - panic(err) - } - return res -} - -// returns ∑ rᵢAᵢ -func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { - nc := runtime.NumCPU() - var res curve.G1Affine - if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { - panic(err) - } - return res -} - -// linearCombinationsG1 returns -// -// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] -// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] -// .... (truncated) -// -// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] -// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] -// .... (shifted) -// -// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. -// Also assumed that powers[0] = 1. -// The slices powers and A will be modified -func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { - if ends[len(ends)-1] != len(A) || len(A) != len(powers) { - panic("lengths mismatch") - } - - // zero out the large coefficients - for i := range ends { - powers[ends[i]-1].SetZero() - } - - msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} - - if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { - panic(err) - } - - var rInvNeg fr.Element - rInvNeg.Inverse(&powers[1]) - rInvNeg.Neg(&rInvNeg) - prevEnd := 0 - - // r⁻¹.truncated = - // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] - // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] - // ... - // - // compute shifted as - // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... 
- // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... - // + r⁻¹.truncated - for i := range ends { - powers[2*i].Mul(&powers[prevEnd], &rInvNeg) - powers[2*i+1] = powers[ends[i]-2] - A[2*i] = A[prevEnd] - A[2*i+1] = A[ends[i]-1] - prevEnd = ends[i] - } - powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated - A[2*len(ends)] = truncated - - // TODO @Tabaie O(1) MSM worth it? - if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { - panic(err) - } - - return -} - -// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i -// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). -// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ -func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { - - N := len(A) - - if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { - panic(err) - } - - // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] - var ( - x fr.Element - i big.Int - ) - x.Neg(&rPowers[N-2]) - x.BigInt(&i) - truncated. - ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] - Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] - - rPowers[1].BigInt(&i) - truncated. - ScalarMultiplication(&truncated, &i). 
// r.shifted - rᴺ⁻¹.A[N-1] - Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] - - return -} - -// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) -// it is to be used as a challenge for generating a proof of knowledge to x -// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) -func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) - buf.Write(sG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) - } - return spG2 -} - -type pair struct { - g1 curve.G1Affine - g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. -} - -// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero -func (p *pair) validUpdate() bool { - // if the contribution is 0 the product is doomed to be 0. - // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail - return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) -} - -type valueUpdate struct { - contributionCommitment curve.G1Affine // x or [Xⱼ]₁ - contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ -} - -// newValueUpdate produces values associated with contribution to an existing value. -// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { - if _, err := contributionValue.SetRandom(); err != nil { - panic(err) - } - var contributionValueI big.Int - contributionValue.BigInt(&contributionValueI) - - _, _, gen1, _ := curve.Generators() - proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) - - // proof of knowledge to commitment. 
Algorithm 3 from section 3.7 - pokBase := genR(proof.contributionCommitment, challenge, dst) // r - proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) - - return -} - -// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) -// option for linear combination vector - -// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 -// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution -// and previous commitment makes the new commitment. -// denom, num are assumed to be valid. No subgroup check and the like. -func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { - noG2 := denom.g2 == nil - if noG2 != (num.g2 == nil) { - return errors.New("erasing or creating g2 values") - } - - if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { - return errors.New("contribution values subgroup check failed") - } - - // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base - _, _, g1, _ := curve.Generators() - if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r - return errors.New("contribution proof of knowledge verification failed") - } - - // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { - return errors.New("g2 update inconsistent") - } - - // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) - // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { - return errors.New("g1 update inconsistent") - } - - return nil -} - func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { if !s[i].IsInSubGroup() { @@ -302,94 +94,3 @@ func areInSubGroupG2(s []curve.G2Affine) bool { } return true } - -// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... -// all concatenated in the same slice -func bivariateRandomMonomials(ends ...int) []fr.Element { - if len(ends) == 0 { - return nil - } - - res := make([]fr.Element, ends[len(ends)-1]) - if _, err := res[1].SetRandom(); err != nil { - panic(err) - } - setPowers(res[:ends[0]]) - - if len(ends) == 1 { - return res - } - - y := make([]fr.Element, len(ends)) - if _, err := y[1].SetRandom(); err != nil { - panic(err) - } - setPowers(y) - - for d := 1; d < len(ends); d++ { - xdeg := ends[d] - ends[d-1] - if xdeg > ends[0] { - panic("impl detail: first maximum degree for x must be the greatest") - } - - for i := range xdeg { - res[ends[d-1]+i].Mul(&res[i], &y[d]) - } - } - - return res -} - -// sets x[i] = x[1]ⁱ -func setPowers(x []fr.Element) { - if len(x) == 0 { - return - } - x[0].SetOne() - for i := 2; i < len(x); i++ { - x[i].Mul(&x[i-1], &x[1]) - } -} - -func partialSums(s ...int) []int { - if len(s) == 0 { - return nil - } - sums := make([]int, len(s)) - sums[0] = s[0] - for i := 1; i < len(s); i++ { - sums[i] = sums[i-1] + s[i] - } - return sums -} - -func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { - var ( - bb bytes.Buffer - err error - ) - bb.Grow(len(hash) + len(beaconChallenge)) - bb.Write(hash) - bb.Write(beaconChallenge) - - res := make([]fr.Element, 1) - - 
allNonZero := func() bool { - for i := range res { - if res[i].IsZero() { - return false - } - } - return true - } - - // cryptographically unlikely for this to be run more than once - for !allNonZero() { - if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { - panic(err) - } - bb.WriteByte('=') // padding just so that the hash is different next time - } - - return res -} diff --git a/backend/groth16/bw6-633/mpcsetup/phase1.go b/backend/groth16/bw6-633/mpcsetup/phase1.go index 79a072d644..7e3c0c40f1 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase1.go +++ b/backend/groth16/bw6-633/mpcsetup/phase1.go @@ -13,6 +13,7 @@ import ( "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/mpcsetup" "math/big" ) @@ -37,7 +38,7 @@ type SrsCommons struct { // Also known as "Powers of Tau" type Phase1 struct { proofs struct { - Tau, Alpha, Beta valueUpdate + Tau, Alpha, Beta mpcsetup.UpdateProof } parameters SrsCommons Challenge []byte // Hash of the transcript PRIOR to this participant @@ -53,9 +54,9 @@ func (p *Phase1) Contribute() { tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) - p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) - p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } @@ -130,7 +131,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { - newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) return p.parameters } @@ -168,13 +169,16 @@ func (p *Phase1) Verify(next *Phase1) error { } // verify updates to τ, α, β - if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, + {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } @@ -185,7 +189,7 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } - return multiValueUpdateCheck( + return mpcsetup.SameRatioMany( p.parameters.G1.Tau, p.parameters.G2.Tau, p.parameters.G1.AlphaTau, @@ -193,70 +197,11 @@ func (p *Phase1) Verify(next *Phase1) error { ) } -// multiValueUpdateCheck 
checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l -// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ -func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { - // lemma: let K be a field and - // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ - // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ - // polynomials in K[X,Y,Z]. - // if F/F' = G/G' - // then F/F' = G/G' ∈ K - // - // view our polynomials in K[X,Y,Z] - // By multiplying out the polynomials we get - // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ - // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 - // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ - // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ - // fᵢⱼ = x f'ᵢⱼ - // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ - // gᵢ = x g'ᵢ - - // now we use this to check that: - // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ - // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ - // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ - // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ - - // construct the polynomials - // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² - // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² - // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² - // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² - - // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: - // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ - // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ - - // from previous checks we already know: - // 1. a₀ = 1 - // 2. b₀ = 1 - // 3. c₀ = α - // 4. d₀ = β - // and so the desired results follow - - ends := partialSums(len(a), len(c), len(d)) - - g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) - g1s = append(g1s, a...) - g1s = append(g1s, c...) - g1s = append(g1s, d...) 
- - g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) - - if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { - return errors.New("multi-value update check failed") - } - - return nil - -} - func (p *Phase1) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bw6-633/mpcsetup/phase2.go b/backend/groth16/bw6-633/mpcsetup/phase2.go index 2746d07b94..d18f46bb02 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase2.go +++ b/backend/groth16/bw6-633/mpcsetup/phase2.go @@ -12,6 +12,7 @@ import ( "fmt" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/mpcsetup" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -51,8 +52,8 @@ type Phase2 struct { } // Proofs of update correctness - Sigmas []valueUpdate - Delta valueUpdate + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof // Challenge is the hash of the PREVIOUS contribution Challenge []byte @@ -72,15 +73,6 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) - - verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { - g1Num := linearCombination(g1Numerator, r) - g1Denom := linearCombination(g1Denominator, r) - - return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) - } - // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := 
range p.Sigmas { // match the first commitment basis elem against the contribution commitment @@ -88,7 +80,9 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("commitment proving key subgroup check failed") } - if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } @@ -99,9 +93,12 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("derived values 𝔾₁ subgroup check failed") } - denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) - num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) - if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, + {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, + {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } @@ -151,14 +148,14 @@ func (p *Phase2) Contribute() { // sample value contributions and provide correctness proofs var delta fr.Element - p.Delta, delta = newValueUpdate(p.Challenge, 1) + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) if len(sigma) > 255 { panic("too many 
commitments") // DST collision } for i := range sigma { - p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) } p.update(&delta, sigma) @@ -276,7 +273,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) - p.Sigmas = make([]valueUpdate, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) @@ -342,19 +339,9 @@ func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c func (p *Phase2) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } - -func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { - l := 0 - for _, s := range s { - l += len(s) - } - res := make([]curve.G1Affine, 0, l) - for _, s := range s { - res = append(res, s...) - } - return res -} diff --git a/backend/groth16/bw6-633/mpcsetup/utils.go b/backend/groth16/bw6-633/mpcsetup/utils.go index 342bc9c429..2c9f54cfa9 100644 --- a/backend/groth16/bw6-633/mpcsetup/utils.go +++ b/backend/groth16/bw6-633/mpcsetup/utils.go @@ -29,11 +29,6 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a -func randomMonomials(N int) []fr.Element { - return bivariateRandomMonomials(N) -} - // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { @@ -82,209 +77,6 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
-func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { - var nd1 curve.G1Affine - nd1.Neg(&d1) - res, err := curve.PairingCheck( - []curve.G1Affine{n1, nd1}, - []curve.G2Affine{d2, n2}) - if err != nil { - panic(err) - } - return res -} - -// returns ∑ rᵢAᵢ -func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { - nc := runtime.NumCPU() - var res curve.G1Affine - if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { - panic(err) - } - return res -} - -// linearCombinationsG1 returns -// -// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] -// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] -// .... (truncated) -// -// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] -// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] -// .... (shifted) -// -// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. -// Also assumed that powers[0] = 1. -// The slices powers and A will be modified -func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { - if ends[len(ends)-1] != len(A) || len(A) != len(powers) { - panic("lengths mismatch") - } - - // zero out the large coefficients - for i := range ends { - powers[ends[i]-1].SetZero() - } - - msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} - - if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { - panic(err) - } - - var rInvNeg fr.Element - rInvNeg.Inverse(&powers[1]) - rInvNeg.Neg(&rInvNeg) - prevEnd := 0 - - // r⁻¹.truncated = - // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] - // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] - // ... - // - // compute shifted as - // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... 
- // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... - // + r⁻¹.truncated - for i := range ends { - powers[2*i].Mul(&powers[prevEnd], &rInvNeg) - powers[2*i+1] = powers[ends[i]-2] - A[2*i] = A[prevEnd] - A[2*i+1] = A[ends[i]-1] - prevEnd = ends[i] - } - powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated - A[2*len(ends)] = truncated - - // TODO @Tabaie O(1) MSM worth it? - if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { - panic(err) - } - - return -} - -// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i -// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). -// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ -func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { - - N := len(A) - - if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { - panic(err) - } - - // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] - var ( - x fr.Element - i big.Int - ) - x.Neg(&rPowers[N-2]) - x.BigInt(&i) - truncated. - ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] - Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] - - rPowers[1].BigInt(&i) - truncated. - ScalarMultiplication(&truncated, &i). 
// r.shifted - rᴺ⁻¹.A[N-1] - Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] - - return -} - -// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) -// it is to be used as a challenge for generating a proof of knowledge to x -// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) -func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) - buf.Write(sG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) - } - return spG2 -} - -type pair struct { - g1 curve.G1Affine - g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. -} - -// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero -func (p *pair) validUpdate() bool { - // if the contribution is 0 the product is doomed to be 0. - // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail - return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) -} - -type valueUpdate struct { - contributionCommitment curve.G1Affine // x or [Xⱼ]₁ - contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ -} - -// newValueUpdate produces values associated with contribution to an existing value. -// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { - if _, err := contributionValue.SetRandom(); err != nil { - panic(err) - } - var contributionValueI big.Int - contributionValue.BigInt(&contributionValueI) - - _, _, gen1, _ := curve.Generators() - proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) - - // proof of knowledge to commitment. 
Algorithm 3 from section 3.7 - pokBase := genR(proof.contributionCommitment, challenge, dst) // r - proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) - - return -} - -// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) -// option for linear combination vector - -// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 -// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution -// and previous commitment makes the new commitment. -// denom, num are assumed to be valid. No subgroup check and the like. -func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { - noG2 := denom.g2 == nil - if noG2 != (num.g2 == nil) { - return errors.New("erasing or creating g2 values") - } - - if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { - return errors.New("contribution values subgroup check failed") - } - - // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base - _, _, g1, _ := curve.Generators() - if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r - return errors.New("contribution proof of knowledge verification failed") - } - - // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { - return errors.New("g2 update inconsistent") - } - - // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) - // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { - return errors.New("g1 update inconsistent") - } - - return nil -} - func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { if !s[i].IsInSubGroup() { @@ -302,94 +94,3 @@ func areInSubGroupG2(s []curve.G2Affine) bool { } return true } - -// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... -// all concatenated in the same slice -func bivariateRandomMonomials(ends ...int) []fr.Element { - if len(ends) == 0 { - return nil - } - - res := make([]fr.Element, ends[len(ends)-1]) - if _, err := res[1].SetRandom(); err != nil { - panic(err) - } - setPowers(res[:ends[0]]) - - if len(ends) == 1 { - return res - } - - y := make([]fr.Element, len(ends)) - if _, err := y[1].SetRandom(); err != nil { - panic(err) - } - setPowers(y) - - for d := 1; d < len(ends); d++ { - xdeg := ends[d] - ends[d-1] - if xdeg > ends[0] { - panic("impl detail: first maximum degree for x must be the greatest") - } - - for i := range xdeg { - res[ends[d-1]+i].Mul(&res[i], &y[d]) - } - } - - return res -} - -// sets x[i] = x[1]ⁱ -func setPowers(x []fr.Element) { - if len(x) == 0 { - return - } - x[0].SetOne() - for i := 2; i < len(x); i++ { - x[i].Mul(&x[i-1], &x[1]) - } -} - -func partialSums(s ...int) []int { - if len(s) == 0 { - return nil - } - sums := make([]int, len(s)) - sums[0] = s[0] - for i := 1; i < len(s); i++ { - sums[i] = sums[i-1] + s[i] - } - return sums -} - -func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { - var ( - bb bytes.Buffer - err error - ) - bb.Grow(len(hash) + len(beaconChallenge)) - bb.Write(hash) - bb.Write(beaconChallenge) - - res := make([]fr.Element, 1) - - 
allNonZero := func() bool { - for i := range res { - if res[i].IsZero() { - return false - } - } - return true - } - - // cryptographically unlikely for this to be run more than once - for !allNonZero() { - if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { - panic(err) - } - bb.WriteByte('=') // padding just so that the hash is different next time - } - - return res -} diff --git a/backend/groth16/bw6-761/mpcsetup/phase1.go b/backend/groth16/bw6-761/mpcsetup/phase1.go index 518703f993..298c1263ba 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase1.go +++ b/backend/groth16/bw6-761/mpcsetup/phase1.go @@ -13,6 +13,7 @@ import ( "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/mpcsetup" "math/big" ) @@ -37,7 +38,7 @@ type SrsCommons struct { // Also known as "Powers of Tau" type Phase1 struct { proofs struct { - Tau, Alpha, Beta valueUpdate + Tau, Alpha, Beta mpcsetup.UpdateProof } parameters SrsCommons Challenge []byte // Hash of the transcript PRIOR to this participant @@ -53,9 +54,9 @@ func (p *Phase1) Contribute() { tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) - p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) - p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } @@ -130,7 +131,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { - newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) return p.parameters } @@ -168,13 +169,16 @@ func (p *Phase1) Verify(next *Phase1) error { } // verify updates to τ, α, β - if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, + {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } @@ -185,7 +189,7 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } - return multiValueUpdateCheck( + return mpcsetup.SameRatioMany( p.parameters.G1.Tau, p.parameters.G2.Tau, p.parameters.G1.AlphaTau, @@ -193,70 +197,11 @@ func (p *Phase1) Verify(next *Phase1) error { ) } -// multiValueUpdateCheck 
checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l -// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ -func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { - // lemma: let K be a field and - // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ - // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ - // polynomials in K[X,Y,Z]. - // if F/F' = G/G' - // then F/F' = G/G' ∈ K - // - // view our polynomials in K[X,Y,Z] - // By multiplying out the polynomials we get - // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ - // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 - // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ - // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ - // fᵢⱼ = x f'ᵢⱼ - // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ - // gᵢ = x g'ᵢ - - // now we use this to check that: - // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ - // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ - // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ - // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ - - // construct the polynomials - // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² - // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² - // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² - // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² - - // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: - // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ - // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ - - // from previous checks we already know: - // 1. a₀ = 1 - // 2. b₀ = 1 - // 3. c₀ = α - // 4. d₀ = β - // and so the desired results follow - - ends := partialSums(len(a), len(c), len(d)) - - g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) - g1s = append(g1s, a...) - g1s = append(g1s, c...) - g1s = append(g1s, d...) 
- - g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) - - if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { - return errors.New("multi-value update check failed") - } - - return nil - -} - func (p *Phase1) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bw6-761/mpcsetup/phase2.go b/backend/groth16/bw6-761/mpcsetup/phase2.go index 23d61bfd30..60aaea5bb7 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase2.go +++ b/backend/groth16/bw6-761/mpcsetup/phase2.go @@ -12,6 +12,7 @@ import ( "fmt" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/mpcsetup" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -51,8 +52,8 @@ type Phase2 struct { } // Proofs of update correctness - Sigmas []valueUpdate - Delta valueUpdate + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof // Challenge is the hash of the PREVIOUS contribution Challenge []byte @@ -72,15 +73,6 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) - - verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { - g1Num := linearCombination(g1Numerator, r) - g1Denom := linearCombination(g1Denominator, r) - - return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) - } - // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := 
range p.Sigmas { // match the first commitment basis elem against the contribution commitment @@ -88,7 +80,9 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("commitment proving key subgroup check failed") } - if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } @@ -99,9 +93,12 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("derived values 𝔾₁ subgroup check failed") } - denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) - num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) - if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, + {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, + {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } @@ -151,14 +148,14 @@ func (p *Phase2) Contribute() { // sample value contributions and provide correctness proofs var delta fr.Element - p.Delta, delta = newValueUpdate(p.Challenge, 1) + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) if len(sigma) > 255 { panic("too many 
commitments") // DST collision } for i := range sigma { - p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) } p.update(&delta, sigma) @@ -276,7 +273,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) - p.Sigmas = make([]valueUpdate, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) @@ -342,19 +339,9 @@ func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c func (p *Phase2) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } - -func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { - l := 0 - for _, s := range s { - l += len(s) - } - res := make([]curve.G1Affine, 0, l) - for _, s := range s { - res = append(res, s...) - } - return res -} diff --git a/backend/groth16/bw6-761/mpcsetup/utils.go b/backend/groth16/bw6-761/mpcsetup/utils.go index 88d7aeb797..3b5d1992fc 100644 --- a/backend/groth16/bw6-761/mpcsetup/utils.go +++ b/backend/groth16/bw6-761/mpcsetup/utils.go @@ -29,11 +29,6 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a -func randomMonomials(N int) []fr.Element { - return bivariateRandomMonomials(N) -} - // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { @@ -82,209 +77,6 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. 
-func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { - var nd1 curve.G1Affine - nd1.Neg(&d1) - res, err := curve.PairingCheck( - []curve.G1Affine{n1, nd1}, - []curve.G2Affine{d2, n2}) - if err != nil { - panic(err) - } - return res -} - -// returns ∑ rᵢAᵢ -func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { - nc := runtime.NumCPU() - var res curve.G1Affine - if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { - panic(err) - } - return res -} - -// linearCombinationsG1 returns -// -// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] -// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] -// .... (truncated) -// -// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] -// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] -// .... (shifted) -// -// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. -// Also assumed that powers[0] = 1. -// The slices powers and A will be modified -func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { - if ends[len(ends)-1] != len(A) || len(A) != len(powers) { - panic("lengths mismatch") - } - - // zero out the large coefficients - for i := range ends { - powers[ends[i]-1].SetZero() - } - - msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} - - if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { - panic(err) - } - - var rInvNeg fr.Element - rInvNeg.Inverse(&powers[1]) - rInvNeg.Neg(&rInvNeg) - prevEnd := 0 - - // r⁻¹.truncated = - // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] - // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] - // ... - // - // compute shifted as - // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... 
- // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... - // + r⁻¹.truncated - for i := range ends { - powers[2*i].Mul(&powers[prevEnd], &rInvNeg) - powers[2*i+1] = powers[ends[i]-2] - A[2*i] = A[prevEnd] - A[2*i+1] = A[ends[i]-1] - prevEnd = ends[i] - } - powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated - A[2*len(ends)] = truncated - - // TODO @Tabaie O(1) MSM worth it? - if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { - panic(err) - } - - return -} - -// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i -// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). -// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ -func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { - - N := len(A) - - if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { - panic(err) - } - - // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] - var ( - x fr.Element - i big.Int - ) - x.Neg(&rPowers[N-2]) - x.BigInt(&i) - truncated. - ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] - Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] - - rPowers[1].BigInt(&i) - truncated. - ScalarMultiplication(&truncated, &i). 
// r.shifted - rᴺ⁻¹.A[N-1] - Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] - - return -} - -// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) -// it is to be used as a challenge for generating a proof of knowledge to x -// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) -func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) - buf.Write(sG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) - } - return spG2 -} - -type pair struct { - g1 curve.G1Affine - g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. -} - -// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero -func (p *pair) validUpdate() bool { - // if the contribution is 0 the product is doomed to be 0. - // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail - return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) -} - -type valueUpdate struct { - contributionCommitment curve.G1Affine // x or [Xⱼ]₁ - contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ -} - -// newValueUpdate produces values associated with contribution to an existing value. -// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { - if _, err := contributionValue.SetRandom(); err != nil { - panic(err) - } - var contributionValueI big.Int - contributionValue.BigInt(&contributionValueI) - - _, _, gen1, _ := curve.Generators() - proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) - - // proof of knowledge to commitment. 
Algorithm 3 from section 3.7 - pokBase := genR(proof.contributionCommitment, challenge, dst) // r - proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) - - return -} - -// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) -// option for linear combination vector - -// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 -// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution -// and previous commitment makes the new commitment. -// denom, num are assumed to be valid. No subgroup check and the like. -func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { - noG2 := denom.g2 == nil - if noG2 != (num.g2 == nil) { - return errors.New("erasing or creating g2 values") - } - - if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { - return errors.New("contribution values subgroup check failed") - } - - // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base - _, _, g1, _ := curve.Generators() - if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r - return errors.New("contribution proof of knowledge verification failed") - } - - // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. 
- if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { - return errors.New("g2 update inconsistent") - } - - // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) - // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { - return errors.New("g1 update inconsistent") - } - - return nil -} - func areInSubGroupG1(s []curve.G1Affine) bool { for i := range s { if !s[i].IsInSubGroup() { @@ -302,94 +94,3 @@ func areInSubGroupG2(s []curve.G2Affine) bool { } return true } - -// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... -// all concatenated in the same slice -func bivariateRandomMonomials(ends ...int) []fr.Element { - if len(ends) == 0 { - return nil - } - - res := make([]fr.Element, ends[len(ends)-1]) - if _, err := res[1].SetRandom(); err != nil { - panic(err) - } - setPowers(res[:ends[0]]) - - if len(ends) == 1 { - return res - } - - y := make([]fr.Element, len(ends)) - if _, err := y[1].SetRandom(); err != nil { - panic(err) - } - setPowers(y) - - for d := 1; d < len(ends); d++ { - xdeg := ends[d] - ends[d-1] - if xdeg > ends[0] { - panic("impl detail: first maximum degree for x must be the greatest") - } - - for i := range xdeg { - res[ends[d-1]+i].Mul(&res[i], &y[d]) - } - } - - return res -} - -// sets x[i] = x[1]ⁱ -func setPowers(x []fr.Element) { - if len(x) == 0 { - return - } - x[0].SetOne() - for i := 2; i < len(x); i++ { - x[i].Mul(&x[i-1], &x[1]) - } -} - -func partialSums(s ...int) []int { - if len(s) == 0 { - return nil - } - sums := make([]int, len(s)) - sums[0] = s[0] - for i := 1; i < len(s); i++ { - sums[i] = sums[i-1] + s[i] - } - return sums -} - -func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { - var ( - bb bytes.Buffer - err error - ) - bb.Grow(len(hash) + len(beaconChallenge)) - bb.Write(hash) - bb.Write(beaconChallenge) - - res := make([]fr.Element, 1) - - 
allNonZero := func() bool { - for i := range res { - if res[i].IsZero() { - return false - } - } - return true - } - - // cryptographically unlikely for this to be run more than once - for !allNonZero() { - if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { - panic(err) - } - bb.WriteByte('=') // padding just so that the hash is different next time - } - - return res -} diff --git a/go.mod b/go.mod index 81af06ca79..116789e6e2 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/blang/semver/v4 v4.0.0 github.com/consensys/bavard v0.1.24 github.com/consensys/compress v0.2.5 - github.com/consensys/gnark-crypto v0.14.1-0.20241221144950-f08f759bd65b + github.com/consensys/gnark-crypto v0.14.1-0.20241224223818-dbea2d722220 github.com/fxamacker/cbor/v2 v2.7.0 github.com/google/go-cmp v0.6.0 github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 diff --git a/go.sum b/go.sum index b11293f5ce..060264b7d0 100644 --- a/go.sum +++ b/go.sum @@ -61,8 +61,8 @@ github.com/consensys/bavard v0.1.24 h1:Lfe+bjYbpaoT7K5JTFoMi5wo9V4REGLvQQbHmatoN github.com/consensys/bavard v0.1.24/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs= github.com/consensys/compress v0.2.5 h1:gJr1hKzbOD36JFsF1AN8lfXz1yevnJi1YolffY19Ntk= github.com/consensys/compress v0.2.5/go.mod h1:pyM+ZXiNUh7/0+AUjUf9RKUM6vSH7T/fsn5LLS0j1Tk= -github.com/consensys/gnark-crypto v0.14.1-0.20241221144950-f08f759bd65b h1:OHVIrLobpH31u9k2F5RmqFPkasbXcTMZ9E2Jzimu+cM= -github.com/consensys/gnark-crypto v0.14.1-0.20241221144950-f08f759bd65b/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc= +github.com/consensys/gnark-crypto v0.14.1-0.20241224223818-dbea2d722220 h1:bPI39r97HaofW0dkMtm//yXRoRDtEEEQtg6oS6qMRnM= +github.com/consensys/gnark-crypto v0.14.1-0.20241224223818-dbea2d722220/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= 
github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl index 88228e79a9..41bd4edb12 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl @@ -9,6 +9,7 @@ import ( {{- template "import_fr" . }} {{- template "import_curve" . }} + "github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/mpcsetup" ) // SrsCommons are the circuit-independent components of the Groth16 SRS, @@ -32,7 +33,7 @@ type SrsCommons struct { // Also known as "Powers of Tau" type Phase1 struct { proofs struct { - Tau, Alpha, Beta valueUpdate + Tau, Alpha, Beta mpcsetup.UpdateProof } parameters SrsCommons Challenge []byte // Hash of the transcript PRIOR to this participant @@ -48,9 +49,9 @@ func (p *Phase1) Contribute() { tauContrib, alphaContrib, betaContrib fr.Element ) - p.proofs.Tau, tauContrib = newValueUpdate(p.Challenge, 1) - p.proofs.Alpha, alphaContrib = newValueUpdate(p.Challenge, 2) - p.proofs.Beta, betaContrib = newValueUpdate(p.Challenge, 3) + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } @@ -125,7 +126,7 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { - newContribs := beaconContributions(p.hash(), beaconChallenge, 3) + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) return p.parameters } @@ -163,13 +164,16 @@ func (p *Phase1) Verify(next *Phase1) error { } // verify updates to τ, α, β - if err := next.proofs.Tau.verify(pair{p.parameters.G1.Tau[1], nil}, pair{next.parameters.G1.Tau[1], nil}, challenge, 1); err != nil { + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if err := next.proofs.Alpha.verify(pair{p.parameters.G1.AlphaTau[0], nil}, pair{next.parameters.G1.AlphaTau[0], nil}, challenge, 2); err != nil { + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { return fmt.Errorf("failed to verify contribution to α: %w", err) } - if err := next.proofs.Beta.verify(pair{p.parameters.G1.BetaTau[0], &p.parameters.G2.Beta}, pair{next.parameters.G1.BetaTau[0], &next.parameters.G2.Beta}, challenge, 3); err != nil { + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, + {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } @@ -180,7 +184,7 @@ func (p *Phase1) Verify(next *Phase1) error { return errors.New("derived values 𝔾₂ subgroup check failed") } - return multiValueUpdateCheck( + return mpcsetup.SameRatioMany( p.parameters.G1.Tau, p.parameters.G2.Tau, p.parameters.G1.AlphaTau, @@ -188,70 +192,11 @@ func (p *Phase1) Verify(next *Phase1) error { ) } -// multiValueUpdateCheck 
checks that aᵢ₊₁/aᵢ = bⱼ₊₁/bⱼ = cₖ₊₁/cₖ = dₗ₊₁/dₗ for all applicable i,j,k,l -// in other words it checks that there is x such that aᵢ = xʲa₀, bⱼ = xʲb₀, cₖ = xʲc₀, dₗ = xʲd₀ -func multiValueUpdateCheck(a []curve.G1Affine, b []curve.G2Affine, c, d []curve.G1Affine) error { - // lemma: let K be a field and - // F = ∑ fᵢⱼ XⁱYʲ F' = ∑ f'ᵢⱼ XⁱYʲ - // G = ∑ gᵢ Zⁱ G' = ∑ g'ᵢ Zⁱ - // polynomials in K[X,Y,Z]. - // if F/F' = G/G' - // then F/F' = G/G' ∈ K - // - // view our polynomials in K[X,Y,Z] - // By multiplying out the polynomials we get - // FG' = F'G ⇒ ∑ fᵢⱼg'ₖ XᶦYʲZᵏ = ∑ f'ᵢⱼgₖₗ XᶦYʲZᵏ - // pick i₀ ,j₀ , k₀ where f'ᵢ₀ⱼ₀, g'ₖ₀ ≠ 0 - // let x ≔ fᵢ₀ⱼ₀/f'ᵢ₀ⱼ₀ = gₖ₀/g'ₖ₀ - // now for any i,j: fᵢⱼg'ₖ₀ = f'ᵢⱼgₖ₀ ⇒ - // fᵢⱼ = x f'ᵢⱼ - // likewise for any i: fᵢ₀ⱼ₀g'ᵢ = f'ᵢ₀ⱼ₀gᵢ ⇒ - // gᵢ = x g'ᵢ - - // now we use this to check that: - // 1. aᵢ ≔ G1.Tau[i] = [τⁱ]₁ - // 2. bᵢ ≔ G2.Tau[i] = [τⁱ]₂ - // 3. cᵢ ≔ G1.AlphaTau[i] = [ατⁱ]₁ - // 4. dᵢ ≔ G1.BetaTau[i] = [βτⁱ]₁ - - // construct the polynomials - // F ≔ a₀ + a₁X + ... + a₂ₙ₋₃X²ᴺ⁻³ + c₀Y + c₁XY + ... + cₙ₋₂Xᴺ⁻²Y + d₀Y² + d₁XY² + ... + dₙ₋₂Xᴺ⁻²Y² - // F' ≔ a₁ + a₂X + ... + a₂ₙ₋₂X²ᴺ⁻³ + c₁Y + c₂XY + ... + cₙ₋₁Xᴺ⁻²Y + d₁Y² + d₂XY² + ... + dₙ₋₁Xᴺ⁻²Y² - // G ≔ b₀ + b₁Z + ... + bₙ₋₂Zᴺ⁻² - // G' ≔ b₁ + b₂Z + ... + bₙ₋₁Zᴺ⁻² - - // if F/F' = G/G' we get F/F' = G/G' = a₀/a₁ = 1/τ, which yields: - // for 0 ≤ i ≤ N-2: bᵢ = bᵢ₊₁/τ, cᵢ = cᵢ₊₁/τ, dᵢ = dᵢ₊₁/τ - // for 0 ≤ i ≤ 2N-3: aᵢ = aᵢ₊₁/τ - - // from previous checks we already know: - // 1. a₀ = 1 - // 2. b₀ = 1 - // 3. c₀ = α - // 4. d₀ = β - // and so the desired results follow - - ends := partialSums(len(a), len(c), len(d)) - - g1s := make([]curve.G1Affine, 0, ends[len(ends)-1]) - g1s = append(g1s, a...) - g1s = append(g1s, c...) - g1s = append(g1s, d...) 
- - g1Num, g1Denom := linearCombinationsG1(g1s, bivariateRandomMonomials(ends...), ends) - g2Num, g2Denom := linearCombinationsG2(b, randomMonomials(len(b))) - - if !sameRatio(g1Num, g1Denom, g2Num, g2Denom) { - return errors.New("multi-value update check failed") - } - - return nil - -} - func (p *Phase1) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl index 1f2792d447..b646202695 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl @@ -13,6 +13,7 @@ import ( {{- template "import_fr" . }} {{- template "import_curve" . }} {{- template "import_backend_cs" . }} + "github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/mpcsetup" ) // Phase2Evaluations components of the circuit keys @@ -45,8 +46,8 @@ type Phase2 struct { } // Proofs of update correctness - Sigmas []valueUpdate - Delta valueUpdate + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof // Challenge is the hash of the PREVIOUS contribution Challenge []byte @@ -66,15 +67,6 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } - r := randomMonomials(len(next.Parameters.G1.Z) + len(next.Parameters.G1.PKK) + 1) - - verifyContribution := func(update *valueUpdate, g1Denominator, g1Numerator []curve.G1Affine, g2Denominator, g2Numerator *curve.G2Affine, dst byte) error { - g1Num := linearCombination(g1Numerator, r) - g1Denom := linearCombination(g1Denominator, r) - - return update.verify(pair{g1Denom, g2Denominator}, pair{g1Num, g2Numerator}, challenge, dst) - } - // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to 
Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment @@ -82,7 +74,9 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("commitment proving key subgroup check failed") } - if err := verifyContribution(&next.Sigmas[i], p.Parameters.G1.SigmaCKK[i], next.Parameters.G1.SigmaCKK[i], &p.Parameters.G2.Sigma[i], &next.Parameters.G2.Sigma[i], 2+byte(i)); err != nil { + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } @@ -93,9 +87,12 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("derived values 𝔾₁ subgroup check failed") } - denom := cloneAppend([]curve.G1Affine{p.Parameters.G1.Delta}, next.Parameters.G1.Z, next.Parameters.G1.PKK) - num := cloneAppend([]curve.G1Affine{next.Parameters.G1.Delta}, p.Parameters.G1.Z, p.Parameters.G1.PKK) - if err := verifyContribution(&next.Delta, denom, num, &p.Parameters.G2.Delta, &next.Parameters.G2.Delta, 1); err != nil { + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, + {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, + {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } @@ -145,14 +142,14 @@ func (p *Phase2) Contribute() { // sample value contributions and provide correctness proofs var delta fr.Element - p.Delta, delta = newValueUpdate(p.Challenge, 1) + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) sigma := make([]fr.Element, 
len(p.Parameters.G1.SigmaCKK)) if len(sigma) > 255 { panic("too many commitments") // DST collision } for i := range sigma { - p.Sigmas[i], sigma[i] = newValueUpdate(p.Challenge, byte(2+i)) + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) } p.update(&delta, sigma) @@ -270,7 +267,7 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) - p.Sigmas = make([]valueUpdate, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) @@ -336,19 +333,9 @@ func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c func (p *Phase2) hash() []byte { sha := sha256.New() - p.WriteTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } sha.Write(p.Challenge) return sha.Sum(nil) -} - -func cloneAppend(s ...[]curve.G1Affine) []curve.G1Affine { - l := 0 - for _, s := range s { - l += len(s) - } - res := make([]curve.G1Affine, 0, l) - for _, s := range s { - res = append(res, s...) 
- } - return res } \ No newline at end of file diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl index 1d37386c54..d6249e49b4 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl @@ -22,11 +22,6 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aᴺ⁻¹ ] for random a -func randomMonomials(N int) []fr.Element { - return bivariateRandomMonomials(N) -} - // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { @@ -75,211 +70,8 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check n₁/d₁ = n₂/d₂ i.e. e(n₁, d₂) = e(d₁, n₂). No subgroup checks. -func sameRatio(n1, d1 curve.G1Affine, n2, d2 curve.G2Affine) bool { - var nd1 curve.G1Affine - nd1.Neg(&d1) - res, err := curve.PairingCheck( - []curve.G1Affine{n1, nd1}, - []curve.G2Affine{d2, n2}) - if err != nil { - panic(err) - } - return res -} - -// returns ∑ rᵢAᵢ -func linearCombination(A []curve.G1Affine, r []fr.Element) curve.G1Affine { - nc := runtime.NumCPU() - var res curve.G1Affine - if _, err := res.MultiExp(A, r[:len(A)], ecc.MultiExpConfig{NbTasks: nc}); err != nil { - panic(err) - } - return res -} - -// linearCombinationsG1 returns -// -// powers[0].A[0] + powers[1].A[1] + ... + powers[ends[0]-2].A[ends[0]-2] -// + powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-2].A[ends[1]-2] -// .... (truncated) -// -// powers[0].A[1] + powers[1].A[2] + ... + powers[ends[0]-2].A[ends[0]-1] -// + powers[ends[0]].A[ends[0]+1] + ... + powers[ends[1]-2].A[ends[1]-1] -// .... (shifted) -// -// It is assumed without checking that powers[i+1] = powers[i]*powers[1] unless i+1 is a partial sum of sizes. -// Also assumed that powers[0] = 1. 
-// The slices powers and A will be modified -func linearCombinationsG1(A []curve.G1Affine, powers []fr.Element, ends []int) (truncated, shifted curve.G1Affine) { - if ends[len(ends)-1] != len(A) || len(A) != len(powers) { - panic("lengths mismatch") - } - - // zero out the large coefficients - for i := range ends { - powers[ends[i]-1].SetZero() - } - - msmCfg := ecc.MultiExpConfig{NbTasks: runtime.NumCPU()} - - if _, err := truncated.MultiExp(A, powers, msmCfg); err != nil { - panic(err) - } - - var rInvNeg fr.Element - rInvNeg.Inverse(&powers[1]) - rInvNeg.Neg(&rInvNeg) - prevEnd := 0 - - // r⁻¹.truncated = - // r⁻¹.powers[0].A[0] + powers[0].A[1] + ... + powers[ends[0]-3].A[ends[0]-2] - // + r⁻¹.powers[ends[0]].A[ends[0]] + ... + powers[ends[1]-3].A[ends[1]-2] - // ... - // - // compute shifted as - // - r⁻¹.powers[0].A[0] - r⁻¹.powers[ends[0]].A[ends[0]] - ... - // + powers[ends[0]-2].A[ends[0]-1] + powers[ends[1]-2].A[ends[1]-1] + ... - // + r⁻¹.truncated - for i := range ends { - powers[2*i].Mul(&powers[prevEnd], &rInvNeg) - powers[2*i+1] = powers[ends[i]-2] - A[2*i] = A[prevEnd] - A[2*i+1] = A[ends[i]-1] - prevEnd = ends[i] - } - powers[2*len(ends)].Neg(&rInvNeg) // r⁻¹: coefficient for truncated - A[2*len(ends)] = truncated - - // TODO @Tabaie O(1) MSM worth it? - if _, err := shifted.MultiExp(A[:2*len(ends)+1], powers[:2*len(ends)+1], msmCfg); err != nil { - panic(err) - } - - return -} - -// linearCombinationsG2 assumes, and does not check, that rPowers[i+1] = rPowers[1].rPowers[i] for all applicable i -// Also assumed that 3 ≤ N ≔ len(A) ≤ len(rPowers). 
-// The results are truncated = ∑_{i=0}^{N-2} rⁱAᵢ, shifted = ∑_{i=1}^{N-1} rⁱAᵢ -func linearCombinationsG2(A []curve.G2Affine, rPowers []fr.Element) (truncated, shifted curve.G2Affine) { - - N := len(A) - - if _, err := shifted.MultiExp(A[1:], rPowers[:N-1], ecc.MultiExpConfig{NbTasks: runtime.NumCPU()}); err != nil { - panic(err) - } - - // truncated = r.shifted - rᴺ⁻¹.A[N-1] + A[0] - var ( - x fr.Element - i big.Int - ) - x.Neg(&rPowers[N-2]) - x.BigInt(&i) - truncated. - ScalarMultiplication(&A[N-1], &i). // - rᴺ⁻².A[N-1] - Add(&truncated, &shifted) // shifted - rᴺ⁻².A[N-1] - - rPowers[1].BigInt(&i) - truncated. - ScalarMultiplication(&truncated, &i). // r.shifted - rᴺ⁻¹.A[N-1] - Add(&truncated, &A[0]) // r.shifted - rᴺ⁻¹.A[N-1] + A[0] - - return -} - -// Generate R∈𝔾₂ as Hash(gˢ, gˢˣ, challenge, dst) -// it is to be used as a challenge for generating a proof of knowledge to x -// π ≔ x.r; e([1]₁, π) =﹖ e([x]₁, r) -func genR(sG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed) - buf.Write(sG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) - } - return spG2 -} - -type pair struct { - g1 curve.G1Affine - g2 *curve.G2Affine // optional; some values expect to have a 𝔾₂ representation, some don't. -} - -// check that g1, g2 are valid as updated values, i.e. in their subgroups, and non-zero -func (p *pair) validUpdate() bool { - // if the contribution is 0 the product is doomed to be 0. 
- // no need to check this for g2 independently because if g1 is 0 and g2 is not, consistency checks will fail - return !p.g1.IsInfinity() && p.g1.IsInSubGroup() && (p.g2 == nil || p.g2.IsInSubGroup()) -} - -type valueUpdate struct { - contributionCommitment curve.G1Affine // x or [Xⱼ]₁ - contributionPok curve.G2Affine // π ≔ x.r ∈ 𝔾₂ -} - -// newValueUpdate produces values associated with contribution to an existing value. -// the second output is toxic waste. It is the caller's responsibility to safely "dispose" of it. -func newValueUpdate(challenge []byte, dst byte) (proof valueUpdate, contributionValue fr.Element) { - if _, err := contributionValue.SetRandom(); err != nil { - panic(err) - } - var contributionValueI big.Int - contributionValue.BigInt(&contributionValueI) - - _, _, gen1, _ := curve.Generators() - proof.contributionCommitment.ScalarMultiplication(&gen1, &contributionValueI) - - // proof of knowledge to commitment. Algorithm 3 from section 3.7 - pokBase := genR(proof.contributionCommitment, challenge, dst) // r - proof.contributionPok.ScalarMultiplication(&pokBase, &contributionValueI) - - return -} - -// TODO @Tabaie batchVerify(denomG1, numG1 []G1Affine, denomG2, numG2 []G2Affine, challenge, dst) -// option for linear combination vector - -// verify corresponds with verification steps {i, i+3} with 1 ≤ i ≤ 3 in section 7.1 of Bowe-Gabizon17 -// it checks the proof of knowledge of the contribution, and the fact that the product of the contribution -// and previous commitment makes the new commitment. -// denom, num are assumed to be valid. No subgroup check and the like. 
-func (x *valueUpdate) verify(denom, num pair, challenge []byte, dst byte) error { - noG2 := denom.g2 == nil - if noG2 != (num.g2 == nil) { - return errors.New("erasing or creating g2 values") - } - - if !x.contributionPok.IsInSubGroup() || !x.contributionCommitment.IsInSubGroup() || !num.validUpdate() { - return errors.New("contribution values subgroup check failed") - } - - // verify commitment proof of knowledge. CheckPOK, algorithm 4 from section 3.7 - r := genR(x.contributionCommitment, challenge, dst) // verification challenge in the form of a g2 base - _, _, g1, _ := curve.Generators() - if !sameRatio(x.contributionCommitment, g1, x.contributionPok, r) { // π =? x.r i.e. x/g1 =? π/r - return errors.New("contribution proof of knowledge verification failed") - } - - // check that the num/denom ratio is consistent between the 𝔾₁ and 𝔾₂ representations. Based on CONSISTENT, algorithm 2 in Section 3.6. - if !noG2 && !sameRatio(num.g1, denom.g1, *num.g2, *denom.g2) { - return errors.New("g2 update inconsistent") - } - - // now verify that num₁/denom₁ = x ( = x/g1 = π/r ) - // have to use the latter value for the RHS because we sameRatio needs both 𝔾₁ and 𝔾₂ values - if !sameRatio(num.g1, denom.g1, x.contributionPok, r) { - return errors.New("g1 update inconsistent") - } - - return nil -} - func areInSubGroupG1(s []curve.G1Affine) bool { - for i := range s { + for i := range s { if !s[i].IsInSubGroup() { return false } @@ -288,101 +80,10 @@ func areInSubGroupG1(s []curve.G1Affine) bool { } func areInSubGroupG2(s []curve.G2Affine) bool { - for i := range s { + for i := range s { if !s[i].IsInSubGroup() { return false } } return true -} - -// bivariateRandomMonomials returns 1, x, ..., x^{ends[0]-1}; y, xy, ..., x^{ends[1]-ends[0]-1}y; ... 
-// all concatenated in the same slice -func bivariateRandomMonomials(ends ...int) []fr.Element { - if len(ends) == 0 { - return nil - } - - res := make([]fr.Element, ends[len(ends)-1]) - if _, err := res[1].SetRandom(); err != nil { - panic(err) - } - setPowers(res[:ends[0]]) - - if len(ends) == 1 { - return res - } - - y := make([]fr.Element, len(ends)) - if _, err := y[1].SetRandom(); err != nil { - panic(err) - } - setPowers(y) - - for d := 1; d < len(ends); d++ { - xdeg := ends[d] - ends[d-1] - if xdeg > ends[0] { - panic("impl detail: first maximum degree for x must be the greatest") - } - - for i := range xdeg { - res[ends[d-1]+i].Mul(&res[i], &y[d]) - } - } - - return res -} - -// sets x[i] = x[1]ⁱ -func setPowers(x []fr.Element) { - if len(x) == 0 { - return - } - x[0].SetOne() - for i := 2; i < len(x); i++ { - x[i].Mul(&x[i-1], &x[1]) - } -} - -func partialSums(s ...int) []int { - if len(s) == 0 { - return nil - } - sums := make([]int, len(s)) - sums[0] = s[0] - for i := 1; i < len(s); i++ { - sums[i] = sums[i-1] + s[i] - } - return sums -} - -func beaconContributions(hash, beaconChallenge []byte, n int) []fr.Element { - var ( - bb bytes.Buffer - err error - ) - bb.Grow(len(hash) + len(beaconChallenge)) - bb.Write(hash) - bb.Write(beaconChallenge) - - res := make([]fr.Element, 1) - - allNonZero := func() bool { - for i := range res { - if res[i].IsZero() { - return false - } - } - return true - } - - // cryptographically unlikely for this to be run more than once - for !allNonZero() { - if res, err = fr.Hash(bb.Bytes(), []byte("Groth16 SRS generation ceremony - Phase 1 Final Step"), n); err != nil { - panic(err) - } - bb.WriteByte('=') // padding just so that the hash is different next time - } - - return res } \ No newline at end of file From 73dd2861c162f5cb2aac81f5e403601276920650 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 15:05:13 -0600 Subject: [PATCH 092/105] chore: generify --- 
backend/groth16/bls12-377/mpcsetup/marshal.go | 21 ++----------------- backend/groth16/bls12-377/mpcsetup/setup.go | 3 ++- backend/groth16/bls12-377/mpcsetup/utils.go | 4 ---- backend/groth16/bls12-381/mpcsetup/marshal.go | 21 ++----------------- backend/groth16/bls12-381/mpcsetup/setup.go | 3 ++- backend/groth16/bls12-381/mpcsetup/utils.go | 4 ---- backend/groth16/bls24-315/mpcsetup/marshal.go | 21 ++----------------- backend/groth16/bls24-315/mpcsetup/setup.go | 3 ++- backend/groth16/bls24-315/mpcsetup/utils.go | 4 ---- backend/groth16/bls24-317/mpcsetup/marshal.go | 21 ++----------------- backend/groth16/bls24-317/mpcsetup/setup.go | 3 ++- backend/groth16/bls24-317/mpcsetup/utils.go | 4 ---- backend/groth16/bn254/mpcsetup/marshal.go | 21 ++----------------- backend/groth16/bn254/mpcsetup/setup.go | 3 ++- backend/groth16/bw6-633/mpcsetup/marshal.go | 21 ++----------------- backend/groth16/bw6-633/mpcsetup/setup.go | 3 ++- backend/groth16/bw6-633/mpcsetup/utils.go | 4 ---- backend/groth16/bw6-761/mpcsetup/marshal.go | 21 ++----------------- backend/groth16/bw6-761/mpcsetup/setup.go | 3 ++- backend/groth16/bw6-761/mpcsetup/utils.go | 4 ---- .../groth16/mpcsetup/marshal.go.tmpl | 21 ++----------------- .../zkpschemes/groth16/mpcsetup/setup.go.tmpl | 3 ++- .../zkpschemes/groth16/mpcsetup/utils.go.tmpl | 4 ---- 23 files changed, 32 insertions(+), 188 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/marshal.go b/backend/groth16/bls12-377/mpcsetup/marshal.go index d2b4bf3ab9..8a1ccb0004 100644 --- a/backend/groth16/bls12-377/mpcsetup/marshal.go +++ b/backend/groth16/bls12-377/mpcsetup/marshal.go @@ -8,6 +8,7 @@ package mpcsetup import ( "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" + "github.com/consensys/gnark-crypto/ecc/bls12-377/mpcsetup" "github.com/consensys/gnark/internal/utils" "io" ) @@ -113,7 +114,7 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { } n := int64(2) // we've definitely successfully read 2 
bytes - p.Sigmas = make([]valueUpdate, nbCommitments) + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) @@ -247,21 +248,3 @@ func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { } return dec.BytesRead(), nil } - -func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { - enc := curve.NewEncoder(writer) - if err = enc.Encode(&x.contributionCommitment); err != nil { - return enc.BytesWritten(), err - } - err = enc.Encode(&x.contributionPok) - return enc.BytesWritten(), err -} - -func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { - dec := curve.NewDecoder(reader) - if err = dec.Decode(&x.contributionCommitment); err != nil { - return dec.BytesRead(), err - } - err = dec.Decode(&x.contributionPok) - return dec.BytesRead(), err -} diff --git a/backend/groth16/bls12-377/mpcsetup/setup.go b/backend/groth16/bls12-377/mpcsetup/setup.go index 0b1a5afd62..3304ba0350 100644 --- a/backend/groth16/bls12-377/mpcsetup/setup.go +++ b/backend/groth16/bls12-377/mpcsetup/setup.go @@ -9,6 +9,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/fft" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bls12-377/mpcsetup" "github.com/consensys/gnark/backend/groth16" groth16Impl "github.com/consensys/gnark/backend/groth16/bls12-377" ) @@ -23,7 +24,7 @@ import ( func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { // final contributions - contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) p.update(&contributions[0], contributions[1:]) _, _, _, g2 
:= curve.Generators() diff --git a/backend/groth16/bls12-377/mpcsetup/utils.go b/backend/groth16/bls12-377/mpcsetup/utils.go index 48bba02914..3cb4114fca 100644 --- a/backend/groth16/bls12-377/mpcsetup/utils.go +++ b/backend/groth16/bls12-377/mpcsetup/utils.go @@ -6,15 +6,11 @@ package mpcsetup import ( - "bytes" - "errors" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" - "runtime" ) func bitReverse[T any](a []T) { diff --git a/backend/groth16/bls12-381/mpcsetup/marshal.go b/backend/groth16/bls12-381/mpcsetup/marshal.go index 1889993f18..33d1d82b84 100644 --- a/backend/groth16/bls12-381/mpcsetup/marshal.go +++ b/backend/groth16/bls12-381/mpcsetup/marshal.go @@ -8,6 +8,7 @@ package mpcsetup import ( "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" + "github.com/consensys/gnark-crypto/ecc/bls12-381/mpcsetup" "github.com/consensys/gnark/internal/utils" "io" ) @@ -113,7 +114,7 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { } n := int64(2) // we've definitely successfully read 2 bytes - p.Sigmas = make([]valueUpdate, nbCommitments) + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) @@ -247,21 +248,3 @@ func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { } return dec.BytesRead(), nil } - -func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { - enc := curve.NewEncoder(writer) - if err = enc.Encode(&x.contributionCommitment); err != nil { - return enc.BytesWritten(), err - } - err = enc.Encode(&x.contributionPok) - return enc.BytesWritten(), err -} - -func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { - dec := curve.NewDecoder(reader) - if err = 
dec.Decode(&x.contributionCommitment); err != nil { - return dec.BytesRead(), err - } - err = dec.Decode(&x.contributionPok) - return dec.BytesRead(), err -} diff --git a/backend/groth16/bls12-381/mpcsetup/setup.go b/backend/groth16/bls12-381/mpcsetup/setup.go index 48708f63fb..0a4e25ac64 100644 --- a/backend/groth16/bls12-381/mpcsetup/setup.go +++ b/backend/groth16/bls12-381/mpcsetup/setup.go @@ -9,6 +9,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/fft" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bls12-381/mpcsetup" "github.com/consensys/gnark/backend/groth16" groth16Impl "github.com/consensys/gnark/backend/groth16/bls12-381" ) @@ -23,7 +24,7 @@ import ( func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { // final contributions - contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) p.update(&contributions[0], contributions[1:]) _, _, _, g2 := curve.Generators() diff --git a/backend/groth16/bls12-381/mpcsetup/utils.go b/backend/groth16/bls12-381/mpcsetup/utils.go index 6dc9ad7ec0..e5ca0f1ff8 100644 --- a/backend/groth16/bls12-381/mpcsetup/utils.go +++ b/backend/groth16/bls12-381/mpcsetup/utils.go @@ -6,15 +6,11 @@ package mpcsetup import ( - "bytes" - "errors" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" - "runtime" ) func bitReverse[T any](a []T) { diff --git a/backend/groth16/bls24-315/mpcsetup/marshal.go b/backend/groth16/bls24-315/mpcsetup/marshal.go index a324c2c11c..8d60d4f533 100644 --- 
a/backend/groth16/bls24-315/mpcsetup/marshal.go +++ b/backend/groth16/bls24-315/mpcsetup/marshal.go @@ -8,6 +8,7 @@ package mpcsetup import ( "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" + "github.com/consensys/gnark-crypto/ecc/bls24-315/mpcsetup" "github.com/consensys/gnark/internal/utils" "io" ) @@ -113,7 +114,7 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { } n := int64(2) // we've definitely successfully read 2 bytes - p.Sigmas = make([]valueUpdate, nbCommitments) + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) @@ -247,21 +248,3 @@ func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { } return dec.BytesRead(), nil } - -func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { - enc := curve.NewEncoder(writer) - if err = enc.Encode(&x.contributionCommitment); err != nil { - return enc.BytesWritten(), err - } - err = enc.Encode(&x.contributionPok) - return enc.BytesWritten(), err -} - -func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { - dec := curve.NewDecoder(reader) - if err = dec.Decode(&x.contributionCommitment); err != nil { - return dec.BytesRead(), err - } - err = dec.Decode(&x.contributionPok) - return dec.BytesRead(), err -} diff --git a/backend/groth16/bls24-315/mpcsetup/setup.go b/backend/groth16/bls24-315/mpcsetup/setup.go index be7e152ccd..3691bf5a73 100644 --- a/backend/groth16/bls24-315/mpcsetup/setup.go +++ b/backend/groth16/bls24-315/mpcsetup/setup.go @@ -9,6 +9,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/fft" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bls24-315/mpcsetup" "github.com/consensys/gnark/backend/groth16" groth16Impl 
"github.com/consensys/gnark/backend/groth16/bls24-315" ) @@ -23,7 +24,7 @@ import ( func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { // final contributions - contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) p.update(&contributions[0], contributions[1:]) _, _, _, g2 := curve.Generators() diff --git a/backend/groth16/bls24-315/mpcsetup/utils.go b/backend/groth16/bls24-315/mpcsetup/utils.go index 6a84f08932..7978e7ef1c 100644 --- a/backend/groth16/bls24-315/mpcsetup/utils.go +++ b/backend/groth16/bls24-315/mpcsetup/utils.go @@ -6,15 +6,11 @@ package mpcsetup import ( - "bytes" - "errors" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" - "runtime" ) func bitReverse[T any](a []T) { diff --git a/backend/groth16/bls24-317/mpcsetup/marshal.go b/backend/groth16/bls24-317/mpcsetup/marshal.go index d00cb1d5d9..5dd3ef79a8 100644 --- a/backend/groth16/bls24-317/mpcsetup/marshal.go +++ b/backend/groth16/bls24-317/mpcsetup/marshal.go @@ -8,6 +8,7 @@ package mpcsetup import ( "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" + "github.com/consensys/gnark-crypto/ecc/bls24-317/mpcsetup" "github.com/consensys/gnark/internal/utils" "io" ) @@ -113,7 +114,7 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { } n := int64(2) // we've definitely successfully read 2 bytes - p.Sigmas = make([]valueUpdate, nbCommitments) + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) @@ -247,21 +248,3 @@ func (c 
*SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { } return dec.BytesRead(), nil } - -func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { - enc := curve.NewEncoder(writer) - if err = enc.Encode(&x.contributionCommitment); err != nil { - return enc.BytesWritten(), err - } - err = enc.Encode(&x.contributionPok) - return enc.BytesWritten(), err -} - -func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { - dec := curve.NewDecoder(reader) - if err = dec.Decode(&x.contributionCommitment); err != nil { - return dec.BytesRead(), err - } - err = dec.Decode(&x.contributionPok) - return dec.BytesRead(), err -} diff --git a/backend/groth16/bls24-317/mpcsetup/setup.go b/backend/groth16/bls24-317/mpcsetup/setup.go index 42b6c9e4b7..f5c4c2e625 100644 --- a/backend/groth16/bls24-317/mpcsetup/setup.go +++ b/backend/groth16/bls24-317/mpcsetup/setup.go @@ -9,6 +9,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/fft" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bls24-317/mpcsetup" "github.com/consensys/gnark/backend/groth16" groth16Impl "github.com/consensys/gnark/backend/groth16/bls24-317" ) @@ -23,7 +24,7 @@ import ( func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { // final contributions - contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) p.update(&contributions[0], contributions[1:]) _, _, _, g2 := curve.Generators() diff --git a/backend/groth16/bls24-317/mpcsetup/utils.go b/backend/groth16/bls24-317/mpcsetup/utils.go index 03bcd6aa78..c3b6dd8b15 100644 --- a/backend/groth16/bls24-317/mpcsetup/utils.go +++ b/backend/groth16/bls24-317/mpcsetup/utils.go @@ -6,15 
+6,11 @@ package mpcsetup import ( - "bytes" - "errors" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" - "runtime" ) func bitReverse[T any](a []T) { diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index e8125b17d1..7110b846d2 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -8,6 +8,7 @@ package mpcsetup import ( "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/mpcsetup" "github.com/consensys/gnark/internal/utils" "io" ) @@ -113,7 +114,7 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { } n := int64(2) // we've definitely successfully read 2 bytes - p.Sigmas = make([]valueUpdate, nbCommitments) + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) @@ -247,21 +248,3 @@ func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { } return dec.BytesRead(), nil } - -func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { - enc := curve.NewEncoder(writer) - if err = enc.Encode(&x.contributionCommitment); err != nil { - return enc.BytesWritten(), err - } - err = enc.Encode(&x.contributionPok) - return enc.BytesWritten(), err -} - -func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { - dec := curve.NewDecoder(reader) - if err = dec.Decode(&x.contributionCommitment); err != nil { - return dec.BytesRead(), err - } - err = dec.Decode(&x.contributionPok) - return dec.BytesRead(), err -} diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index 4c575abe22..73162f1b45 100644 --- 
a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -9,6 +9,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bn254/mpcsetup" "github.com/consensys/gnark/backend/groth16" groth16Impl "github.com/consensys/gnark/backend/groth16/bn254" ) @@ -23,7 +24,7 @@ import ( func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { // final contributions - contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) p.update(&contributions[0], contributions[1:]) _, _, _, g2 := curve.Generators() diff --git a/backend/groth16/bw6-633/mpcsetup/marshal.go b/backend/groth16/bw6-633/mpcsetup/marshal.go index 2cd0c0f03f..d9bfd04c81 100644 --- a/backend/groth16/bw6-633/mpcsetup/marshal.go +++ b/backend/groth16/bw6-633/mpcsetup/marshal.go @@ -8,6 +8,7 @@ package mpcsetup import ( "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" + "github.com/consensys/gnark-crypto/ecc/bw6-633/mpcsetup" "github.com/consensys/gnark/internal/utils" "io" ) @@ -113,7 +114,7 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { } n := int64(2) // we've definitely successfully read 2 bytes - p.Sigmas = make([]valueUpdate, nbCommitments) + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) @@ -247,21 +248,3 @@ func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { } return dec.BytesRead(), nil } - -func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { - enc := curve.NewEncoder(writer) - 
if err = enc.Encode(&x.contributionCommitment); err != nil { - return enc.BytesWritten(), err - } - err = enc.Encode(&x.contributionPok) - return enc.BytesWritten(), err -} - -func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { - dec := curve.NewDecoder(reader) - if err = dec.Decode(&x.contributionCommitment); err != nil { - return dec.BytesRead(), err - } - err = dec.Decode(&x.contributionPok) - return dec.BytesRead(), err -} diff --git a/backend/groth16/bw6-633/mpcsetup/setup.go b/backend/groth16/bw6-633/mpcsetup/setup.go index c5036ffa21..cd7a73605f 100644 --- a/backend/groth16/bw6-633/mpcsetup/setup.go +++ b/backend/groth16/bw6-633/mpcsetup/setup.go @@ -9,6 +9,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/fft" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bw6-633/mpcsetup" "github.com/consensys/gnark/backend/groth16" groth16Impl "github.com/consensys/gnark/backend/groth16/bw6-633" ) @@ -23,7 +24,7 @@ import ( func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { // final contributions - contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) p.update(&contributions[0], contributions[1:]) _, _, _, g2 := curve.Generators() diff --git a/backend/groth16/bw6-633/mpcsetup/utils.go b/backend/groth16/bw6-633/mpcsetup/utils.go index 2c9f54cfa9..984ec36d16 100644 --- a/backend/groth16/bw6-633/mpcsetup/utils.go +++ b/backend/groth16/bw6-633/mpcsetup/utils.go @@ -6,15 +6,11 @@ package mpcsetup import ( - "bytes" - "errors" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" 
"github.com/consensys/gnark/internal/utils" "math/big" "math/bits" - "runtime" ) func bitReverse[T any](a []T) { diff --git a/backend/groth16/bw6-761/mpcsetup/marshal.go b/backend/groth16/bw6-761/mpcsetup/marshal.go index fdfac89464..8c0d3265d0 100644 --- a/backend/groth16/bw6-761/mpcsetup/marshal.go +++ b/backend/groth16/bw6-761/mpcsetup/marshal.go @@ -8,6 +8,7 @@ package mpcsetup import ( "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark-crypto/ecc/bw6-761/mpcsetup" "github.com/consensys/gnark/internal/utils" "io" ) @@ -113,7 +114,7 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { } n := int64(2) // we've definitely successfully read 2 bytes - p.Sigmas = make([]valueUpdate, nbCommitments) + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) @@ -247,21 +248,3 @@ func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { } return dec.BytesRead(), nil } - -func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { - enc := curve.NewEncoder(writer) - if err = enc.Encode(&x.contributionCommitment); err != nil { - return enc.BytesWritten(), err - } - err = enc.Encode(&x.contributionPok) - return enc.BytesWritten(), err -} - -func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { - dec := curve.NewDecoder(reader) - if err = dec.Decode(&x.contributionCommitment); err != nil { - return dec.BytesRead(), err - } - err = dec.Decode(&x.contributionPok) - return dec.BytesRead(), err -} diff --git a/backend/groth16/bw6-761/mpcsetup/setup.go b/backend/groth16/bw6-761/mpcsetup/setup.go index bebfdca2dc..9117ac8099 100644 --- a/backend/groth16/bw6-761/mpcsetup/setup.go +++ b/backend/groth16/bw6-761/mpcsetup/setup.go @@ -9,6 +9,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bw6-761" 
"github.com/consensys/gnark-crypto/ecc/bw6-761/fr/fft" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bw6-761/mpcsetup" "github.com/consensys/gnark/backend/groth16" groth16Impl "github.com/consensys/gnark/backend/groth16/bw6-761" ) @@ -23,7 +24,7 @@ import ( func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { // final contributions - contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) p.update(&contributions[0], contributions[1:]) _, _, _, g2 := curve.Generators() diff --git a/backend/groth16/bw6-761/mpcsetup/utils.go b/backend/groth16/bw6-761/mpcsetup/utils.go index 3b5d1992fc..0cd18fa237 100644 --- a/backend/groth16/bw6-761/mpcsetup/utils.go +++ b/backend/groth16/bw6-761/mpcsetup/utils.go @@ -6,15 +6,11 @@ package mpcsetup import ( - "bytes" - "errors" - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" - "runtime" ) func bitReverse[T any](a []T) { diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl index 89398bf704..a76381102d 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl @@ -4,6 +4,7 @@ import ( "github.com/consensys/gnark/internal/utils" {{- template "import_curve" . 
}} + "github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/mpcsetup" ) @@ -108,7 +109,7 @@ func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { } n := int64(2) // we've definitely successfully read 2 bytes - p.Sigmas = make([]valueUpdate, nbCommitments) + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) @@ -241,22 +242,4 @@ func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { } } return dec.BytesRead(), nil -} - -func (x *valueUpdate) WriteTo(writer io.Writer) (n int64, err error) { - enc := curve.NewEncoder(writer) - if err = enc.Encode(&x.contributionCommitment); err != nil { - return enc.BytesWritten(), err - } - err = enc.Encode(&x.contributionPok) - return enc.BytesWritten(), err -} - -func (x *valueUpdate) ReadFrom(reader io.Reader) (n int64, err error) { - dec := curve.NewDecoder(reader) - if err = dec.Decode(&x.contributionCommitment); err != nil { - return dec.BytesRead(), err - } - err = dec.Decode(&x.contributionPok) - return dec.BytesRead(), err } \ No newline at end of file diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl index 89645c1154..55b3770c04 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl @@ -5,6 +5,7 @@ import ( {{- template "import_curve" . }} {{- template "import_fft" . }} + "github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/mpcsetup" ) // Seal performs the final contribution and outputs the proving and verifying keys. 
@@ -17,7 +18,7 @@ import ( func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { // final contributions - contributions := beaconContributions(p.hash(), beaconChallenge, 1+len(p.Sigmas)) + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) p.update(&contributions[0], contributions[1:]) _, _, _, g2 := curve.Generators() diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl index d6249e49b4..1361199e11 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl @@ -1,10 +1,6 @@ import ( - "bytes" "math/big" "math/bits" - "runtime" - "errors" - "github.com/consensys/gnark-crypto/ecc" {{- template "import_fr" . }} {{- template "import_curve" . 
}} "github.com/consensys/gnark/internal/utils" From 94a58c57fd551a1ec4444c707f471fde114746b3 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 15:35:43 -0600 Subject: [PATCH 093/105] build: update gnark-crypto dep --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 116789e6e2..521c59718f 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/blang/semver/v4 v4.0.0 github.com/consensys/bavard v0.1.24 github.com/consensys/compress v0.2.5 - github.com/consensys/gnark-crypto v0.14.1-0.20241224223818-dbea2d722220 + github.com/consensys/gnark-crypto v0.14.1-0.20241225213359-ae513c299a87 github.com/fxamacker/cbor/v2 v2.7.0 github.com/google/go-cmp v0.6.0 github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 diff --git a/go.sum b/go.sum index 060264b7d0..24040eb35a 100644 --- a/go.sum +++ b/go.sum @@ -61,8 +61,8 @@ github.com/consensys/bavard v0.1.24 h1:Lfe+bjYbpaoT7K5JTFoMi5wo9V4REGLvQQbHmatoN github.com/consensys/bavard v0.1.24/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs= github.com/consensys/compress v0.2.5 h1:gJr1hKzbOD36JFsF1AN8lfXz1yevnJi1YolffY19Ntk= github.com/consensys/compress v0.2.5/go.mod h1:pyM+ZXiNUh7/0+AUjUf9RKUM6vSH7T/fsn5LLS0j1Tk= -github.com/consensys/gnark-crypto v0.14.1-0.20241224223818-dbea2d722220 h1:bPI39r97HaofW0dkMtm//yXRoRDtEEEQtg6oS6qMRnM= -github.com/consensys/gnark-crypto v0.14.1-0.20241224223818-dbea2d722220/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc= +github.com/consensys/gnark-crypto v0.14.1-0.20241225213359-ae513c299a87 h1:2a1PROstBtv7IgQOf6LicNzD8H537kTXrb/UkRLm2i0= +github.com/consensys/gnark-crypto v0.14.1-0.20241225213359-ae513c299a87/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= 
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= From 0f6d281cfea630f2d2899fcd039fcb1233ab64b9 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 22:50:46 -0600 Subject: [PATCH 094/105] fix: utils.Max -> max --- std/recursion/groth16/verifier.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/std/recursion/groth16/verifier.go b/std/recursion/groth16/verifier.go index 067158b441..77ace9073e 100644 --- a/std/recursion/groth16/verifier.go +++ b/std/recursion/groth16/verifier.go @@ -13,7 +13,6 @@ import ( fr_bn254 "github.com/consensys/gnark-crypto/ecc/bn254/fr" bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761" fr_bw6761 "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend/groth16" groth16backend_bls12377 "github.com/consensys/gnark/backend/groth16/bls12-377" groth16backend_bls12381 "github.com/consensys/gnark/backend/groth16/bls12-381" @@ -635,7 +634,7 @@ func (v *Verifier[FR, G1El, G2El, GtEl]) AssertProof(vk VerifyingKey[G1El, G2El, maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentAuxData := make([]*emulated.Element[FR], len(vk.PublicAndCommitmentCommitted)) From 67cebb23f4cb044e91ae62c37bed8dc779a7ae52 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 23:43:25 -0600 Subject: [PATCH 095/105] fix staticcheck --- backend/groth16/bn254/mpcsetup/phase1.go | 4 ++-- backend/groth16/bn254/mpcsetup/phase2.go | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index d64db75e47..be33ddf90b 100644 --- 
a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -176,8 +176,8 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to α: %w", err) } if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ - {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, - {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 01bf93d4f6..1c0461ab31 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -94,10 +94,10 @@ func (p *Phase2) Verify(next *Phase2) error { } if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ - {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, - {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, - {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" - {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } From c549cbcb25c8c8ee8d7c4a2e0082faaf30e97c17 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 25 Dec 2024 23:45:54 -0600 Subject: [PATCH 096/105] chore: generify staticcheck fix --- backend/groth16/bls12-377/mpcsetup/phase1.go | 4 ++-- 
backend/groth16/bls12-377/mpcsetup/phase2.go | 8 ++++---- backend/groth16/bls12-381/mpcsetup/phase1.go | 4 ++-- backend/groth16/bls12-381/mpcsetup/phase2.go | 8 ++++---- backend/groth16/bls24-315/mpcsetup/phase1.go | 4 ++-- backend/groth16/bls24-315/mpcsetup/phase2.go | 8 ++++---- backend/groth16/bls24-317/mpcsetup/phase1.go | 4 ++-- backend/groth16/bls24-317/mpcsetup/phase2.go | 8 ++++---- backend/groth16/bw6-633/mpcsetup/phase1.go | 4 ++-- backend/groth16/bw6-633/mpcsetup/phase2.go | 8 ++++---- backend/groth16/bw6-761/mpcsetup/phase1.go | 4 ++-- backend/groth16/bw6-761/mpcsetup/phase2.go | 8 ++++---- .../template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl | 4 ++-- .../template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl | 8 ++++---- 14 files changed, 42 insertions(+), 42 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase1.go b/backend/groth16/bls12-377/mpcsetup/phase1.go index b28840c9dc..bbc72fc14f 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase1.go +++ b/backend/groth16/bls12-377/mpcsetup/phase1.go @@ -176,8 +176,8 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to α: %w", err) } if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ - {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, - {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } diff --git a/backend/groth16/bls12-377/mpcsetup/phase2.go b/backend/groth16/bls12-377/mpcsetup/phase2.go index d348ed4bdb..963fd1ad10 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase2.go +++ b/backend/groth16/bls12-377/mpcsetup/phase2.go @@ -94,10 +94,10 @@ func (p *Phase2) Verify(next *Phase2) error { } if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ - 
{&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, - {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, - {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" - {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } diff --git a/backend/groth16/bls12-381/mpcsetup/phase1.go b/backend/groth16/bls12-381/mpcsetup/phase1.go index e81427db70..d1c030b857 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase1.go +++ b/backend/groth16/bls12-381/mpcsetup/phase1.go @@ -176,8 +176,8 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to α: %w", err) } if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ - {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, - {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } diff --git a/backend/groth16/bls12-381/mpcsetup/phase2.go b/backend/groth16/bls12-381/mpcsetup/phase2.go index 9daf356394..bfdb3d9dd2 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase2.go +++ b/backend/groth16/bls12-381/mpcsetup/phase2.go @@ -94,10 +94,10 @@ func (p *Phase2) Verify(next *Phase2) error { } if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ - {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, - {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, - 
{next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" - {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } diff --git a/backend/groth16/bls24-315/mpcsetup/phase1.go b/backend/groth16/bls24-315/mpcsetup/phase1.go index 25170f8fab..30e474b095 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase1.go +++ b/backend/groth16/bls24-315/mpcsetup/phase1.go @@ -176,8 +176,8 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to α: %w", err) } if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ - {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, - {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } diff --git a/backend/groth16/bls24-315/mpcsetup/phase2.go b/backend/groth16/bls24-315/mpcsetup/phase2.go index 5543ecd6ba..3f1b594bb3 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase2.go +++ b/backend/groth16/bls24-315/mpcsetup/phase2.go @@ -94,10 +94,10 @@ func (p *Phase2) Verify(next *Phase2) error { } if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ - {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, - {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, - {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" - 
{next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } diff --git a/backend/groth16/bls24-317/mpcsetup/phase1.go b/backend/groth16/bls24-317/mpcsetup/phase1.go index f115e0fb1c..55e95a7e4d 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase1.go +++ b/backend/groth16/bls24-317/mpcsetup/phase1.go @@ -176,8 +176,8 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to α: %w", err) } if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ - {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, - {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } diff --git a/backend/groth16/bls24-317/mpcsetup/phase2.go b/backend/groth16/bls24-317/mpcsetup/phase2.go index 8b4f903384..cc1ea21bdb 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase2.go +++ b/backend/groth16/bls24-317/mpcsetup/phase2.go @@ -94,10 +94,10 @@ func (p *Phase2) Verify(next *Phase2) error { } if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ - {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, - {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, - {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" - {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + 
{Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } diff --git a/backend/groth16/bw6-633/mpcsetup/phase1.go b/backend/groth16/bw6-633/mpcsetup/phase1.go index 7e3c0c40f1..f6c6c65fae 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase1.go +++ b/backend/groth16/bw6-633/mpcsetup/phase1.go @@ -176,8 +176,8 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to α: %w", err) } if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ - {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, - {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } diff --git a/backend/groth16/bw6-633/mpcsetup/phase2.go b/backend/groth16/bw6-633/mpcsetup/phase2.go index d18f46bb02..7645cd3c6e 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase2.go +++ b/backend/groth16/bw6-633/mpcsetup/phase2.go @@ -94,10 +94,10 @@ func (p *Phase2) Verify(next *Phase2) error { } if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ - {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, - {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, - {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" - {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // 
since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } diff --git a/backend/groth16/bw6-761/mpcsetup/phase1.go b/backend/groth16/bw6-761/mpcsetup/phase1.go index 298c1263ba..f16ef075c6 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase1.go +++ b/backend/groth16/bw6-761/mpcsetup/phase1.go @@ -176,8 +176,8 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to α: %w", err) } if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ - {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, - {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } diff --git a/backend/groth16/bw6-761/mpcsetup/phase2.go b/backend/groth16/bw6-761/mpcsetup/phase2.go index 60aaea5bb7..60f1dafac6 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase2.go +++ b/backend/groth16/bw6-761/mpcsetup/phase2.go @@ -94,10 +94,10 @@ func (p *Phase2) Verify(next *Phase2) error { } if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ - {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, - {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, - {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" - {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, 
}...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl index 41bd4edb12..42743a576f 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl @@ -171,8 +171,8 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to α: %w", err) } if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ - {&p.parameters.G1.BetaTau[0], &next.parameters.G1.BetaTau[0]}, - {&p.parameters.G2.Beta, &next.parameters.G2.Beta}, + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, }...); err != nil { return fmt.Errorf("failed to verify contribution to β: %w", err) } diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl index b646202695..03527c95d1 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl @@ -88,10 +88,10 @@ func (p *Phase2) Verify(next *Phase2) error { } if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ - {&p.Parameters.G1.Delta, &next.Parameters.G1.Delta}, - {&p.Parameters.G2.Delta, &next.Parameters.G2.Delta}, - {next.Parameters.G1.Z, p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" - {next.Parameters.G1.PKK, p.Parameters.G1.PKK}, + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: 
p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, }...); err != nil { return fmt.Errorf("failed to verify contribution to δ: %w", err) } From f0e446fcdb4b9c62a25f8a6a51c5d342417fa744 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Tue, 21 Jan 2025 13:04:00 -0600 Subject: [PATCH 097/105] build: update gnark-crypto --- go.mod | 14 +++++++------- go.sum | 28 ++++++++++++++-------------- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/go.mod b/go.mod index caa4a8e03a..dbeef78306 100644 --- a/go.mod +++ b/go.mod @@ -5,11 +5,11 @@ go 1.22 toolchain go1.22.6 require ( - github.com/bits-and-blooms/bitset v1.14.2 + github.com/bits-and-blooms/bitset v1.20.0 github.com/blang/semver/v4 v4.0.0 - github.com/consensys/bavard v0.1.25 + github.com/consensys/bavard v0.1.27 github.com/consensys/compress v0.2.5 - github.com/consensys/gnark-crypto v0.14.1-0.20250116204316-e7fd38b0a0a6 + github.com/consensys/gnark-crypto v0.15.0 github.com/fxamacker/cbor/v2 v2.7.0 github.com/google/go-cmp v0.6.0 github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8 @@ -18,10 +18,10 @@ require ( github.com/leanovate/gopter v0.2.11 github.com/ronanh/intcomp v1.1.0 github.com/rs/zerolog v1.33.0 - github.com/stretchr/testify v1.9.0 - golang.org/x/crypto v0.31.0 + github.com/stretchr/testify v1.10.0 + golang.org/x/crypto v0.32.0 golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 - golang.org/x/sync v0.8.0 + golang.org/x/sync v0.10.0 ) require ( @@ -31,7 +31,7 @@ require ( github.com/mmcloughlin/addchain v0.4.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/x448/float16 v0.8.4 // indirect - golang.org/x/sys v0.28.0 // indirect + golang.org/x/sys v0.29.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect rsc.io/tmplfunc v0.0.3 // indirect ) diff --git a/go.sum b/go.sum index e20d4967aa..5f24bbd3a5 100644 --- a/go.sum +++ 
b/go.sum @@ -44,8 +44,8 @@ github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hC github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bits-and-blooms/bitset v1.14.2 h1:YXVoyPndbdvcEVcseEovVfp0qjJp7S+i5+xgp/Nfbdc= -github.com/bits-and-blooms/bitset v1.14.2/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bits-and-blooms/bitset v1.20.0 h1:2F+rfL86jE2d/bmw7OhqUg2Sj/1rURkBn3MdfoPyRVU= +github.com/bits-and-blooms/bitset v1.20.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM= github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ= @@ -57,12 +57,12 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/consensys/bavard v0.1.25 h1:5YcSBnp03/HvfpKaIQLr/ecspTp2k8YNR5rQLOWvUyc= -github.com/consensys/bavard v0.1.25/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs= +github.com/consensys/bavard v0.1.27 h1:j6hKUrGAy/H+gpNrpLU3I26n1yc+VMGmd6ID5+gAhOs= +github.com/consensys/bavard v0.1.27/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs= github.com/consensys/compress v0.2.5 h1:gJr1hKzbOD36JFsF1AN8lfXz1yevnJi1YolffY19Ntk= github.com/consensys/compress v0.2.5/go.mod 
h1:pyM+ZXiNUh7/0+AUjUf9RKUM6vSH7T/fsn5LLS0j1Tk= -github.com/consensys/gnark-crypto v0.14.1-0.20250116204316-e7fd38b0a0a6 h1:P4DeR8HYfQGl4Vj6KEv0Eszcokroit/U1dRrUsgt+js= -github.com/consensys/gnark-crypto v0.14.1-0.20250116204316-e7fd38b0a0a6/go.mod h1:q9s22Y0WIHd9UCBfD+xGeW8wDJ7WAGZZpMrLFqzBzrQ= +github.com/consensys/gnark-crypto v0.15.0 h1:OXsWnhheHV59eXIzhL5OIexa/vqTK8wtRYQCtwfMDtY= +github.com/consensys/gnark-crypto v0.15.0/go.mod h1:Ke3j06ndtPTVvo++PhGNgvm+lgpLvzbcE2MqljY7diU= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= @@ -271,8 +271,8 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= -github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= @@ -304,8 +304,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto 
v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= -golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= +golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -410,8 +410,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -462,8 +462,8 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys 
v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= -golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= +golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= From 2766ed74dc880204a85328a7c4bb48240362a8ea Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 22 Jan 2025 14:28:52 -0600 Subject: [PATCH 098/105] perf: parallelize subgroup checks --- backend/groth16/bls24-317/mpcsetup/phase1.go | 44 ++++++++++++++--- backend/groth16/bls24-317/mpcsetup/phase2.go | 51 +++++++++++++++----- backend/groth16/bls24-317/mpcsetup/utils.go | 42 +++++++++++----- 3 files changed, 108 insertions(+), 29 deletions(-) diff --git a/backend/groth16/bls24-317/mpcsetup/phase1.go b/backend/groth16/bls24-317/mpcsetup/phase1.go index 285bc6b979..14ca84515b 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase1.go +++ b/backend/groth16/bls24-317/mpcsetup/phase1.go @@ -14,7 +14,9 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" "github.com/consensys/gnark-crypto/ecc/bls24-317/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) // SrsCommons are the circuit-independent components of the Groth16 SRS, @@ -142,8 +144,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { // WARNING: the 
last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return SrsCommons{}, err } prev = c[i] @@ -152,7 +156,7 @@ func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, e } // Verify assumes previous is correct -func (p *Phase1) Verify(next *Phase1) error { +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { @@ -182,11 +186,39 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to β: %w", err) } - if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { - return errors.New("derived values 𝔾₁ subgroup check failed") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { - return errors.New("derived values 𝔾₂ subgroup check failed") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var ( + err error + wg [4]*sync.WaitGroup + ) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + }) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₂ 
representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bls24-317/mpcsetup/phase2.go b/backend/groth16/bls24-317/mpcsetup/phase2.go index 0d9045327b..25371e1385 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase2.go +++ b/backend/groth16/bls24-317/mpcsetup/phase2.go @@ -13,6 +13,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" "github.com/consensys/gnark-crypto/ecc/bls24-317/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -20,6 +21,7 @@ import ( "github.com/consensys/gnark/internal/utils" "math/big" "slices" + "sync" ) // Phase2Evaluations components of the circuit keys @@ -59,7 +61,7 @@ type Phase2 struct { Challenge []byte } -func (p *Phase2) Verify(next *Phase2) error { +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the previous contribution's hash") @@ -73,14 +75,41 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var err error + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + err = fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, 
next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err + } + // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { - return errors.New("commitment proving key subgroup check failed") - } - - if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + if err = next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -89,11 +118,7 @@ func (p *Phase2) Verify(next *Phase2) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { - return errors.New("derived values 𝔾₁ subgroup check failed") - } - - if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" @@ -326,8 +351,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, 
beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { prev := new(Phase2) evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return nil, nil, err } prev = c[i] diff --git a/backend/groth16/bls24-317/mpcsetup/utils.go b/backend/groth16/bls24-317/mpcsetup/utils.go index 7be1c2812b..484355f336 100644 --- a/backend/groth16/bls24-317/mpcsetup/utils.go +++ b/backend/groth16/bls24-317/mpcsetup/utils.go @@ -8,9 +8,11 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" + "sync" ) func bitReverse[T any](a []T) { @@ -73,20 +75,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -func areInSubGroupG1(s []curve.G1Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } - } - return true + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -func areInSubGroupG2(s []curve.G2Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } + }, 1024) // TODO @Tabaie experimentally optimize 
minBlock +} + +type verificationSettings struct { + wp *gcUtils.WorkerPool +} + +type verificationOption func(*verificationSettings) + +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return true } From 6542b21f2831982a79e8e5f330ef15e99399f2a5 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 22 Jan 2025 14:38:06 -0600 Subject: [PATCH 099/105] build: generify parallel subgroup checks --- backend/groth16/bls12-377/mpcsetup/phase1.go | 44 +++++++++++++--- backend/groth16/bls12-377/mpcsetup/phase2.go | 51 ++++++++++++++----- backend/groth16/bls12-377/mpcsetup/utils.go | 42 +++++++++++---- backend/groth16/bls12-381/mpcsetup/phase1.go | 44 +++++++++++++--- backend/groth16/bls12-381/mpcsetup/phase2.go | 51 ++++++++++++++----- backend/groth16/bls12-381/mpcsetup/utils.go | 42 +++++++++++---- backend/groth16/bls24-315/mpcsetup/phase1.go | 44 +++++++++++++--- backend/groth16/bls24-315/mpcsetup/phase2.go | 51 ++++++++++++++----- backend/groth16/bls24-315/mpcsetup/utils.go | 42 +++++++++++---- backend/groth16/bn254/mpcsetup/phase1.go | 44 +++++++++++++--- backend/groth16/bn254/mpcsetup/phase2.go | 51 ++++++++++++++----- backend/groth16/bn254/mpcsetup/utils.go | 42 +++++++++++---- backend/groth16/bw6-633/mpcsetup/phase1.go | 44 +++++++++++++--- backend/groth16/bw6-633/mpcsetup/phase2.go | 51 ++++++++++++++----- backend/groth16/bw6-633/mpcsetup/utils.go | 42 +++++++++++---- backend/groth16/bw6-761/mpcsetup/phase1.go | 44 +++++++++++++--- backend/groth16/bw6-761/mpcsetup/phase2.go | 51 ++++++++++++++----- backend/groth16/bw6-761/mpcsetup/utils.go | 42 +++++++++++---- .../groth16/mpcsetup/phase1.go.tmpl | 44 +++++++++++++--- .../groth16/mpcsetup/phase2.go.tmpl | 51 ++++++++++++++----- .../zkpschemes/groth16/mpcsetup/utils.go.tmpl | 42 +++++++++++---- 21 files changed, 756 insertions(+), 203 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase1.go 
b/backend/groth16/bls12-377/mpcsetup/phase1.go index 598435126f..af5f2411b9 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase1.go +++ b/backend/groth16/bls12-377/mpcsetup/phase1.go @@ -14,7 +14,9 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark-crypto/ecc/bls12-377/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) // SrsCommons are the circuit-independent components of the Groth16 SRS, @@ -142,8 +144,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return SrsCommons{}, err } prev = c[i] @@ -152,7 +156,7 @@ func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, e } // Verify assumes previous is correct -func (p *Phase1) Verify(next *Phase1) error { +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { @@ -182,11 +186,39 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to β: %w", err) } - if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { - return errors.New("derived values 𝔾₁ subgroup check failed") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { - return errors.New("derived values 𝔾₂ subgroup check failed") + wp := settings.wp + if wp == nil { + wp = 
gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var ( + err error + wg [4]*sync.WaitGroup + ) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + }) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bls12-377/mpcsetup/phase2.go b/backend/groth16/bls12-377/mpcsetup/phase2.go index 70fc8e9528..9647ea7dc6 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase2.go +++ b/backend/groth16/bls12-377/mpcsetup/phase2.go @@ -13,6 +13,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark-crypto/ecc/bls12-377/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -20,6 +21,7 @@ import ( "github.com/consensys/gnark/internal/utils" "math/big" "slices" + "sync" ) // Phase2Evaluations components of the circuit keys @@ -59,7 +61,7 @@ type Phase2 struct { Challenge []byte } -func (p *Phase2) Verify(next *Phase2) error { +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the previous contribution's hash") @@ -73,14 +75,41 @@ func (p *Phase2) Verify(next *Phase2) error { 
return errors.New("contribution size mismatch") } + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var err error + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + err = fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err + } + // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { - return errors.New("commitment proving key subgroup check failed") - } - - if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + if err = next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -89,11 +118,7 @@ func (p *Phase2) Verify(next *Phase2) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { - return 
errors.New("derived values 𝔾₁ subgroup check failed") - } - - if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" @@ -326,8 +351,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { prev := new(Phase2) evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return nil, nil, err } prev = c[i] diff --git a/backend/groth16/bls12-377/mpcsetup/utils.go b/backend/groth16/bls12-377/mpcsetup/utils.go index 0bbf04d236..6f177e7b55 100644 --- a/backend/groth16/bls12-377/mpcsetup/utils.go +++ b/backend/groth16/bls12-377/mpcsetup/utils.go @@ -8,9 +8,11 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" + "sync" ) func bitReverse[T any](a []T) { @@ -73,20 +75,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -func areInSubGroupG1(s []curve.G1Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if 
!s[i].IsInSubGroup() { + errorReporter(i) + break + } } - } - return true + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -func areInSubGroupG2(s []curve.G2Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } + }, 1024) // TODO @Tabaie experimentally optimize minBlock +} + +type verificationSettings struct { + wp *gcUtils.WorkerPool +} + +type verificationOption func(*verificationSettings) + +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return true } diff --git a/backend/groth16/bls12-381/mpcsetup/phase1.go b/backend/groth16/bls12-381/mpcsetup/phase1.go index 35fceda3b2..68844946db 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase1.go +++ b/backend/groth16/bls12-381/mpcsetup/phase1.go @@ -14,7 +14,9 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark-crypto/ecc/bls12-381/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) // SrsCommons are the circuit-independent components of the Groth16 SRS, @@ -142,8 +144,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return SrsCommons{}, err } prev = c[i] @@ -152,7 +156,7 @@ func VerifyPhase1(N uint64, 
beaconChallenge []byte, c ...*Phase1) (SrsCommons, e } // Verify assumes previous is correct -func (p *Phase1) Verify(next *Phase1) error { +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { @@ -182,11 +186,39 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to β: %w", err) } - if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { - return errors.New("derived values 𝔾₁ subgroup check failed") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { - return errors.New("derived values 𝔾₂ subgroup check failed") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var ( + err error + wg [4]*sync.WaitGroup + ) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + }) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bls12-381/mpcsetup/phase2.go b/backend/groth16/bls12-381/mpcsetup/phase2.go index 4e0abb8193..feeb2d2d20 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase2.go +++ b/backend/groth16/bls12-381/mpcsetup/phase2.go @@ -13,6 +13,7 @@ import 
( curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark-crypto/ecc/bls12-381/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -20,6 +21,7 @@ import ( "github.com/consensys/gnark/internal/utils" "math/big" "slices" + "sync" ) // Phase2Evaluations components of the circuit keys @@ -59,7 +61,7 @@ type Phase2 struct { Challenge []byte } -func (p *Phase2) Verify(next *Phase2) error { +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the previous contribution's hash") @@ -73,14 +75,41 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var err error + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + err = fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err + } + // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the 
Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { - return errors.New("commitment proving key subgroup check failed") - } - - if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + if err = next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -89,11 +118,7 @@ func (p *Phase2) Verify(next *Phase2) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { - return errors.New("derived values 𝔾₁ subgroup check failed") - } - - if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" @@ -326,8 +351,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { prev := new(Phase2) evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return nil, nil, err } prev = c[i] diff --git a/backend/groth16/bls12-381/mpcsetup/utils.go 
b/backend/groth16/bls12-381/mpcsetup/utils.go index 5afa1d9b99..c0ca8e959d 100644 --- a/backend/groth16/bls12-381/mpcsetup/utils.go +++ b/backend/groth16/bls12-381/mpcsetup/utils.go @@ -8,9 +8,11 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" + "sync" ) func bitReverse[T any](a []T) { @@ -73,20 +75,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -func areInSubGroupG1(s []curve.G1Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } - } - return true + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -func areInSubGroupG2(s []curve.G2Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } + }, 1024) // TODO @Tabaie experimentally optimize minBlock +} + +type verificationSettings struct { + wp *gcUtils.WorkerPool +} + +type verificationOption func(*verificationSettings) + +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return true } diff --git a/backend/groth16/bls24-315/mpcsetup/phase1.go b/backend/groth16/bls24-315/mpcsetup/phase1.go index 9af398d11d..49314faca0 100644 --- 
a/backend/groth16/bls24-315/mpcsetup/phase1.go +++ b/backend/groth16/bls24-315/mpcsetup/phase1.go @@ -14,7 +14,9 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark-crypto/ecc/bls24-315/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) // SrsCommons are the circuit-independent components of the Groth16 SRS, @@ -142,8 +144,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return SrsCommons{}, err } prev = c[i] @@ -152,7 +156,7 @@ func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, e } // Verify assumes previous is correct -func (p *Phase1) Verify(next *Phase1) error { +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { @@ -182,11 +186,39 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to β: %w", err) } - if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { - return errors.New("derived values 𝔾₁ subgroup check failed") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { - return errors.New("derived values 𝔾₂ subgroup check failed") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var ( + err error + wg [4]*sync.WaitGroup + ) + wg[0] 
= areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + }) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bls24-315/mpcsetup/phase2.go b/backend/groth16/bls24-315/mpcsetup/phase2.go index 79e11d450a..ea6a258ef6 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase2.go +++ b/backend/groth16/bls24-315/mpcsetup/phase2.go @@ -13,6 +13,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark-crypto/ecc/bls24-315/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -20,6 +21,7 @@ import ( "github.com/consensys/gnark/internal/utils" "math/big" "slices" + "sync" ) // Phase2Evaluations components of the circuit keys @@ -59,7 +61,7 @@ type Phase2 struct { Challenge []byte } -func (p *Phase2) Verify(next *Phase2) error { +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the previous contribution's hash") @@ -73,14 +75,41 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } + // check subgroup membership + var settings 
verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var err error + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + err = fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err + } + // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { - return errors.New("commitment proving key subgroup check failed") - } - - if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + if err = next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -89,11 +118,7 @@ func (p *Phase2) Verify(next *Phase2) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { - return errors.New("derived values 𝔾₁ subgroup check failed") - } - - if err := next.Delta.Verify(challenge, 
1, []mpcsetup.ValueUpdate{ + if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" @@ -326,8 +351,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { prev := new(Phase2) evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return nil, nil, err } prev = c[i] diff --git a/backend/groth16/bls24-315/mpcsetup/utils.go b/backend/groth16/bls24-315/mpcsetup/utils.go index fce9cce80d..a51e8a78aa 100644 --- a/backend/groth16/bls24-315/mpcsetup/utils.go +++ b/backend/groth16/bls24-315/mpcsetup/utils.go @@ -8,9 +8,11 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" + "sync" ) func bitReverse[T any](a []T) { @@ -73,20 +75,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -func areInSubGroupG1(s []curve.G1Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } - } - return true + }, 1024) // TODO @Tabaie 
experimentally optimize minBlock } -func areInSubGroupG2(s []curve.G2Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } + }, 1024) // TODO @Tabaie experimentally optimize minBlock +} + +type verificationSettings struct { + wp *gcUtils.WorkerPool +} + +type verificationOption func(*verificationSettings) + +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return true } diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 1cb6588b69..6e8466b869 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -14,7 +14,9 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" "github.com/consensys/gnark-crypto/ecc/bn254/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) // SrsCommons are the circuit-independent components of the Groth16 SRS, @@ -142,8 +144,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return SrsCommons{}, err } prev = c[i] @@ -152,7 +156,7 @@ func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, e } // Verify assumes previous is correct -func (p *Phase1) Verify(next *Phase1) error { 
+func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { @@ -182,11 +186,39 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to β: %w", err) } - if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { - return errors.New("derived values 𝔾₁ subgroup check failed") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { - return errors.New("derived values 𝔾₂ subgroup check failed") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var ( + err error + wg [4]*sync.WaitGroup + ) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + }) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 95bb5a8c2f..01a8ddcd3d 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -13,6 +13,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" 
"github.com/consensys/gnark-crypto/ecc/bn254/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -20,6 +21,7 @@ import ( "github.com/consensys/gnark/internal/utils" "math/big" "slices" + "sync" ) // Phase2Evaluations components of the circuit keys @@ -59,7 +61,7 @@ type Phase2 struct { Challenge []byte } -func (p *Phase2) Verify(next *Phase2) error { +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the previous contribution's hash") @@ -73,14 +75,41 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var err error + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + err = fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err + } + // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution 
commitment - if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { - return errors.New("commitment proving key subgroup check failed") - } - - if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + if err = next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -89,11 +118,7 @@ func (p *Phase2) Verify(next *Phase2) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { - return errors.New("derived values 𝔾₁ subgroup check failed") - } - - if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" @@ -326,8 +351,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { prev := new(Phase2) evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return nil, nil, err } prev = c[i] diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index ac9f947363..d2a99f5b84 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ 
b/backend/groth16/bn254/mpcsetup/utils.go @@ -8,9 +8,11 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" + "sync" ) func bitReverse[T any](a []T) { @@ -73,20 +75,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -func areInSubGroupG1(s []curve.G1Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } - } - return true + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -func areInSubGroupG2(s []curve.G2Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } + }, 1024) // TODO @Tabaie experimentally optimize minBlock +} + +type verificationSettings struct { + wp *gcUtils.WorkerPool +} + +type verificationOption func(*verificationSettings) + +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return true } diff --git a/backend/groth16/bw6-633/mpcsetup/phase1.go b/backend/groth16/bw6-633/mpcsetup/phase1.go index a011d74fab..35aa74ac56 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase1.go +++ b/backend/groth16/bw6-633/mpcsetup/phase1.go @@ -14,7 +14,9 @@ import ( curve 
"github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" "github.com/consensys/gnark-crypto/ecc/bw6-633/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) // SrsCommons are the circuit-independent components of the Groth16 SRS, @@ -142,8 +144,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return SrsCommons{}, err } prev = c[i] @@ -152,7 +156,7 @@ func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, e } // Verify assumes previous is correct -func (p *Phase1) Verify(next *Phase1) error { +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { @@ -182,11 +186,39 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to β: %w", err) } - if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { - return errors.New("derived values 𝔾₁ subgroup check failed") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { - return errors.New("derived values 𝔾₂ subgroup check failed") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var ( + err error + wg [4]*sync.WaitGroup + ) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + 
wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + }) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bw6-633/mpcsetup/phase2.go b/backend/groth16/bw6-633/mpcsetup/phase2.go index 8353fd97d9..4dff1f9757 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase2.go +++ b/backend/groth16/bw6-633/mpcsetup/phase2.go @@ -13,6 +13,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" "github.com/consensys/gnark-crypto/ecc/bw6-633/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -20,6 +21,7 @@ import ( "github.com/consensys/gnark/internal/utils" "math/big" "slices" + "sync" ) // Phase2Evaluations components of the circuit keys @@ -59,7 +61,7 @@ type Phase2 struct { Challenge []byte } -func (p *Phase2) Verify(next *Phase2) error { +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the previous contribution's hash") @@ -73,14 +75,41 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + 
} + + var err error + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + err = fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err + } + // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { - return errors.New("commitment proving key subgroup check failed") - } - - if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + if err = next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -89,11 +118,7 @@ func (p *Phase2) Verify(next *Phase2) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { - return errors.New("derived values 𝔾₁ subgroup check failed") - } - - if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: 
&next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" @@ -326,8 +351,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { prev := new(Phase2) evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return nil, nil, err } prev = c[i] diff --git a/backend/groth16/bw6-633/mpcsetup/utils.go b/backend/groth16/bw6-633/mpcsetup/utils.go index 6a6c2af8b7..8a3fb24b5b 100644 --- a/backend/groth16/bw6-633/mpcsetup/utils.go +++ b/backend/groth16/bw6-633/mpcsetup/utils.go @@ -8,9 +8,11 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" + "sync" ) func bitReverse[T any](a []T) { @@ -73,20 +75,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -func areInSubGroupG1(s []curve.G1Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } - } - return true + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -func areInSubGroupG2(s []curve.G2Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// 
TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } + }, 1024) // TODO @Tabaie experimentally optimize minBlock +} + +type verificationSettings struct { + wp *gcUtils.WorkerPool +} + +type verificationOption func(*verificationSettings) + +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return true } diff --git a/backend/groth16/bw6-761/mpcsetup/phase1.go b/backend/groth16/bw6-761/mpcsetup/phase1.go index 35ccfad0d2..b90b700471 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase1.go +++ b/backend/groth16/bw6-761/mpcsetup/phase1.go @@ -14,7 +14,9 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark-crypto/ecc/bw6-761/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) // SrsCommons are the circuit-independent components of the Groth16 SRS, @@ -142,8 +144,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return SrsCommons{}, err } prev = c[i] @@ -152,7 +156,7 @@ func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, e } // Verify assumes previous is correct -func (p *Phase1) Verify(next *Phase1) error { +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && 
!bytes.Equal(next.Challenge, challenge) { @@ -182,11 +186,39 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to β: %w", err) } - if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { - return errors.New("derived values 𝔾₁ subgroup check failed") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { - return errors.New("derived values 𝔾₂ subgroup check failed") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var ( + err error + wg [4]*sync.WaitGroup + ) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + }) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bw6-761/mpcsetup/phase2.go b/backend/groth16/bw6-761/mpcsetup/phase2.go index d706dd7b66..92977711d8 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase2.go +++ b/backend/groth16/bw6-761/mpcsetup/phase2.go @@ -13,6 +13,7 @@ import ( curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" "github.com/consensys/gnark-crypto/ecc/bw6-761/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend/groth16" 
"github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -20,6 +21,7 @@ import ( "github.com/consensys/gnark/internal/utils" "math/big" "slices" + "sync" ) // Phase2Evaluations components of the circuit keys @@ -59,7 +61,7 @@ type Phase2 struct { Challenge []byte } -func (p *Phase2) Verify(next *Phase2) error { +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the previous contribution's hash") @@ -73,14 +75,41 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var err error + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + err = fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err + } + // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { - return errors.New("commitment proving key subgroup check failed") - } - - if err 
:= next.Sigmas[i].Verify(challenge, 2+byte(i), + if err = next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -89,11 +118,7 @@ func (p *Phase2) Verify(next *Phase2) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { - return errors.New("derived values 𝔾₁ subgroup check failed") - } - - if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" @@ -326,8 +351,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { prev := new(Phase2) evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return nil, nil, err } prev = c[i] diff --git a/backend/groth16/bw6-761/mpcsetup/utils.go b/backend/groth16/bw6-761/mpcsetup/utils.go index fea2257db5..93ddb32092 100644 --- a/backend/groth16/bw6-761/mpcsetup/utils.go +++ b/backend/groth16/bw6-761/mpcsetup/utils.go @@ -8,9 +8,11 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bw6-761" 
"github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" "math/big" "math/bits" + "sync" ) func bitReverse[T any](a []T) { @@ -73,20 +75,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -func areInSubGroupG1(s []curve.G1Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } - } - return true + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -func areInSubGroupG2(s []curve.G2Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } + }, 1024) // TODO @Tabaie experimentally optimize minBlock +} + +type verificationSettings struct { + wp *gcUtils.WorkerPool +} + +type verificationOption func(*verificationSettings) + +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return true } diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl index 42743a576f..11be587664 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl @@ -3,8 +3,10 @@ import ( "errors" "fmt" 
"github.com/consensys/gnark-crypto/ecc" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" "bytes" + "sync" {{- template "import_fr" . }} @@ -137,8 +139,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return SrsCommons{}, err } prev = c[i] @@ -147,7 +151,7 @@ func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, e } // Verify assumes previous is correct -func (p *Phase1) Verify(next *Phase1) error { +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { @@ -177,11 +181,39 @@ func (p *Phase1) Verify(next *Phase1) error { return fmt.Errorf("failed to verify contribution to β: %w", err) } - if !areInSubGroupG1(next.parameters.G1.Tau[2:]) || !areInSubGroupG1(next.parameters.G1.BetaTau[1:]) || !areInSubGroupG1(next.parameters.G1.AlphaTau[1:]) { - return errors.New("derived values 𝔾₁ subgroup check failed") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - if !areInSubGroupG2(next.parameters.G2.Tau[2:]) { - return errors.New("derived values 𝔾₂ subgroup check failed") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var ( + err error + wg [4]*sync.WaitGroup + ) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + 
wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + }) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err } return mpcsetup.SameRatioMany( diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl index 03527c95d1..c6dd4df4c7 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl @@ -6,6 +6,7 @@ import ( "math/big" "fmt" "slices" + "sync" "github.com/consensys/gnark/backend/groth16" "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" @@ -14,6 +15,7 @@ import ( {{- template "import_curve" . }} {{- template "import_backend_cs" . 
}} "github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" ) // Phase2Evaluations components of the circuit keys @@ -53,7 +55,7 @@ type Phase2 struct { Challenge []byte } -func (p *Phase2) Verify(next *Phase2) error { +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the previous contribution's hash") @@ -67,14 +69,41 @@ func (p *Phase2) Verify(next *Phase2) error { return errors.New("contribution size mismatch") } + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + var err error + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + err = fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + if err != nil { + return err + } + // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if !areInSubGroupG1(next.Parameters.G1.SigmaCKK[i]) { - return errors.New("commitment proving key subgroup check failed") - } - - if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + if err = 
next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -83,11 +112,7 @@ func (p *Phase2) Verify(next *Phase2) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if !areInSubGroupG1(next.Parameters.G1.Z) || !areInSubGroupG1(next.Parameters.G1.PKK) { - return errors.New("derived values 𝔾₁ subgroup check failed") - } - - if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" @@ -320,8 +345,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { prev := new(Phase2) evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() for i := range c { - if err := prev.Verify(c[i]); err != nil { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { return nil, nil, err } prev = c[i] diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl index 1361199e11..56ec2c5362 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl @@ -3,7 +3,9 @@ import ( 
"math/bits" {{- template "import_fr" . }} {{- template "import_curve" . }} + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" + "sync" ) func bitReverse[T any](a []T) { @@ -66,20 +68,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -func areInSubGroupG1(s []curve.G1Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } - } - return true + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -func areInSubGroupG2(s []curve.G2Affine) bool { - for i := range s { - if !s[i].IsInSubGroup() { - return false +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } } + }, 1024) // TODO @Tabaie experimentally optimize minBlock +} + +type verificationSettings struct { + wp *gcUtils.WorkerPool +} + +type verificationOption func(*verificationSettings) + +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return true } \ No newline at end of file From 304248251774167e6da3863446576eda467bc4f9 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 22 Jan 2025 15:25:18 -0600 Subject: [PATCH 100/105] fix: error channels --- backend/groth16/bls12-377/mpcsetup/phase1.go | 21 ++++++++++--------- backend/groth16/bls12-377/mpcsetup/phase2.go | 15 +++++++------ backend/groth16/bls12-381/mpcsetup/phase1.go | 21 
++++++++++--------- backend/groth16/bls12-381/mpcsetup/phase2.go | 15 +++++++------ backend/groth16/bls24-315/mpcsetup/phase1.go | 21 ++++++++++--------- backend/groth16/bls24-315/mpcsetup/phase2.go | 15 +++++++------ backend/groth16/bls24-317/mpcsetup/phase1.go | 21 ++++++++++--------- backend/groth16/bls24-317/mpcsetup/phase2.go | 15 +++++++------ backend/groth16/bn254/mpcsetup/phase1.go | 21 ++++++++++--------- backend/groth16/bn254/mpcsetup/phase2.go | 15 +++++++------ backend/groth16/bw6-633/mpcsetup/phase1.go | 21 ++++++++++--------- backend/groth16/bw6-633/mpcsetup/phase2.go | 15 +++++++------ backend/groth16/bw6-761/mpcsetup/phase1.go | 21 ++++++++++--------- backend/groth16/bw6-761/mpcsetup/phase2.go | 15 +++++++------ .../groth16/mpcsetup/phase1.go.tmpl | 21 ++++++++++--------- .../groth16/mpcsetup/phase2.go.tmpl | 17 ++++++++------- 16 files changed, 161 insertions(+), 129 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase1.go b/backend/groth16/bls12-377/mpcsetup/phase1.go index af5f2411b9..2fc5591dd0 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase1.go +++ b/backend/groth16/bls12-377/mpcsetup/phase1.go @@ -197,28 +197,29 @@ func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { defer wp.Stop() } - var ( - err error - wg [4]*sync.WaitGroup - ) + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) }) wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { - err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) }) wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { - err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + 
subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) }) wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) }) for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bls12-377/mpcsetup/phase2.go b/backend/groth16/bls12-377/mpcsetup/phase2.go index 9647ea7dc6..a280875fe1 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase2.go +++ b/backend/groth16/bls12-377/mpcsetup/phase2.go @@ -86,10 +86,10 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { defer wp.Stop() } - var err error + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) subGroupErrorReporterNoOffset := func(format string) func(int) { return func(i int) { - err = fmt.Errorf(format+" representation not in subgroup", i) + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) } } @@ -102,14 +102,17 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if err = next.Sigmas[i].Verify(challenge, 2+byte(i), + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: 
&next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -118,7 +121,7 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" diff --git a/backend/groth16/bls12-381/mpcsetup/phase1.go b/backend/groth16/bls12-381/mpcsetup/phase1.go index 68844946db..7f05922e64 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase1.go +++ b/backend/groth16/bls12-381/mpcsetup/phase1.go @@ -197,28 +197,29 @@ func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { defer wp.Stop() } - var ( - err error - wg [4]*sync.WaitGroup - ) + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) }) wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { - err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) }) wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { - err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) }) wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₂ 
representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) }) for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bls12-381/mpcsetup/phase2.go b/backend/groth16/bls12-381/mpcsetup/phase2.go index feeb2d2d20..84000e3d57 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase2.go +++ b/backend/groth16/bls12-381/mpcsetup/phase2.go @@ -86,10 +86,10 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { defer wp.Stop() } - var err error + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) subGroupErrorReporterNoOffset := func(format string) func(int) { return func(i int) { - err = fmt.Errorf(format+" representation not in subgroup", i) + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) } } @@ -102,14 +102,17 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if err = next.Sigmas[i].Verify(challenge, 2+byte(i), + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -118,7 +121,7 @@ func (p *Phase2) Verify(next *Phase2, options 
...verificationOption) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" diff --git a/backend/groth16/bls24-315/mpcsetup/phase1.go b/backend/groth16/bls24-315/mpcsetup/phase1.go index 49314faca0..72b5aae074 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase1.go +++ b/backend/groth16/bls24-315/mpcsetup/phase1.go @@ -197,28 +197,29 @@ func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { defer wp.Stop() } - var ( - err error - wg [4]*sync.WaitGroup - ) + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) }) wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { - err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) }) wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { - err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) }) wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) }) for _, wg := range wg { wg.Wait() } - if err != nil { - 
return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bls24-315/mpcsetup/phase2.go b/backend/groth16/bls24-315/mpcsetup/phase2.go index ea6a258ef6..433f0b674b 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase2.go +++ b/backend/groth16/bls24-315/mpcsetup/phase2.go @@ -86,10 +86,10 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { defer wp.Stop() } - var err error + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) subGroupErrorReporterNoOffset := func(format string) func(int) { return func(i int) { - err = fmt.Errorf(format+" representation not in subgroup", i) + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) } } @@ -102,14 +102,17 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if err = next.Sigmas[i].Verify(challenge, 2+byte(i), + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -118,7 +121,7 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if err = next.Delta.Verify(challenge, 1, 
[]mpcsetup.ValueUpdate{ + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" diff --git a/backend/groth16/bls24-317/mpcsetup/phase1.go b/backend/groth16/bls24-317/mpcsetup/phase1.go index 14ca84515b..019d4d11d5 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase1.go +++ b/backend/groth16/bls24-317/mpcsetup/phase1.go @@ -197,28 +197,29 @@ func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { defer wp.Stop() } - var ( - err error - wg [4]*sync.WaitGroup - ) + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) }) wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { - err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) }) wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { - err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) }) wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) }) for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } return mpcsetup.SameRatioMany( diff --git 
a/backend/groth16/bls24-317/mpcsetup/phase2.go b/backend/groth16/bls24-317/mpcsetup/phase2.go index 25371e1385..47287da0fd 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase2.go +++ b/backend/groth16/bls24-317/mpcsetup/phase2.go @@ -86,10 +86,10 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { defer wp.Stop() } - var err error + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) subGroupErrorReporterNoOffset := func(format string) func(int) { return func(i int) { - err = fmt.Errorf(format+" representation not in subgroup", i) + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) } } @@ -102,14 +102,17 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if err = next.Sigmas[i].Verify(challenge, 2+byte(i), + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -118,7 +121,7 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, 
{Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 6e8466b869..1857fea2ac 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -197,28 +197,29 @@ func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { defer wp.Stop() } - var ( - err error - wg [4]*sync.WaitGroup - ) + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) }) wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { - err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) }) wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { - err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) }) wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) }) for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index 01a8ddcd3d..e30a92bcac 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ 
-86,10 +86,10 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { defer wp.Stop() } - var err error + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) subGroupErrorReporterNoOffset := func(format string) func(int) { return func(i int) { - err = fmt.Errorf(format+" representation not in subgroup", i) + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) } } @@ -102,14 +102,17 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if err = next.Sigmas[i].Verify(challenge, 2+byte(i), + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -118,7 +121,7 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" diff --git 
a/backend/groth16/bw6-633/mpcsetup/phase1.go b/backend/groth16/bw6-633/mpcsetup/phase1.go index 35aa74ac56..6b35f01e5c 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase1.go +++ b/backend/groth16/bw6-633/mpcsetup/phase1.go @@ -197,28 +197,29 @@ func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { defer wp.Stop() } - var ( - err error - wg [4]*sync.WaitGroup - ) + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) }) wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { - err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) }) wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { - err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) }) wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) }) for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bw6-633/mpcsetup/phase2.go b/backend/groth16/bw6-633/mpcsetup/phase2.go index 4dff1f9757..2f13c18fec 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase2.go +++ b/backend/groth16/bw6-633/mpcsetup/phase2.go @@ -86,10 +86,10 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { defer wp.Stop() } - var err error + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) 
subGroupErrorReporterNoOffset := func(format string) func(int) { return func(i int) { - err = fmt.Errorf(format+" representation not in subgroup", i) + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) } } @@ -102,14 +102,17 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if err = next.Sigmas[i].Verify(challenge, 2+byte(i), + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -118,7 +121,7 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" diff --git a/backend/groth16/bw6-761/mpcsetup/phase1.go b/backend/groth16/bw6-761/mpcsetup/phase1.go index b90b700471..c4d6015d34 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase1.go +++ 
b/backend/groth16/bw6-761/mpcsetup/phase1.go @@ -197,28 +197,29 @@ func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { defer wp.Stop() } - var ( - err error - wg [4]*sync.WaitGroup - ) + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) }) wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { - err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) }) wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { - err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) }) wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) }) for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } return mpcsetup.SameRatioMany( diff --git a/backend/groth16/bw6-761/mpcsetup/phase2.go b/backend/groth16/bw6-761/mpcsetup/phase2.go index 92977711d8..926f669c96 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase2.go +++ b/backend/groth16/bw6-761/mpcsetup/phase2.go @@ -86,10 +86,10 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { defer wp.Stop() } - var err error + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) subGroupErrorReporterNoOffset := func(format string) func(int) { return func(i int) { - err = fmt.Errorf(format+" representation not in subgroup", i) + subGroupCheckErrors <- 
fmt.Errorf(format+" representation not in subgroup", i) } } @@ -102,14 +102,17 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if err = next.Sigmas[i].Verify(challenge, 2+byte(i), + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -118,7 +121,7 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl index 11be587664..addaac9728 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl @@ -192,28 +192,29 
@@ func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { defer wp.Stop() } - var ( - err error - wg [4]*sync.WaitGroup - ) + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) }) wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { - err = fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) }) wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { - err = fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) }) wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { - err = fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) }) for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } return mpcsetup.SameRatioMany( diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl index c6dd4df4c7..0b55d041c3 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl @@ -56,7 +56,7 @@ type Phase2 struct { } func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { - challenge := p.hash() + challenge := p.hash() if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { return errors.New("the challenge does not match the
previous contribution's hash") } @@ -80,10 +80,10 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { defer wp.Stop() } - var err error + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) subGroupErrorReporterNoOffset := func(format string) func(int) { return func(i int) { - err = fmt.Errorf(format+" representation not in subgroup", i) + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) } } @@ -96,14 +96,17 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { for _, wg := range wg { wg.Wait() } - if err != nil { - return err + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } } // verify proof of knowledge of contributions to the σᵢ // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment - if err = next.Sigmas[i].Verify(challenge, 2+byte(i), + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) @@ -112,7 +115,7 @@ func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { // verify proof of knowledge of contribution to δ // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] - if err = next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" 
From 27a82d51fa6a85e4ad53cb145f2411320727bc77 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Wed, 22 Jan 2025 15:38:21 -0600 Subject: [PATCH 101/105] refactor: simplify "powers" --- backend/groth16/bls12-377/mpcsetup/utils.go | 13 +++++-------- backend/groth16/bls12-381/mpcsetup/utils.go | 13 +++++-------- backend/groth16/bls24-315/mpcsetup/utils.go | 13 +++++-------- backend/groth16/bls24-317/mpcsetup/utils.go | 13 +++++-------- backend/groth16/bn254/mpcsetup/utils.go | 13 +++++-------- backend/groth16/bw6-633/mpcsetup/utils.go | 13 +++++-------- backend/groth16/bw6-761/mpcsetup/utils.go | 13 +++++-------- .../zkpschemes/groth16/mpcsetup/utils.go.tmpl | 13 +++++-------- 8 files changed, 40 insertions(+), 64 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/utils.go b/backend/groth16/bls12-377/mpcsetup/utils.go index 6f177e7b55..042f057a3d 100644 --- a/backend/groth16/bls12-377/mpcsetup/utils.go +++ b/backend/groth16/bls12-377/mpcsetup/utils.go @@ -29,15 +29,12 @@ func bitReverse[T any](a []T) { // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { - - result := make([]fr.Element, N) - if N >= 1 { - result[0].SetOne() + if N == 0 { + return nil } - if N >= 2 { - result[1].Set(a) - } - for i := 2; i < N; i++ { + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/backend/groth16/bls12-381/mpcsetup/utils.go b/backend/groth16/bls12-381/mpcsetup/utils.go index c0ca8e959d..e41c4c1cd5 100644 --- a/backend/groth16/bls12-381/mpcsetup/utils.go +++ b/backend/groth16/bls12-381/mpcsetup/utils.go @@ -29,15 +29,12 @@ func bitReverse[T any](a []T) { // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { - - result := make([]fr.Element, N) - if N >= 1 { - result[0].SetOne() + if N == 0 { + return nil } - if N >= 2 { - result[1].Set(a) - } - for i := 2; i < N; i++ { + result := 
make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/backend/groth16/bls24-315/mpcsetup/utils.go b/backend/groth16/bls24-315/mpcsetup/utils.go index a51e8a78aa..e9ac6c2ac4 100644 --- a/backend/groth16/bls24-315/mpcsetup/utils.go +++ b/backend/groth16/bls24-315/mpcsetup/utils.go @@ -29,15 +29,12 @@ func bitReverse[T any](a []T) { // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { - - result := make([]fr.Element, N) - if N >= 1 { - result[0].SetOne() + if N == 0 { + return nil } - if N >= 2 { - result[1].Set(a) - } - for i := 2; i < N; i++ { + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/backend/groth16/bls24-317/mpcsetup/utils.go b/backend/groth16/bls24-317/mpcsetup/utils.go index 484355f336..2b86844e13 100644 --- a/backend/groth16/bls24-317/mpcsetup/utils.go +++ b/backend/groth16/bls24-317/mpcsetup/utils.go @@ -29,15 +29,12 @@ func bitReverse[T any](a []T) { // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { - - result := make([]fr.Element, N) - if N >= 1 { - result[0].SetOne() + if N == 0 { + return nil } - if N >= 2 { - result[1].Set(a) - } - for i := 2; i < N; i++ { + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index d2a99f5b84..87f576f6cf 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -29,15 +29,12 @@ func bitReverse[T any](a []T) { // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { - - result := make([]fr.Element, N) - if N >= 1 { - result[0].SetOne() + if N == 0 { + return nil } - if N >= 2 { - result[1].Set(a) - } - for i := 2; i < N; i++ { + result := 
make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/backend/groth16/bw6-633/mpcsetup/utils.go b/backend/groth16/bw6-633/mpcsetup/utils.go index 8a3fb24b5b..9645693768 100644 --- a/backend/groth16/bw6-633/mpcsetup/utils.go +++ b/backend/groth16/bw6-633/mpcsetup/utils.go @@ -29,15 +29,12 @@ func bitReverse[T any](a []T) { // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { - - result := make([]fr.Element, N) - if N >= 1 { - result[0].SetOne() + if N == 0 { + return nil } - if N >= 2 { - result[1].Set(a) - } - for i := 2; i < N; i++ { + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/backend/groth16/bw6-761/mpcsetup/utils.go b/backend/groth16/bw6-761/mpcsetup/utils.go index 93ddb32092..cbf3416716 100644 --- a/backend/groth16/bw6-761/mpcsetup/utils.go +++ b/backend/groth16/bw6-761/mpcsetup/utils.go @@ -29,15 +29,12 @@ func bitReverse[T any](a []T) { // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { - - result := make([]fr.Element, N) - if N >= 1 { - result[0].SetOne() + if N == 0 { + return nil } - if N >= 2 { - result[1].Set(a) - } - for i := 2; i < N; i++ { + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { result[i].Mul(&result[i-1], a) } return result diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl index 56ec2c5362..5409547026 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl @@ -22,15 +22,12 @@ func bitReverse[T any](a []T) { // Returns [1, a, a², ..., aᴺ⁻¹ ] func powers(a *fr.Element, N int) []fr.Element { - - result := make([]fr.Element, N) - if N >= 1 { - 
result[0].SetOne() + if N == 0 { + return nil } - if N >= 2 { - result[1].Set(a) - } - for i := 2; i < N; i++ { + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { result[i].Mul(&result[i-1], a) } return result From 75993d6ece1542cb298adc207dfad8437100f18a Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 23 Jan 2025 10:25:34 -0600 Subject: [PATCH 102/105] revert: remove conditionallog --- internal/utils/test_utils/test_utils.go | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/internal/utils/test_utils/test_utils.go b/internal/utils/test_utils/test_utils.go index 01f5c00a8a..51f0a9d085 100644 --- a/internal/utils/test_utils/test_utils.go +++ b/internal/utils/test_utils/test_utils.go @@ -4,24 +4,9 @@ import ( "bytes" "github.com/stretchr/testify/require" "io" - "log" "testing" ) -var ConditionalLoggerEnabled bool - -func ConditionalLog(v ...any) { - if ConditionalLoggerEnabled { - log.Println(v...) - } -} - -func ConditionalLogf(format string, v ...any) { - if ConditionalLoggerEnabled { - log.Printf(format, v...) - } -} - // Range (n, startingPoints...) 
= [startingPoints[0], startingPoints[0]+1, ..., startingPoints[0]+n-1, startingPoints[1], startingPoints[1]+1, ...,] // or [0, 1, ..., n-1] if startingPoints is empty func Range(n int, startingPoints ...int) []int { From 3e8c434c65e82f66dba6e12c71359a443b29cefd Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 23 Jan 2025 10:34:43 -0600 Subject: [PATCH 103/105] remove deadcode --- backend/groth16/bls12-377/mpcsetup/utils.go | 8 -------- backend/groth16/bls12-381/mpcsetup/utils.go | 8 -------- backend/groth16/bls24-315/mpcsetup/utils.go | 8 -------- backend/groth16/bls24-317/mpcsetup/utils.go | 8 -------- backend/groth16/bn254/mpcsetup/utils.go | 8 -------- backend/groth16/bw6-633/mpcsetup/utils.go | 8 -------- backend/groth16/bw6-761/mpcsetup/utils.go | 8 -------- .../template/zkpschemes/groth16/mpcsetup/utils.go.tmpl | 8 -------- 8 files changed, 64 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/utils.go b/backend/groth16/bls12-377/mpcsetup/utils.go index 042f057a3d..bdc0e0d268 100644 --- a/backend/groth16/bls12-377/mpcsetup/utils.go +++ b/backend/groth16/bls12-377/mpcsetup/utils.go @@ -43,10 +43,6 @@ func powers(a *fr.Element, N int) []fr.Element { // Returns [aᵢAᵢ, ...]∈𝔾₁ // it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -59,10 +55,6 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { // Returns [aᵢAᵢ, ...]∈𝔾₂ // it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { diff --git a/backend/groth16/bls12-381/mpcsetup/utils.go b/backend/groth16/bls12-381/mpcsetup/utils.go index e41c4c1cd5..8c1fffee49 100644 --- 
a/backend/groth16/bls12-381/mpcsetup/utils.go +++ b/backend/groth16/bls12-381/mpcsetup/utils.go @@ -43,10 +43,6 @@ func powers(a *fr.Element, N int) []fr.Element { // Returns [aᵢAᵢ, ...]∈𝔾₁ // it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -59,10 +55,6 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { // Returns [aᵢAᵢ, ...]∈𝔾₂ // it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { diff --git a/backend/groth16/bls24-315/mpcsetup/utils.go b/backend/groth16/bls24-315/mpcsetup/utils.go index e9ac6c2ac4..4240bca495 100644 --- a/backend/groth16/bls24-315/mpcsetup/utils.go +++ b/backend/groth16/bls24-315/mpcsetup/utils.go @@ -43,10 +43,6 @@ func powers(a *fr.Element, N int) []fr.Element { // Returns [aᵢAᵢ, ...]∈𝔾₁ // it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -59,10 +55,6 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { // Returns [aᵢAᵢ, ...]∈𝔾₂ // it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { diff --git a/backend/groth16/bls24-317/mpcsetup/utils.go b/backend/groth16/bls24-317/mpcsetup/utils.go index 2b86844e13..2e837140bf 100644 --- a/backend/groth16/bls24-317/mpcsetup/utils.go +++ b/backend/groth16/bls24-317/mpcsetup/utils.go @@ -43,10 +43,6 @@ func powers(a *fr.Element, N int) []fr.Element { // Returns [aᵢAᵢ, 
...]∈𝔾₁ // it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -59,10 +55,6 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { // Returns [aᵢAᵢ, ...]∈𝔾₂ // it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { diff --git a/backend/groth16/bn254/mpcsetup/utils.go b/backend/groth16/bn254/mpcsetup/utils.go index 87f576f6cf..0923799e39 100644 --- a/backend/groth16/bn254/mpcsetup/utils.go +++ b/backend/groth16/bn254/mpcsetup/utils.go @@ -43,10 +43,6 @@ func powers(a *fr.Element, N int) []fr.Element { // Returns [aᵢAᵢ, ...]∈𝔾₁ // it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -59,10 +55,6 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { // Returns [aᵢAᵢ, ...]∈𝔾₂ // it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { diff --git a/backend/groth16/bw6-633/mpcsetup/utils.go b/backend/groth16/bw6-633/mpcsetup/utils.go index 9645693768..622e86d652 100644 --- a/backend/groth16/bw6-633/mpcsetup/utils.go +++ b/backend/groth16/bw6-633/mpcsetup/utils.go @@ -43,10 +43,6 @@ func powers(a *fr.Element, N int) []fr.Element { // Returns [aᵢAᵢ, ...]∈𝔾₁ // it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp 
big.Int for i := start; i < end; i++ { @@ -59,10 +55,6 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { // Returns [aᵢAᵢ, ...]∈𝔾₂ // it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { diff --git a/backend/groth16/bw6-761/mpcsetup/utils.go b/backend/groth16/bw6-761/mpcsetup/utils.go index cbf3416716..3162f45d10 100644 --- a/backend/groth16/bw6-761/mpcsetup/utils.go +++ b/backend/groth16/bw6-761/mpcsetup/utils.go @@ -43,10 +43,6 @@ func powers(a *fr.Element, N int) []fr.Element { // Returns [aᵢAᵢ, ...]∈𝔾₁ // it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -59,10 +55,6 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { // Returns [aᵢAᵢ, ...]∈𝔾₂ // it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl index 5409547026..43247ff5d9 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl @@ -36,10 +36,6 @@ func powers(a *fr.Element, N int) []fr.Element { // Returns [aᵢAᵢ, ...]∈𝔾₁ // it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { @@ -52,10 +48,6 
@@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { // Returns [aᵢAᵢ, ...]∈𝔾₂ // it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { - /*if a[0].IsOne() { - A = A[1:] - a = a[1:] - }*/ utils.Parallelize(len(A), func(start, end int) { var tmp big.Int for i := start; i < end; i++ { From be3c9092c48a6c4d3baa67a46165ab8e966e9227 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 23 Jan 2025 10:48:52 -0600 Subject: [PATCH 104/105] docs: beacon --- backend/groth16/bls12-377/mpcsetup/phase1.go | 10 ++++++++-- backend/groth16/bls12-377/mpcsetup/phase2.go | 5 ++++- backend/groth16/bls12-377/mpcsetup/setup.go | 5 ++++- backend/groth16/bls12-381/mpcsetup/phase1.go | 10 ++++++++-- backend/groth16/bls12-381/mpcsetup/phase2.go | 5 ++++- backend/groth16/bls12-381/mpcsetup/setup.go | 5 ++++- backend/groth16/bls24-315/mpcsetup/phase1.go | 10 ++++++++-- backend/groth16/bls24-315/mpcsetup/phase2.go | 5 ++++- backend/groth16/bls24-315/mpcsetup/setup.go | 5 ++++- backend/groth16/bls24-317/mpcsetup/phase1.go | 10 ++++++++-- backend/groth16/bls24-317/mpcsetup/phase2.go | 5 ++++- backend/groth16/bls24-317/mpcsetup/setup.go | 5 ++++- backend/groth16/bn254/mpcsetup/phase1.go | 10 ++++++++-- backend/groth16/bn254/mpcsetup/phase2.go | 5 ++++- backend/groth16/bn254/mpcsetup/setup.go | 5 ++++- backend/groth16/bw6-633/mpcsetup/phase1.go | 10 ++++++++-- backend/groth16/bw6-633/mpcsetup/phase2.go | 5 ++++- backend/groth16/bw6-633/mpcsetup/setup.go | 5 ++++- backend/groth16/bw6-761/mpcsetup/phase1.go | 10 ++++++++-- backend/groth16/bw6-761/mpcsetup/phase2.go | 5 ++++- backend/groth16/bw6-761/mpcsetup/setup.go | 5 ++++- .../zkpschemes/groth16/mpcsetup/phase1.go.tmpl | 10 ++++++++-- .../zkpschemes/groth16/mpcsetup/phase2.go.tmpl | 5 ++++- .../template/zkpschemes/groth16/mpcsetup/setup.go.tmpl | 5 ++++- 24 files changed, 128 insertions(+), 32 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/phase1.go 
b/backend/groth16/bls12-377/mpcsetup/phase1.go index 2fc5591dd0..8a41f52aa5 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase1.go +++ b/backend/groth16/bls12-377/mpcsetup/phase1.go @@ -129,7 +129,10 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { @@ -139,7 +142,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { } // VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. 
// and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { diff --git a/backend/groth16/bls12-377/mpcsetup/phase2.go b/backend/groth16/bls12-377/mpcsetup/phase2.go index a280875fe1..a72b3fc437 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase2.go +++ b/backend/groth16/bls12-377/mpcsetup/phase2.go @@ -348,7 +348,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation // VerifyPhase2 for circuit described by r1cs // using parameters from commons -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { diff --git a/backend/groth16/bls12-377/mpcsetup/setup.go b/backend/groth16/bls12-377/mpcsetup/setup.go index 14f7916562..70fc155f54 100644 --- a/backend/groth16/bls12-377/mpcsetup/setup.go +++ b/backend/groth16/bls12-377/mpcsetup/setup.go @@ -18,7 +18,10 @@ import ( // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. 
+// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { diff --git a/backend/groth16/bls12-381/mpcsetup/phase1.go b/backend/groth16/bls12-381/mpcsetup/phase1.go index 7f05922e64..d1e9106f3d 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase1.go +++ b/backend/groth16/bls12-381/mpcsetup/phase1.go @@ -129,7 +129,10 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { @@ -139,7 +142,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { } // VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { diff --git a/backend/groth16/bls12-381/mpcsetup/phase2.go b/backend/groth16/bls12-381/mpcsetup/phase2.go index 84000e3d57..b8a8b45fdb 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase2.go +++ b/backend/groth16/bls12-381/mpcsetup/phase2.go @@ -348,7 +348,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation // VerifyPhase2 for circuit described by r1cs // using parameters from commons -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. 
// and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { diff --git a/backend/groth16/bls12-381/mpcsetup/setup.go b/backend/groth16/bls12-381/mpcsetup/setup.go index a7657c50a6..cdb6fc9d51 100644 --- a/backend/groth16/bls12-381/mpcsetup/setup.go +++ b/backend/groth16/bls12-381/mpcsetup/setup.go @@ -18,7 +18,10 @@ import ( // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { diff --git a/backend/groth16/bls24-315/mpcsetup/phase1.go b/backend/groth16/bls24-315/mpcsetup/phase1.go index 72b5aae074..babe035253 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase1.go +++ b/backend/groth16/bls24-315/mpcsetup/phase1.go @@ -129,7 +129,10 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. 
-// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { @@ -139,7 +142,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { } // VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. 
// and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { diff --git a/backend/groth16/bls24-315/mpcsetup/phase2.go b/backend/groth16/bls24-315/mpcsetup/phase2.go index 433f0b674b..a957abf549 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase2.go +++ b/backend/groth16/bls24-315/mpcsetup/phase2.go @@ -348,7 +348,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation // VerifyPhase2 for circuit described by r1cs // using parameters from commons -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { diff --git a/backend/groth16/bls24-315/mpcsetup/setup.go b/backend/groth16/bls24-315/mpcsetup/setup.go index 448e86ea30..95f7e90f4b 100644 --- a/backend/groth16/bls24-315/mpcsetup/setup.go +++ b/backend/groth16/bls24-315/mpcsetup/setup.go @@ -18,7 +18,10 @@ import ( // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. 
+// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { diff --git a/backend/groth16/bls24-317/mpcsetup/phase1.go b/backend/groth16/bls24-317/mpcsetup/phase1.go index 019d4d11d5..bbb5bae69b 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase1.go +++ b/backend/groth16/bls24-317/mpcsetup/phase1.go @@ -129,7 +129,10 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { @@ -139,7 +142,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { } // VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { diff --git a/backend/groth16/bls24-317/mpcsetup/phase2.go b/backend/groth16/bls24-317/mpcsetup/phase2.go index 47287da0fd..184e34aec3 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase2.go +++ b/backend/groth16/bls24-317/mpcsetup/phase2.go @@ -348,7 +348,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation // VerifyPhase2 for circuit described by r1cs // using parameters from commons -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. 
// and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { diff --git a/backend/groth16/bls24-317/mpcsetup/setup.go b/backend/groth16/bls24-317/mpcsetup/setup.go index 4259f34e98..c659254d23 100644 --- a/backend/groth16/bls24-317/mpcsetup/setup.go +++ b/backend/groth16/bls24-317/mpcsetup/setup.go @@ -18,7 +18,10 @@ import ( // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 1857fea2ac..d03761007f 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -129,7 +129,10 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. 
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { @@ -139,7 +142,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { } // VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. 
// and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go index e30a92bcac..65195b10d0 100644 --- a/backend/groth16/bn254/mpcsetup/phase2.go +++ b/backend/groth16/bn254/mpcsetup/phase2.go @@ -348,7 +348,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation // VerifyPhase2 for circuit described by r1cs // using parameters from commons -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index e189bee6dc..49f31260d2 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -18,7 +18,10 @@ import ( // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. 
+// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { diff --git a/backend/groth16/bw6-633/mpcsetup/phase1.go b/backend/groth16/bw6-633/mpcsetup/phase1.go index 6b35f01e5c..6c363dc848 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase1.go +++ b/backend/groth16/bw6-633/mpcsetup/phase1.go @@ -129,7 +129,10 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { @@ -139,7 +142,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { } // VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { diff --git a/backend/groth16/bw6-633/mpcsetup/phase2.go b/backend/groth16/bw6-633/mpcsetup/phase2.go index 2f13c18fec..657f699c66 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase2.go +++ b/backend/groth16/bw6-633/mpcsetup/phase2.go @@ -348,7 +348,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation // VerifyPhase2 for circuit described by r1cs // using parameters from commons -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. 
// and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { diff --git a/backend/groth16/bw6-633/mpcsetup/setup.go b/backend/groth16/bw6-633/mpcsetup/setup.go index 1f0d78f7bc..e034125ffd 100644 --- a/backend/groth16/bw6-633/mpcsetup/setup.go +++ b/backend/groth16/bw6-633/mpcsetup/setup.go @@ -18,7 +18,10 @@ import ( // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { diff --git a/backend/groth16/bw6-761/mpcsetup/phase1.go b/backend/groth16/bw6-761/mpcsetup/phase1.go index c4d6015d34..b60149235e 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase1.go +++ b/backend/groth16/bw6-761/mpcsetup/phase1.go @@ -129,7 +129,10 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. 
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { @@ -139,7 +142,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { } // VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. 
// and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { diff --git a/backend/groth16/bw6-761/mpcsetup/phase2.go b/backend/groth16/bw6-761/mpcsetup/phase2.go index 926f669c96..d5d629c201 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase2.go +++ b/backend/groth16/bw6-761/mpcsetup/phase2.go @@ -348,7 +348,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation // VerifyPhase2 for circuit described by r1cs // using parameters from commons -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { diff --git a/backend/groth16/bw6-761/mpcsetup/setup.go b/backend/groth16/bw6-761/mpcsetup/setup.go index 3b0e4c8764..c94333e007 100644 --- a/backend/groth16/bw6-761/mpcsetup/setup.go +++ b/backend/groth16/bw6-761/mpcsetup/setup.go @@ -18,7 +18,10 @@ import ( // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. 
+// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl index addaac9728..241c553b04 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl @@ -124,7 +124,10 @@ func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { @@ -134,7 +137,10 @@ func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { } // VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl index 0b55d041c3..f6def165bc 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl @@ -342,7 +342,10 @@ func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluation // VerifyPhase2 for circuit described by r1cs // using parameters from commons -// beaconChallenge is the output of the random beacon +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. 
THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // and c are the output from the contributors // WARNING: the last contribution object will be modified func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl index 55b3770c04..a1367e6c9b 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl @@ -12,7 +12,10 @@ import ( // No randomization is performed at this step. // A verifier should simply re-run this and check // that it produces the same values. -// The inner workings of the random beacon are out of scope. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. // WARNING: Seal modifies p, just as Contribute does. // The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { From b70d0ae367413ef44704739b9e1f27a407765dd2 Mon Sep 17 00:00:00 2001 From: Arya Tabaie <15056835+Tabaie@users.noreply.github.com> Date: Thu, 23 Jan 2025 10:58:36 -0600 Subject: [PATCH 105/105] fix: use sync.Once correctly --- .../groth16/bls12-377/mpcsetup/setup_test.go | 20 +++++++++++-------- .../groth16/bls12-381/mpcsetup/setup_test.go | 20 +++++++++++-------- .../groth16/bls24-315/mpcsetup/setup_test.go | 20 +++++++++++-------- .../groth16/bls24-317/mpcsetup/setup_test.go | 20 +++++++++++-------- backend/groth16/bn254/mpcsetup/setup_test.go | 20 +++++++++++-------- backend/groth16/bn254/mpcsetup/unit_test.go | 8 ++++---- .../groth16/bw6-633/mpcsetup/setup_test.go | 20 +++++++++++-------- .../groth16/bw6-761/mpcsetup/setup_test.go | 20 +++++++++++-------- .../groth16/mpcsetup/setup_test.go.tmpl | 20 +++++++++++-------- 9 files changed, 100 insertions(+), 68 deletions(-) diff --git a/backend/groth16/bls12-377/mpcsetup/setup_test.go b/backend/groth16/bls12-377/mpcsetup/setup_test.go index b74e22a20f..668543b38e 100644 --- a/backend/groth16/bls12-377/mpcsetup/setup_test.go +++ b/backend/groth16/bls12-377/mpcsetup/setup_test.go @@ -34,7 +34,7 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) // Compile the circuit - ccs := getTestCircuit(t) + ccs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) @@ -189,12 +189,16 @@ func assignCircuit() frontend.Circuit { } -func getTestCircuit(t *testing.T) *cs.R1CS { - return sync.OnceValue(func() *cs.R1CS { - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) - require.NoError(t, err) - return ccs.(*cs.R1CS) - })() +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + 
panic(err) + } + return ccs.(*cs.R1CS) +}) + +func getTestCircuit() *cs.R1CS { + return onceCircuit() } func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { @@ -206,7 +210,7 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin require.NoError(t, err) // groth16: ensure proof is verified - proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + proof, err := groth16.Prove(getTestCircuit(), pk, witness) require.NoError(t, err) err = groth16.Verify(proof, vk, pubWitness) diff --git a/backend/groth16/bls12-381/mpcsetup/setup_test.go b/backend/groth16/bls12-381/mpcsetup/setup_test.go index a8062cf07c..134c97bb86 100644 --- a/backend/groth16/bls12-381/mpcsetup/setup_test.go +++ b/backend/groth16/bls12-381/mpcsetup/setup_test.go @@ -34,7 +34,7 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) // Compile the circuit - ccs := getTestCircuit(t) + ccs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) @@ -189,12 +189,16 @@ func assignCircuit() frontend.Circuit { } -func getTestCircuit(t *testing.T) *cs.R1CS { - return sync.OnceValue(func() *cs.R1CS { - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) - require.NoError(t, err) - return ccs.(*cs.R1CS) - })() +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) + +func getTestCircuit() *cs.R1CS { + return onceCircuit() } func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { @@ -206,7 +210,7 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin require.NoError(t, err) // groth16: ensure proof is verified - proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + proof, err := groth16.Prove(getTestCircuit(), pk, witness) 
require.NoError(t, err) err = groth16.Verify(proof, vk, pubWitness) diff --git a/backend/groth16/bls24-315/mpcsetup/setup_test.go b/backend/groth16/bls24-315/mpcsetup/setup_test.go index 03931745cc..6121a3b543 100644 --- a/backend/groth16/bls24-315/mpcsetup/setup_test.go +++ b/backend/groth16/bls24-315/mpcsetup/setup_test.go @@ -34,7 +34,7 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) // Compile the circuit - ccs := getTestCircuit(t) + ccs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) @@ -189,12 +189,16 @@ func assignCircuit() frontend.Circuit { } -func getTestCircuit(t *testing.T) *cs.R1CS { - return sync.OnceValue(func() *cs.R1CS { - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) - require.NoError(t, err) - return ccs.(*cs.R1CS) - })() +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) + +func getTestCircuit() *cs.R1CS { + return onceCircuit() } func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { @@ -206,7 +210,7 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin require.NoError(t, err) // groth16: ensure proof is verified - proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + proof, err := groth16.Prove(getTestCircuit(), pk, witness) require.NoError(t, err) err = groth16.Verify(proof, vk, pubWitness) diff --git a/backend/groth16/bls24-317/mpcsetup/setup_test.go b/backend/groth16/bls24-317/mpcsetup/setup_test.go index 8849302d13..e72874bf88 100644 --- a/backend/groth16/bls24-317/mpcsetup/setup_test.go +++ b/backend/groth16/bls24-317/mpcsetup/setup_test.go @@ -34,7 +34,7 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) // Compile the circuit - ccs := 
getTestCircuit(t) + ccs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) @@ -189,12 +189,16 @@ func assignCircuit() frontend.Circuit { } -func getTestCircuit(t *testing.T) *cs.R1CS { - return sync.OnceValue(func() *cs.R1CS { - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) - require.NoError(t, err) - return ccs.(*cs.R1CS) - })() +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) + +func getTestCircuit() *cs.R1CS { + return onceCircuit() } func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { @@ -206,7 +210,7 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin require.NoError(t, err) // groth16: ensure proof is verified - proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + proof, err := groth16.Prove(getTestCircuit(), pk, witness) require.NoError(t, err) err = groth16.Verify(proof, vk, pubWitness) diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index dc9d345ab7..290ffd2e62 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -34,7 +34,7 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) // Compile the circuit - ccs := getTestCircuit(t) + ccs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) @@ -189,12 +189,16 @@ func assignCircuit() frontend.Circuit { } -func getTestCircuit(t *testing.T) *cs.R1CS { - return sync.OnceValue(func() *cs.R1CS { - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) - require.NoError(t, err) - return ccs.(*cs.R1CS) - })() +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := 
frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) + +func getTestCircuit() *cs.R1CS { + return onceCircuit() } func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { @@ -206,7 +210,7 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin require.NoError(t, err) // groth16: ensure proof is verified - proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + proof, err := groth16.Prove(getTestCircuit(), pk, witness) require.NoError(t, err) err = groth16.Verify(proof, vk, pubWitness) diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go index f96b2ec2dd..2449423b87 100644 --- a/backend/groth16/bn254/mpcsetup/unit_test.go +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -25,7 +25,7 @@ import ( func TestSetupBeaconOnly(t *testing.T) { // Compile the circuit - ccs := getTestCircuit(t) + ccs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) var ( @@ -135,7 +135,7 @@ func TestPowers(t *testing.T) { func TestCommons(t *testing.T) { // Compile the circuit - ccs := getTestCircuit(t) + ccs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) var p1 Phase1 @@ -176,7 +176,7 @@ func assertPairingsEqual(t *testing.T, p1 curve.G1Affine, p2 curve.G2Affine, q1 } func TestPedersen(t *testing.T) { - cs := getTestCircuit(t) + cs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(cs.GetNbConstraints())) commons := commonsSmallValues(domainSize, 2, 3, 4) @@ -213,7 +213,7 @@ func TestPhase2Serialization(t *testing.T) { require.NoError(t, err) testRoundtrip(_cs) - testRoundtrip(getTestCircuit(t)) + testRoundtrip(getTestCircuit()) } type tinyCircuit struct { diff --git a/backend/groth16/bw6-633/mpcsetup/setup_test.go b/backend/groth16/bw6-633/mpcsetup/setup_test.go index efa1d0544a..e551c04ccb 100644 --- 
a/backend/groth16/bw6-633/mpcsetup/setup_test.go +++ b/backend/groth16/bw6-633/mpcsetup/setup_test.go @@ -34,7 +34,7 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) // Compile the circuit - ccs := getTestCircuit(t) + ccs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) @@ -189,12 +189,16 @@ func assignCircuit() frontend.Circuit { } -func getTestCircuit(t *testing.T) *cs.R1CS { - return sync.OnceValue(func() *cs.R1CS { - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) - require.NoError(t, err) - return ccs.(*cs.R1CS) - })() +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) + +func getTestCircuit() *cs.R1CS { + return onceCircuit() } func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { @@ -206,7 +210,7 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin require.NoError(t, err) // groth16: ensure proof is verified - proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + proof, err := groth16.Prove(getTestCircuit(), pk, witness) require.NoError(t, err) err = groth16.Verify(proof, vk, pubWitness) diff --git a/backend/groth16/bw6-761/mpcsetup/setup_test.go b/backend/groth16/bw6-761/mpcsetup/setup_test.go index ab41ba3e66..506ca5cd9f 100644 --- a/backend/groth16/bw6-761/mpcsetup/setup_test.go +++ b/backend/groth16/bw6-761/mpcsetup/setup_test.go @@ -34,7 +34,7 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) // Compile the circuit - ccs := getTestCircuit(t) + ccs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) @@ -189,12 +189,16 @@ func assignCircuit() frontend.Circuit { } -func getTestCircuit(t *testing.T) *cs.R1CS { - return 
sync.OnceValue(func() *cs.R1CS { - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) - require.NoError(t, err) - return ccs.(*cs.R1CS) - })() +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) + +func getTestCircuit() *cs.R1CS { + return onceCircuit() } func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { @@ -206,7 +210,7 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin require.NoError(t, err) // groth16: ensure proof is verified - proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + proof, err := groth16.Prove(getTestCircuit(), pk, witness) require.NoError(t, err) err = groth16.Verify(proof, vk, pubWitness) diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl index 64f52ed3cc..bfd0ff50f2 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl @@ -28,7 +28,7 @@ func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) // Compile the circuit - ccs := getTestCircuit(t) + ccs := getTestCircuit() domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) @@ -183,12 +183,16 @@ func assignCircuit() frontend.Circuit { } -func getTestCircuit(t *testing.T) *cs.R1CS { - return sync.OnceValue(func() *cs.R1CS { - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) - require.NoError(t, err) - return ccs.(*cs.R1CS) - })() +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + 
return ccs.(*cs.R1CS) +}) + +func getTestCircuit() *cs.R1CS { + return onceCircuit() } func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { @@ -200,7 +204,7 @@ func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.Verifyin require.NoError(t, err) // groth16: ensure proof is verified - proof, err := groth16.Prove(getTestCircuit(t), pk, witness) + proof, err := groth16.Prove(getTestCircuit(), pk, witness) require.NoError(t, err) err = groth16.Verify(proof, vk, pubWitness)