diff --git a/backend/groth16/bls12-377/mpcsetup/lagrange.go b/backend/groth16/bls12-377/mpcsetup/lagrange.go index b7cc5535a3..ce5f5aa201 100644 --- a/backend/groth16/bls12-377/mpcsetup/lagrange.go +++ b/backend/groth16/bls12-377/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -103,7 +103,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bls12-377/mpcsetup/marshal.go b/backend/groth16/bls12-377/mpcsetup/marshal.go index ed7e699339..4a87029b8a 100644 --- a/backend/groth16/bls12-377/mpcsetup/marshal.go +++ b/backend/groth16/bls12-377/mpcsetup/marshal.go @@ -6,165 +6,245 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" + "github.com/consensys/gnark-crypto/ecc/bls12-377/mpcsetup" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } + } + return +} + +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") } + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, 
p.Parameters.G2.Sigma)
+
+	if len(refs) != expectedLen {
+		panic("incorrect length estimate")
+	}
+
+	return refs
+}
+
+// WriteTo implements io.WriterTo
+func (p *Phase2) WriteTo(writer io.Writer) (int64, error) {
+
+	// write the parameters
 	enc := curve.NewEncoder(writer)
-	for _, v := range toEncode {
+	for _, v := range p.refsSlice() {
 		if err := enc.Encode(v); err != nil {
 			return enc.BytesWritten(), err
 		}
 	}
-	return enc.BytesWritten(), nil
+
+	// write the proofs
+	dn, err := p.Delta.WriteTo(writer)
+	n := enc.BytesWritten() + dn
+	if err != nil {
+		return n, err
+	}
+
+	for i := range p.Sigmas {
+		dn, err = p.Sigmas[i].WriteTo(writer)
+		n += dn
+		if err != nil {
+			return n, err
+		}
+	}
+
+	return n, nil
 }
 
 // ReadFrom implements io.ReaderFrom
-func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) {
-	toEncode := []interface{}{
-		&phase1.PublicKeys.Tau.SG,
-		&phase1.PublicKeys.Tau.SXG,
-		&phase1.PublicKeys.Tau.XR,
-		&phase1.PublicKeys.Alpha.SG,
-		&phase1.PublicKeys.Alpha.SXG,
-		&phase1.PublicKeys.Alpha.XR,
-		&phase1.PublicKeys.Beta.SG,
-		&phase1.PublicKeys.Beta.SXG,
-		&phase1.PublicKeys.Beta.XR,
-		&phase1.Parameters.G1.Tau,
-		&phase1.Parameters.G1.AlphaTau,
-		&phase1.Parameters.G1.BetaTau,
-		&phase1.Parameters.G2.Tau,
-		&phase1.Parameters.G2.Beta,
+func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) {
+	var nbCommitments uint16
+
+	if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil {
+		return -1, err // binary.Read doesn't return the number of bytes read
 	}
+	n := int64(2) // we've definitely successfully read 2 bytes
+
+	p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments)
+	p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments)
+	p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments)
 
 	dec := curve.NewDecoder(reader)
-	for _, v := range toEncode {
+	for _, v := range p.refsSlice()[1:] { // nbCommitments already read
 		if err := dec.Decode(v); err != nil {
-			return dec.BytesRead(), err
+			return n + dec.BytesRead(), err
 		}
 	}
-	phase1.Hash = make([]byte, 32)
-	nBytes, err := reader.Read(phase1.Hash)
-	return dec.BytesRead() + int64(nBytes), err
-}
+	n += dec.BytesRead()
 
-// WriteTo implements io.WriterTo
-func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) {
-	n, err := phase2.writeTo(writer)
+	dn, err := p.Delta.ReadFrom(reader)
+	n += dn
 	if err != nil {
 		return n, err
 	}
-	nBytes, err := writer.Write(phase2.Hash)
-	return int64(nBytes) + n, err
+
+	for i := range p.Sigmas {
+		dn, err = p.Sigmas[i].ReadFrom(reader)
+		n += dn
+		if err != nil {
+			return n, err
+		}
+	}
+
+	return n, nil
 }
 
-func (c *Phase2) writeTo(writer io.Writer) (int64, error) {
-	enc := curve.NewEncoder(writer)
-	toEncode := []interface{}{
-		&c.PublicKey.SG,
-		&c.PublicKey.SXG,
-		&c.PublicKey.XR,
-		&c.Parameters.G1.Delta,
-		c.Parameters.G1.L,
-		c.Parameters.G1.Z,
-		&c.Parameters.G2.Delta,
+// refsSlice returns references to all sub-elements, prepended with the size N,
+// so that ReadFrom can allocate the slices before decoding the rest
+func (c *Phase2Evaluations) refsSlice() []any {
+	N := uint64(len(c.G1.A))
+	expectedLen := 3*N + 4
+	refs := make([]any, 4, expectedLen)
+	refs[0] = N // the size must come first; ReadFrom reads it separately
+	refs[1] = &c.G1.CKK
+	refs[2] = &c.G1.VKK
+	refs[3] = &c.PublicAndCommitmentCommitted
+	refs = utils.AppendRefs(refs, c.G1.A)
+	refs = utils.AppendRefs(refs, c.G1.B)
+	refs = utils.AppendRefs(refs, c.G2.B)
+
+	if uint64(len(refs)) != expectedLen {
+		panic("incorrect length estimate")
 	}
-	for _, v := range toEncode {
+	return refs
+}
+
+// WriteTo implements io.WriterTo
+func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) {
+	enc := curve.NewEncoder(writer)
+
+	for _, v := range c.refsSlice() {
+		if err := 
enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } diff --git a/backend/groth16/bls12-377/mpcsetup/marshal_test.go b/backend/groth16/bls12-377/mpcsetup/marshal_test.go deleted file mode 100644 index 5fcd3594d3..0000000000 --- a/backend/groth16/bls12-377/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2025 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bls12-377" - cs "github.com/consensys/gnark/constraint/bls12-377" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bls12-377/mpcsetup/phase1.go b/backend/groth16/bls12-377/mpcsetup/phase1.go index ab471c2ab6..8a41f52aa5 100644 --- a/backend/groth16/bls12-377/mpcsetup/phase1.go +++ b/backend/groth16/bls12-377/mpcsetup/phase1.go @@ -6,187 +6,260 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" - "math" + "github.com/consensys/gnark-crypto/ecc/bls12-377/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. +// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { + Tau, Alpha, Beta mpcsetup.UpdateProof } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. -func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
-	for i := 0; i < len(contribs)-1; i++ {
-		if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil {
-			return err
-		}
-	}
-	return nil
-}
+// setOne instantiates the parameters, and sets all contributions to one
+func (c *SrsCommons) setOne(N uint64) {
+	c.setZero(N)
+	g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0]
+	setG1 := func(s []curve.G1Affine) {
+		for i := range s {
+			s[i].Set(g1)
+		}
+	}
+	setG2 := func(s []curve.G2Affine) {
+		for i := range s {
+			s[i].Set(g2)
+		}
+	}
+
+	setG1(c.G1.Tau[1:])
+	setG2(c.G2.Tau[1:])
+	setG1(c.G1.AlphaTau)
+	setG1(c.G1.BetaTau)
+	c.G2.Beta.Set(g2)
+}
 
-// verifyPhase1 checks that a contribution is based on a known previous Phase1 state.
-func verifyPhase1(current, contribution *Phase1) error {
-	// Compute R for τ, α, β
-	tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1)
-	alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2)
-	betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3)
+// update multiplies the contributions tauUpdate, alphaUpdate and betaUpdate into the existing parameters, in place
+func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) {
+
+	// TODO @gbotrel working with jacobian points here will help with perf.
+
+	tauUpdates := powers(tauUpdate, len(c.G1.Tau))
+
+	scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1
+	scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:])
 
-	// Check for knowledge of toxic parameters
-	if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) {
-		return errors.New("couldn't verify public key of τ")
-	}
-	if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) {
-		return errors.New("couldn't verify public key of α")
-	}
-	if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) {
-		return errors.New("couldn't verify public key of β")
-	}
+	alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau))
+	alphaUpdates[0].Set(alphaUpdate)
+	for i := range alphaUpdates {
+		// let α₁ = α₀.α', τ₁ = τ₀.τ'
+		// then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ
+		alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate)
+	}
+	scaleG1InPlace(c.G1.AlphaTau, alphaUpdates)
+
+	betaUpdates := make([]fr.Element, len(c.G1.BetaTau))
+	betaUpdates[0].Set(betaUpdate)
+	for i := range betaUpdates {
+		betaUpdates[i].Mul(&tauUpdates[i], betaUpdate)
+	}
+	scaleG1InPlace(c.G1.BetaTau, betaUpdates)
+
+	var betaUpdateI big.Int
+	betaUpdate.BigInt(&betaUpdateI)
+	c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI)
+}
+
+// Seal performs the final contribution and outputs the final parameters.
+// No randomization is performed at this step.
+// A verifier should simply re-run this and check
+// that it produces the same values.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// WARNING: Seal modifies p, just as Contribute does.
+// The result will be an INVALID Phase1 object, since no proof of correctness is produced.
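+//
+// A sketch of the intended final step (VerifyPhase1 below does exactly this;
+// "last" stands for the final verified contribution):
+//
+//	commons := last.Seal(beaconChallenge)
+//
+// Any verifier re-running Seal on the same inputs obtains the same SrsCommons.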
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons {
+	newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3)
+	p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2])
+	return p.parameters
+}
+
+// VerifyPhase1 verifies a sequence of contributions and returns the SRS parameters usable for any circuit of domain size N.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// c are the contributions, in the order they were made.
+// WARNING: the last contribution object will be modified
+func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) {
+	prev := NewPhase1(N)
+	wp := gcUtils.NewWorkerPool()
+	defer wp.Stop()
+	for i := range c {
+		if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil {
+			return SrsCommons{}, err
+		}
+		prev = c[i]
+	}
+	return prev.Seal(beaconChallenge), nil
+}
+
+// Verify assumes the previous contribution (p) is correct
+func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error {
-	// Check for valid updates using previous parameters
-	if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) {
-		return errors.New("couldn't verify that [τ]₁ is based on previous contribution")
+	challenge := p.hash()
+	if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) {
+		return errors.New("the challenge does not match the previous contribution's hash")
 	}
-	if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) {
-		return errors.New("couldn't verify that [α]₁ is based on previous contribution")
+	next.Challenge = challenge
+
+	// the internal consistency of the vector sizes in next is assumed
+	// so is its well-formedness, i.e. Tau[0] = 1
+	// it remains to check it is consistent with p
+	N := len(next.parameters.G2.Tau)
+	if N != len(p.parameters.G2.Tau) {
+		return errors.New("domain size mismatch")
+	}
-	if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) {
-		return errors.New("couldn't verify that [β]₁ is based on previous contribution")
-	}
-	if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) {
-		return errors.New("couldn't verify that [τ]₂ is based on previous contribution")
-	}
-	if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) {
-		return errors.New("couldn't verify that [β]₂ is based on previous contribution")
-	}
+
+	// verify updates to τ, α, β
+	if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil {
+		return fmt.Errorf("failed to verify contribution to τ: %w", err)
+	}
+	if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.AlphaTau[0], Next: &next.parameters.G1.AlphaTau[0]}); err != nil {
+		return fmt.Errorf("failed to verify contribution to α: %w", err)
+	}
+	if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{
+		{Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]},
+		{Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta},
+	}...); err != nil {
+		return fmt.Errorf("failed to verify contribution to β: %w", err)
+	}
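+
+	// NOTE: the pairing-based consistency checks below are only sound on points
+	// of the prime-order subgroups, so subgroup membership is verified
+	// explicitly (and in parallel) before running them.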
-	// Check for valid updates using powers of τ
-	_, _, g1, g2 := curve.Generators()
-	tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau)
-	if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) {
-		return errors.New("couldn't verify valid powers of τ in G₁")
-	}
-	alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau)
-	if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], g2) {
-		return errors.New("couldn't verify valid powers of α(τ) in G₁")
-	}
-	betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau)
-	if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) {
-		return errors.New("couldn't verify valid powers of α(τ) in G₁")
-	}
-	tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau)
-	if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) {
-		return errors.New("couldn't verify valid powers of τ in G₂")
-	}
-
-	// Check hash of the contribution
-	h := contribution.hash()
-	for i := 0; i < len(h); i++ {
-		if h[i] != contribution.Hash[i] {
-			return errors.New("couldn't verify hash of contribution")
-		}
-	}
-	return nil
+	// check subgroup membership
+	var settings verificationSettings
+	for _, opt := range options {
+		opt(&settings)
+	}
+	wp := settings.wp
+	if wp == nil {
+		wp = gcUtils.NewWorkerPool()
+		defer wp.Stop()
+	}
+
+	var wg [4]*sync.WaitGroup
+	subGroupCheckErrors := make(chan error, 4)
+	wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) {
+		subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2)
+	})
+	wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) {
+		subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1)
+	})
+	wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) {
+		subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1)
+	})
+	wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) {
+		subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2)
+	})
+
+	for _, wg := range wg {
+		wg.Wait()
+	}
+	close(subGroupCheckErrors)
+	for err := range subGroupCheckErrors {
+		if err != nil {
+			return err
+		}
+	}
+
+	// check that the new powers are mutually consistent, i.e. all derived from the same τ
+	return mpcsetup.SameRatioMany(
+		next.parameters.G1.Tau,
+		next.parameters.G2.Tau,
+		next.parameters.G1.AlphaTau,
+		next.parameters.G1.BetaTau,
+	)
 }
 
-func (phase1 *Phase1) hash() []byte {
+func (p *Phase1) hash() []byte {
 	sha := sha256.New()
-	phase1.writeTo(sha)
+	if _, err := p.WriteTo(sha); err != nil {
+		panic(err)
+	}
+	sha.Write(p.Challenge)
 	return sha.Sum(nil)
 }
+
+// Initialize an empty Phase1 contribution object
+// to be used by the first contributor or the verifier
+// N is the FFT domain size
+func (p *Phase1) Initialize(N uint64) {
+	if ecc.NextPowerOfTwo(N) != N {
+		panic("N must be a power of 2")
+	}
+	p.parameters.setOne(N)
+}
+
+// NewPhase1 creates an empty Phase1 contribution object
+// to be used by the first contributor or the verifier
+// N is the FFT domain size
+func NewPhase1(N uint64) *Phase1 {
+	res := new(Phase1)
+	res.Initialize(N)
+	return res
+}
diff --git a/backend/groth16/bls12-377/mpcsetup/phase2.go b/backend/groth16/bls12-377/mpcsetup/phase2.go
index 48e05fbc60..a72b3fc437 100644
--- a/backend/groth16/bls12-377/mpcsetup/phase2.go
+++ b/backend/groth16/bls12-377/mpcsetup/phase2.go
@@ -6,48 +6,200 @@ package mpcsetup
 
 import (
+	"bytes"
 	"crypto/sha256"
 	"errors"
-	"math/big"
-
+	"fmt"
 	curve "github.com/consensys/gnark-crypto/ecc/bls12-377"
 	"github.com/consensys/gnark-crypto/ecc/bls12-377/fr"
+	"github.com/consensys/gnark-crypto/ecc/bls12-377/mpcsetup"
+	gcUtils "github.com/consensys/gnark-crypto/utils"
+	"github.com/consensys/gnark/backend/groth16"
+	"github.com/consensys/gnark/backend/groth16/internal"
 	"github.com/consensys/gnark/constraint"
 	cs "github.com/consensys/gnark/constraint/bls12-377"
+	"github.com/consensys/gnark/internal/utils"
+	"math/big"
+	"slices"
+	"sync"
 )
 
-type Phase2Evaluations struct {
+// Phase2Evaluations are the components of the circuit keys
+// that do not depend on the Phase2 randomisations
+type Phase2Evaluations struct { // TODO @Tabaie rename
 	G1 struct {
-		A, B, VKK []curve.G1Affine
+		A   []curve.G1Affine   // A are the left coefficient polynomials for each witness element, evaluated at τ
+		B   []curve.G1Affine   // B are the right coefficient polynomials for each witness element, evaluated at τ
+		VKK []curve.G1Affine   // VKK are the coefficients of the public witness and commitments
+		CKK [][]curve.G1Affine // CKK are the coefficients of the committed values
 	}
 	G2 struct {
-		B []curve.G2Affine
+		B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ
 	}
+	PublicAndCommitmentCommitted [][]int
 }
 
 type Phase2 struct {
 	Parameters struct {
 		G1 struct {
-			Delta curve.G1Affine
-			L, Z  []curve.G1Affine
+			Delta    curve.G1Affine
+			Z        []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial, 0 ≤ i ≤ N-2
+			PKK      []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := 
point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } + + for i := range sigma { + sigma[i].BigInt(&I) + scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) +} + +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +241,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
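+	// NOTE: constraint #i is associated with the domain point ωⁱ, so the
+	// accumulation loop below builds each wire's A(τ), B(τ) and C(τ)
+	// evaluations by indexing these Lagrange coefficients directly with the
+	// constraint number.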
-	internal, secret, public := r1cs.GetNbVariables()
-	nWires := internal + secret + public
+	nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables()
+	nWires := nbInternal + nbSecret + nbPublic
 	var evals Phase2Evaluations
-	evals.G1.A = make([]curve.G1Affine, nWires)
-	evals.G1.B = make([]curve.G1Affine, nWires)
-	evals.G2.B = make([]curve.G2Affine, nWires)
+	commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments)
+	evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic)
+	evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance
+	evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance
+	evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings
 	bA := make([]curve.G1Affine, nWires)
 	aB := make([]curve.G1Affine, nWires)
 	C := make([]curve.G1Affine, nWires)
 
-	// TODO @gbotrel use constraint iterator when available.
 	i := 0
 	it := r1cs.GetR1CIterator()
 	for c := it.Next(); c != nil; c = it.Next() {
+		// each constraint is sparse, i.e. involves a small portion of all variables.
+		// so we iterate over the variables involved and add the constraint's contribution
+		// to every variable's A, B, and C values
+
 		// A
 		for _, t := range c.L {
 			accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i])
@@ -129,125 +285,96 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) {
 	// Prepare default contribution
 	_, _, g1, g2 := curve.Generators()
-	c2.Parameters.G1.Delta = g1
-	c2.Parameters.G2.Delta = g2
+	p.Parameters.G1.Delta = g1
+	p.Parameters.G2.Delta = g2
 
 	// Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2]
-	n := len(srs.G1.AlphaTau)
-	c2.Parameters.G1.Z = make([]curve.G1Affine, n)
-	for i := 0; i < n-1; i++ {
-		c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i])
-	}
-	bitReverse(c2.Parameters.G1.Z)
-	c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1]
-
-	// Evaluate L
-	nPrivate := internal + secret
-	c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate)
-	evals.G1.VKK = make([]curve.G1Affine, public)
-	offset := public
-	for i := 0; i < nWires; i++ {
-		var tmp curve.G1Affine
-		tmp.Add(&bA[i], &aB[i])
-		tmp.Add(&tmp, &C[i])
-		if i < public {
-			evals.G1.VKK[i].Set(&tmp)
-		} else {
-			c2.Parameters.G1.L[i-offset].Set(&tmp)
-		}
+	n := len(commons.G1.AlphaTau)
+	p.Parameters.G1.Z = make([]curve.G1Affine, n)
+	for i := range n - 1 {
+		p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i])
 	}
-	// Set δ public key
-	var delta fr.Element
-	delta.SetOne()
-	c2.PublicKey = newPublicKey(delta, nil, 1)
-
-	// Hash initial contribution
-	c2.Hash = c2.hash()
-	return c2, evals
-}
-
-func (c *Phase2) Contribute() {
-	// Sample toxic δ
-	var delta, deltaInv fr.Element
-	var deltaBI, deltaInvBI big.Int
-	delta.SetRandom()
-	deltaInv.Inverse(&delta)
-
-	delta.BigInt(&deltaBI)
-	deltaInv.BigInt(&deltaInvBI)
+	bitReverse(p.Parameters.G1.Z)
+	p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1]
 
-	// Set δ public key
-	c.PublicKey = newPublicKey(delta, c.Hash, 1)
+	commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments)
 
-	// Update δ
-	c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI)
-	c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI)
+	evals.G1.CKK = 
make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) - for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} - -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the 
contribution
-	h := contribution.hash()
-	for i := 0; i < len(h); i++ {
-		if h[i] != contribution.Hash[i] {
-			return errors.New("couldn't verify hash of contribution")
-		}
-	}
-	return nil
-}
+// VerifyPhase2 verifies the phase 2 contributions for the circuit described by r1cs,
+// using the common SRS parameters from commons.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// c are the contributions, in the order they were made.
+// WARNING: the last contribution object will be modified
+func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) {
+	prev := new(Phase2)
+	evals := prev.Initialize(r1cs, commons)
+	wp := gcUtils.NewWorkerPool()
+	defer wp.Stop()
+	for i := range c {
+		if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil {
+			return nil, nil, err
+		}
+		prev = c[i]
+	}
+	pk, vk := prev.Seal(commons, &evals, beaconChallenge)
+	return pk, vk, nil
+}
 
-func (c *Phase2) hash() []byte {
+func (p *Phase2) hash() []byte {
 	sha := sha256.New()
-	c.writeTo(sha)
+	if _, err := p.WriteTo(sha); err != nil {
+		panic(err)
+	}
+	sha.Write(p.Challenge)
 	return sha.Sum(nil)
 }
diff --git a/backend/groth16/bls12-377/mpcsetup/setup.go b/backend/groth16/bls12-377/mpcsetup/setup.go
index 34b1c0e0fb..70fc155f54 100644
--- a/backend/groth16/bls12-377/mpcsetup/setup.go
+++ b/backend/groth16/bls12-377/mpcsetup/setup.go
@@ -8,23 +8,46 @@ package mpcsetup
 
 import (
 	curve "github.com/consensys/gnark-crypto/ecc/bls12-377"
 	"github.com/consensys/gnark-crypto/ecc/bls12-377/fr/fft"
-	groth16 "github.com/consensys/gnark/backend/groth16/bls12-377"
+	"github.com/consensys/gnark-crypto/ecc/bls12-377/fr/pedersen"
+	"github.com/consensys/gnark-crypto/ecc/bls12-377/mpcsetup"
+	"github.com/consensys/gnark/backend/groth16"
+	groth16Impl "github.com/consensys/gnark/backend/groth16/bls12-377"
 )
 
-func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) {
+// Seal performs the final contribution and outputs the proving and verifying keys.
+// No randomization is performed at this step.
+// A verifier should simply re-run this and check
+// that it produces the same values.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// WARNING: Seal modifies p, just as Contribute does.
+// The result will be an INVALID Phase2 object, since no proof of correctness is produced.
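+//
+// A sketch of the intended final step (VerifyPhase2 in phase2.go does exactly this;
+// "last" stands for the final verified contribution, evals for the output of Initialize):
+//
+//	pk, vk := last.Seal(commons, &evals, beaconChallenge)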
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +92,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 + vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bls12-377/mpcsetup/setup_test.go b/backend/groth16/bls12-377/mpcsetup/setup_test.go index c31ff4bf17..668543b38e 100644 --- a/backend/groth16/bls12-377/mpcsetup/setup_test.go +++ b/backend/groth16/bls12-377/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" cs "github.com/consensys/gnark/constraint/bls12-377" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,76 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for 
phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit() - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) - - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs2.clone() + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Initialize(domainSize) + for i := range phase1 { + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + p2.Initialize(ccs, &srsCommons) + for i := range phase2 { + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +102,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +121,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +135,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +170,49 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) +func assignCircuit() frontend.Circuit { + + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) + + return &Circuit{PreImage: preImage, Hash: hash} - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta +} - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) - return r +func getTestCircuit() *cs.R1CS { + return onceCircuit() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
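+// proveVerifyCircuit runs a full Groth16 prove/verify round with the keys
+// produced by the MPC ceremony, as an end-to-end check that they are usable.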
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bls12-377/mpcsetup/utils.go b/backend/groth16/bls12-377/mpcsetup/utils.go index fe56aaf198..bdc0e0d268 100644 --- a/backend/groth16/bls12-377/mpcsetup/utils.go +++ b/backend/groth16/bls12-377/mpcsetup/utils.go @@ -6,46 +6,15 @@ package mpcsetup import ( - "bytes" - "math/big" - "math/bits" - "runtime" - - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-377" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "sync" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,17 +27,21 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { - result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, N int) []fr.Element { + if N == 0 { + return nil + } + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -79,7 +52,8 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] 
in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -90,70 +64,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) - res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) - if err != nil { - panic(err) - } - return res +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { - nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() - } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +type verificationSettings struct { + wp *gcUtils.WorkerPool } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return -} +type verificationOption func(*verificationSettings) -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) - buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return spG2 } diff --git a/backend/groth16/bls12-377/setup.go b/backend/groth16/bls12-377/setup.go index d4f023fc52..539c20eb97 100644 --- a/backend/groth16/bls12-377/setup.go +++ b/backend/groth16/bls12-377/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range 
commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // total number of private committed wires seen so far nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? - if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/backend/groth16/bls12-377/verify.go b/backend/groth16/bls12-377/verify.go index 3c36f182d2..7a86e3dc76 100644 --- a/backend/groth16/bls12-377/verify.go +++ b/backend/groth16/bls12-377/verify.go @@ -16,7 +16,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bls12-377/fr" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls12-377/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/backend/groth16/bls12-381/mpcsetup/lagrange.go b/backend/groth16/bls12-381/mpcsetup/lagrange.go index da31e2a6ca..ea6a62659f 100644 --- a/backend/groth16/bls12-381/mpcsetup/lagrange.go +++ b/backend/groth16/bls12-381/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1],
&a[5]) @@ -103,7 +103,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bls12-381/mpcsetup/marshal.go b/backend/groth16/bls12-381/mpcsetup/marshal.go index 4d4994d9a6..84006d3a9c 100644 --- a/backend/groth16/bls12-381/mpcsetup/marshal.go +++ b/backend/groth16/bls12-381/mpcsetup/marshal.go @@ -6,165 +6,245 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" + "github.com/consensys/gnark-crypto/ecc/bls12-381/mpcsetup" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } + } + return +} + +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") } + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") + } + + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + // write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } +
+ for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 4 + refs := make([]any, 4, expectedLen) + refs[0] = N + refs[1] = &c.G1.CKK + refs[2] = &c.G1.VKK + refs[3] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err //
binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } diff --git a/backend/groth16/bls12-381/mpcsetup/marshal_test.go b/backend/groth16/bls12-381/mpcsetup/marshal_test.go deleted file mode 100644 index ae297dbf39..0000000000 --- a/backend/groth16/bls12-381/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2025 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bls12-381" - cs "github.com/consensys/gnark/constraint/bls12-381" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bls12-381/mpcsetup/phase1.go b/backend/groth16/bls12-381/mpcsetup/phase1.go index e476f527ab..d1e9106f3d 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase1.go +++ b/backend/groth16/bls12-381/mpcsetup/phase1.go @@ -6,187 +6,260 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" - "math" + "github.com/consensys/gnark-crypto/ecc/bls12-381/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. +// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { + Tau, Alpha, Beta mpcsetup.UpdateProof } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. -func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) + } + } + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) } } - return nil + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// update applies the contributions τ', α', β' multiplicatively to the SRS parameters in place +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + + scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced.
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 verifies a chain of contributions and returns the SRS parameters usable for any circuit of domain size N +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// c are the contribution objects output by the contributors, in order +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct
func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e.
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + 
}) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err } } - return nil + return mpcsetup.SameRatioMany( + next.parameters.G1.Tau, + next.parameters.G2.Tau, + next.parameters.G1.AlphaTau, + next.parameters.G1.BetaTau, + ) } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object
// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bls12-381/mpcsetup/phase2.go b/backend/groth16/bls12-381/mpcsetup/phase2.go index dbb525181c..b8a8b45fdb 100644 --- a/backend/groth16/bls12-381/mpcsetup/phase2.go +++ b/backend/groth16/bls12-381/mpcsetup/phase2.go @@ -6,48 +6,200 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + "github.com/consensys/gnark-crypto/ecc/bls12-381/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls12-381" + "github.com/consensys/gnark/internal/utils" + "math/big" + "slices" + "sync" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key.
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := 
point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } + + for i := range sigma { + sigma[i].BigInt(&I) + scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) +} + +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +241,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
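Editor's note: for intuition, lagrangeCoeffsG1/G2 amount to an inverse FFT carried out in the exponent: since L_{ωⁱ}(τ) = (1/n)·Σⱼ ω⁻ⁱʲ·τʲ, converting the powers {[τʲ]} into the Lagrange values {[L_{ωⁱ}(τ)]} is exactly an iFFT of the power sequence. A field-level analogue, as a sketch assuming the gnark-crypto fft package API:

// lagrangeFromPowers mirrors lagrangeCoeffsG1 over fr.Element instead of 𝔾₁:
// given {τ⁰, τ¹, ..., τⁿ⁻¹} it returns {L_{ω⁰}(τ), ..., L_{ωⁿ⁻¹}(τ)}.
func lagrangeFromPowers(powers []fr.Element) []fr.Element {
	domain := fft.NewDomain(uint64(len(powers)))
	res := make([]fr.Element, len(powers))
	copy(res, powers)
	domain.FFTInverse(res, fft.DIF) // iFFT of the power sequence, bit-reversed output
	fft.BitReverse(res)             // restore natural order
	return res
}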
- internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. + // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +285,96 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) - - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - // Set δ public key - c.PublicKey = newPublicKey(delta, c.Hash, 1) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) + evals.G1.CKK =
make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) - for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} - -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the 
contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 verifies a chain of contributions for the circuit described by r1cs +// using parameters from commons +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// c are the contribution objects output by the contributors, in order +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return pk, vk, nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } + sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bls12-381/mpcsetup/setup.go b/backend/groth16/bls12-381/mpcsetup/setup.go index d6a2608417..cdb6fc9d51 100644 --- a/backend/groth16/bls12-381/mpcsetup/setup.go +++ b/backend/groth16/bls12-381/mpcsetup/setup.go @@ -8,23 +8,46 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/fft" - groth16 "github.com/consensys/gnark/backend/groth16/bls12-381" + "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bls12-381/mpcsetup" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bls12-381" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase2 object, since no proof of correctness is produced.
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +92,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 + vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bls12-381/mpcsetup/setup_test.go b/backend/groth16/bls12-381/mpcsetup/setup_test.go index 3757ee19d2..134c97bb86 100644 --- a/backend/groth16/bls12-381/mpcsetup/setup_test.go +++ b/backend/groth16/bls12-381/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" cs "github.com/consensys/gnark/constraint/bls12-381" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,76 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for 
phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit() - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) - - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs2.clone() + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Initialize(domainSize) + for i := range phase1 { + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + p2.Initialize(ccs, &srsCommons) + for i := range phase2 { + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +102,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +121,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +135,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +170,49 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) +func assignCircuit() frontend.Circuit { + + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) + + return &Circuit{PreImage: preImage, Hash: hash} - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta +} - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) - return r +func getTestCircuit() *cs.R1CS { + return onceCircuit() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
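+// Note: with the Commit call added to Define above, the test circuit carries a
+// Pedersen commitment, so the prove/verify round trip below also exercises the
+// new σ machinery in Phase2 (SigmaCKK, Sigma and their update proofs); the
+// AssertIsDifferent constraint ensures the commitment is actually used.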
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bls12-381/mpcsetup/utils.go b/backend/groth16/bls12-381/mpcsetup/utils.go index a87234cf37..8c1fffee49 100644 --- a/backend/groth16/bls12-381/mpcsetup/utils.go +++ b/backend/groth16/bls12-381/mpcsetup/utils.go @@ -6,46 +6,15 @@ package mpcsetup import ( - "bytes" - "math/big" - "math/bits" - "runtime" - - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls12-381" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "sync" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,17 +27,21 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { - result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, N int) []fr.Element { + if N == 0 { + return nil + } + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -79,7 +52,8 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] 
in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -90,70 +64,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) - res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) - if err != nil { - panic(err) - } - return res +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { - nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() - } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +type verificationSettings struct { + wp *gcUtils.WorkerPool } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return -} +type verificationOption func(*verificationSettings) -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) - buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return spG2 } diff --git a/backend/groth16/bls12-381/setup.go b/backend/groth16/bls12-381/setup.go index 4500f36c27..53b968ed6a 100644 --- a/backend/groth16/bls12-381/setup.go +++ b/backend/groth16/bls12-381/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range 
commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? - if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/backend/groth16/bls12-381/verify.go b/backend/groth16/bls12-381/verify.go index baf4163d16..cf47d66ceb 100644 --- a/backend/groth16/bls12-381/verify.go +++ b/backend/groth16/bls12-381/verify.go @@ -16,7 +16,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bls12-381/fr" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls12-381/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/backend/groth16/bls24-315/mpcsetup/lagrange.go b/backend/groth16/bls24-315/mpcsetup/lagrange.go index 156d3a08e4..e8431e6ea7 100644 --- a/backend/groth16/bls24-315/mpcsetup/lagrange.go +++ b/backend/groth16/bls24-315/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], 
&a[5]) @@ -103,7 +103,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bls24-315/mpcsetup/marshal.go b/backend/groth16/bls24-315/mpcsetup/marshal.go index 62d1470077..160a9dd13c 100644 --- a/backend/groth16/bls24-315/mpcsetup/marshal.go +++ b/backend/groth16/bls24-315/mpcsetup/marshal.go @@ -6,165 +6,245 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" + "github.com/consensys/gnark-crypto/ecc/bls24-315/mpcsetup" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } + } + return +} + +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") } + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") + } + + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + 
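+	// resulting stream layout, consumed in the same order by ReadFrom below:
+	//   uint16 nbCommitments (big-endian), then G1.Delta, G1.PKK, G1.Z,
+	//   G2.Delta, the SigmaCKK slices and G2.Sigma, followed by the δ update
+	//   proof and one update proof per σᵢ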
+ for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // 
binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } diff --git a/backend/groth16/bls24-315/mpcsetup/marshal_test.go b/backend/groth16/bls24-315/mpcsetup/marshal_test.go deleted file mode 100644 index cc7e19ed8c..0000000000 --- a/backend/groth16/bls24-315/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2025 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bls24-315" - cs "github.com/consensys/gnark/constraint/bls24-315" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bls24-315/mpcsetup/phase1.go b/backend/groth16/bls24-315/mpcsetup/phase1.go index e98b00ddcc..babe035253 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase1.go +++ b/backend/groth16/bls24-315/mpcsetup/phase1.go @@ -6,187 +6,260 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" - "math" + "github.com/consensys/gnark-crypto/ecc/bls24-315/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. +// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { + Tau, Alpha, Beta mpcsetup.UpdateProof } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. -func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) + } + } + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) } } - return nil + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// update multiplies the SRS parameters by the given contribution values. The powers of the τ update are computed once and reused for the α and β columns, avoiding redundant scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + + scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced.
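+//
+// An illustrative coordinator-side sketch (beaconOutput names the beacon value
+// and is hypothetical):
+//
+//	var p Phase1
+//	p.Initialize(1 << power)
+//	// ... apply and verify the participants' contributions ...
+//	commons := p.Seal(beaconOutput)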
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 verifies a chain of contributions and returns the SRS parameters usable for any circuit of domain size N +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// The contributions c are the outputs of the participants, in order +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return SrsCommons{}, err + } + prev = c[i] + } + return prev.Seal(beaconChallenge), nil +} + +// Verify checks that next is a valid update of p; the receiver p is assumed to be correct +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e.
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + 
}) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err } } - return nil + return mpcsetup.SameRatioMany( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bls24-315/mpcsetup/phase2.go b/backend/groth16/bls24-315/mpcsetup/phase2.go index 9c1c74e359..a957abf549 100644 --- a/backend/groth16/bls24-315/mpcsetup/phase2.go +++ b/backend/groth16/bls24-315/mpcsetup/phase2.go @@ -6,48 +6,200 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-315/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls24-315" + "github.com/consensys/gnark/internal/utils" + "math/big" + "slices" + "sync" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := 
point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } + + for i := range sigma { + sigma[i].BigInt(&I) + scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) +} + +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +241,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
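+	// in the Lagrange basis, constraint i contributes coeff·[L_{ωⁱ}(τ)]₁ to the
+	// evaluation of each wire it touches, so A(τ), B(τ) and C(τ) accumulate in a
+	// single pass over the constraints below, with no interpolation step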
- internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. + // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +285,96 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) - - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - // Set δ public key - c.PublicKey = newPublicKey(delta, c.Hash, 1) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) + evals.G1.CKK = 
make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) - for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} - -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the 
contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 verifies a chain of contributions for the circuit described by r1cs, +// using parameters from commons +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// The contributions c are the outputs of the participants, in order +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return pk, vk, nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } + sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bls24-315/mpcsetup/setup.go b/backend/groth16/bls24-315/mpcsetup/setup.go index df3abc91bb..95f7e90f4b 100644 --- a/backend/groth16/bls24-315/mpcsetup/setup.go +++ b/backend/groth16/bls24-315/mpcsetup/setup.go @@ -8,23 +8,46 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/fft" - groth16 "github.com/consensys/gnark/backend/groth16/bls24-315" + "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bls24-315/mpcsetup" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bls24-315" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase2 object, since no proof of correctness is produced.
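+//
+// An illustrative coordinator-side sketch (beaconOutput names the beacon value
+// and is hypothetical):
+//
+//	var p Phase2
+//	evals := p.Initialize(r1cs, &commons)
+//	// ... apply and verify the participants' contributions ...
+//	pk, vk := p.Seal(&commons, &evals, beaconOutput)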
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +92,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 + vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bls24-315/mpcsetup/setup_test.go b/backend/groth16/bls24-315/mpcsetup/setup_test.go index 786bcbdfe0..6121a3b543 100644 --- a/backend/groth16/bls24-315/mpcsetup/setup_test.go +++ b/backend/groth16/bls24-315/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" cs "github.com/consensys/gnark/constraint/bls24-315" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,76 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for 
phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit() - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) - - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs2.clone() + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Initialize(domainSize) + for i := range phase1 { + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + p2.Initialize(ccs, &srsCommons) + for i := range phase2 { + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +102,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +121,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +135,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +170,49 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) +func assignCircuit() frontend.Circuit { + + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) + + return &Circuit{PreImage: preImage, Hash: hash} - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta +} - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) - return r +func getTestCircuit() *cs.R1CS { + return onceCircuit() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
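The new Define body above exercises gnark's commitment path, which is what gives Phase2's σ machinery something to act on in these tests. The bare type assertion api.(frontend.Committer) panics on builders without commitment support; a defensive variant could look like this sketch (defineWithGuard is hypothetical, not part of this change):

func (circuit *Circuit) defineWithGuard(api frontend.API) error {
	h, err := mimc.NewMiMC(api)
	if err != nil {
		return err
	}
	h.Write(circuit.PreImage)
	api.AssertIsEqual(circuit.Hash, h.Sum())

	committer, ok := api.(frontend.Committer)
	if !ok {
		return errors.New("builder does not implement frontend.Committer")
	}
	c, err := committer.Commit(circuit.PreImage, circuit.Hash)
	if err != nil {
		return err
	}
	api.AssertIsDifferent(c, 0)
	return nil
}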
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bls24-315/mpcsetup/utils.go b/backend/groth16/bls24-315/mpcsetup/utils.go index 95e43b8c5a..4240bca495 100644 --- a/backend/groth16/bls24-315/mpcsetup/utils.go +++ b/backend/groth16/bls24-315/mpcsetup/utils.go @@ -6,46 +6,15 @@ package mpcsetup import ( - "bytes" - "math/big" - "math/bits" - "runtime" - - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-315" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "sync" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,17 +27,21 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { - result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, N int) []fr.Element { + if N == 0 { + return nil + } + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -79,7 +52,8 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] 
in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -90,70 +64,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) - res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) - if err != nil { - panic(err) - } - return res +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { - nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() - } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +type verificationSettings struct { + wp *gcUtils.WorkerPool } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return -} +type verificationOption func(*verificationSettings) -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) - buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return spG2 } diff --git a/backend/groth16/bls24-315/setup.go b/backend/groth16/bls24-315/setup.go index fcd392fcc6..60265255b3 100644 --- a/backend/groth16/bls24-315/setup.go +++ b/backend/groth16/bls24-315/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range 
commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? - if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/backend/groth16/bls24-315/verify.go b/backend/groth16/bls24-315/verify.go index 8a4d1f6755..59318d9067 100644 --- a/backend/groth16/bls24-315/verify.go +++ b/backend/groth16/bls24-315/verify.go @@ -16,7 +16,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bls24-315/fr" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bls24-315/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/backend/groth16/bls24-317/mpcsetup/lagrange.go b/backend/groth16/bls24-317/mpcsetup/lagrange.go index e8b45d396e..c781945d79 100644 --- a/backend/groth16/bls24-317/mpcsetup/lagrange.go +++ b/backend/groth16/bls24-317/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], 
&a[5]) @@ -103,7 +103,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bls24-317/mpcsetup/marshal.go b/backend/groth16/bls24-317/mpcsetup/marshal.go index b087846391..869c1af583 100644 --- a/backend/groth16/bls24-317/mpcsetup/marshal.go +++ b/backend/groth16/bls24-317/mpcsetup/marshal.go @@ -6,165 +6,245 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" + "github.com/consensys/gnark-crypto/ecc/bls24-317/mpcsetup" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } + } + return +} + +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") } + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") + } + + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + 
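+	// one σ update proof per commitment follows, in index order; ReadFrom below
+	// consumes the exact same sequence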
+ for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // 
binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } diff --git a/backend/groth16/bls24-317/mpcsetup/marshal_test.go b/backend/groth16/bls24-317/mpcsetup/marshal_test.go deleted file mode 100644 index a6905524b3..0000000000 --- a/backend/groth16/bls24-317/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2025 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
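marshal_test.go is deleted here; its round-trip coverage moves into setup_test.go's serialize/deserialize helpers. For the new SrsCommons codec specifically, a standalone check in the same spirit might read (a hypothetical test, using only APIs introduced in this change):

func TestSrsCommonsRoundTrip(t *testing.T) {
	var p1 Phase1
	p1.Initialize(16) // any power of 2
	p1.Contribute()
	commons := p1.Seal([]byte("test beacon"))

	var bb bytes.Buffer
	_, err := commons.WriteTo(&bb)
	require.NoError(t, err)

	var back SrsCommons
	n, err := back.ReadFrom(bytes.NewReader(bb.Bytes()))
	require.NoError(t, err)
	require.Equal(t, int64(bb.Len()), n) // the codec reports exactly the bytes consumed
}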
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bls24-317" - cs "github.com/consensys/gnark/constraint/bls24-317" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bls24-317/mpcsetup/phase1.go b/backend/groth16/bls24-317/mpcsetup/phase1.go index 0004ef4909..bbb5bae69b 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase1.go +++ b/backend/groth16/bls24-317/mpcsetup/phase1.go @@ -6,187 +6,260 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" - "math" + "github.com/consensys/gnark-crypto/ecc/bls24-317/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. +// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { + Tau, Alpha, Beta mpcsetup.UpdateProof } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. -func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) + } + } + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) } } - return nil + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// update scales the SRS parameters by the fresh contributions to τ, α and β +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + + scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
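The identity in update's inline comments above is the heart of the scheme: with τ₁ = τ₀τ′ and α₁ = α₀α′, each published [α₀τ₀ⁱ]₁ becomes [α₁τ₁ⁱ]₁ after a single scalar multiplication by α′τ′ⁱ, so no participant ever needs the previous secrets. A scalar-side sanity check of that identity (a sketch; updateIdentityHolds is not part of this change):

func updateIdentityHolds() bool {
	var tau0, alpha0, tauU, alphaU fr.Element
	tau0.SetRandom()
	alpha0.SetRandom()
	tauU.SetRandom()
	alphaU.SetRandom()

	i := big.NewInt(5) // an arbitrary power index

	var tau1, alpha1, lhs, rhs, t fr.Element
	tau1.Mul(&tau0, &tauU)       // τ₁ = τ₀τ′
	alpha1.Mul(&alpha0, &alphaU) // α₁ = α₀α′

	lhs.Exp(tau1, i).Mul(&lhs, &alpha1) // α₁τ₁ⁱ

	rhs.Exp(tau0, i).Mul(&rhs, &alpha0) // α₀τ₀ⁱ
	t.Exp(tauU, i).Mul(&t, &alphaU)     // α′τ′ⁱ
	rhs.Mul(&rhs, &t)

	return lhs.Equal(&rhs)
}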
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + 
}) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err } } - return nil + return mpcsetup.SameRatioMany( + next.parameters.G1.Tau, + next.parameters.G2.Tau, + next.parameters.G1.AlphaTau, + next.parameters.G1.BetaTau, + ) } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bls24-317/mpcsetup/phase2.go b/backend/groth16/bls24-317/mpcsetup/phase2.go index 6f7a2bf8dd..184e34aec3 100644 --- a/backend/groth16/bls24-317/mpcsetup/phase2.go +++ b/backend/groth16/bls24-317/mpcsetup/phase2.go @@ -6,48 +6,200 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + "github.com/consensys/gnark-crypto/ecc/bls24-317/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bls24-317" + "github.com/consensys/gnark/internal/utils" + "math/big" + "slices" + "sync" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial, 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. 
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := 
point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } + + for i := range sigma { + sigma[i].BigInt(&I) + scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) +} + +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +241,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
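+	// the Lagrange basis lets the accumulation below index these slices directly by
+	// constraint number: constraint j contributes through [L_{ωʲ}(τ)]₁ and its α, β counterparts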
- internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. + // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +285,96 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) - - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - // Set δ public key - c.PublicKey = newPublicKey(delta, c.Hash, 1) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) + evals.G1.CKK = 
make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) - for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} - -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the 
contribution
-	h := contribution.hash()
-	for i := 0; i < len(h); i++ {
-		if h[i] != contribution.Hash[i] {
-			return errors.New("couldn't verify hash of contribution")
+// VerifyPhase2 for the circuit described by r1cs,
+// using parameters from commons.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// c are the outputs from the contributors.
+// WARNING: the last contribution object will be modified
+func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) {
+	prev := new(Phase2)
+	evals := prev.Initialize(r1cs, commons)
+	wp := gcUtils.NewWorkerPool()
+	defer wp.Stop()
+	for i := range c {
+		if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil {
+			return nil, nil, err
+		}
+		prev = c[i]
 	}
-	return nil
+	pk, vk := prev.Seal(commons, &evals, beaconChallenge)
+	return pk, vk, nil
 }

-func (c *Phase2) hash() []byte {
+func (p *Phase2) hash() []byte {
 	sha := sha256.New()
-	c.writeTo(sha)
+	if _, err := p.WriteTo(sha); err != nil {
+		panic(err)
+	}
+	sha.Write(p.Challenge)
 	return sha.Sum(nil)
 }
diff --git a/backend/groth16/bls24-317/mpcsetup/setup.go b/backend/groth16/bls24-317/mpcsetup/setup.go
index e01ada3991..c659254d23 100644
--- a/backend/groth16/bls24-317/mpcsetup/setup.go
+++ b/backend/groth16/bls24-317/mpcsetup/setup.go
@@ -8,23 +8,46 @@ package mpcsetup
 import (
 	curve "github.com/consensys/gnark-crypto/ecc/bls24-317"
 	"github.com/consensys/gnark-crypto/ecc/bls24-317/fr/fft"
-	groth16 "github.com/consensys/gnark/backend/groth16/bls24-317"
+	"github.com/consensys/gnark-crypto/ecc/bls24-317/fr/pedersen"
+	"github.com/consensys/gnark-crypto/ecc/bls24-317/mpcsetup"
+	"github.com/consensys/gnark/backend/groth16"
+	groth16Impl "github.com/consensys/gnark/backend/groth16/bls24-317"
 )

-func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) {
+// Seal performs the final contribution and outputs the proving and verifying keys.
+// No randomization is performed at this step.
+// A verifier should simply re-run this and check
+// that it produces the same values.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// WARNING: Seal modifies p, just as Contribute does.
+// The result will be an INVALID Phase2 object, since no proof of correctness is produced.
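+//
+// A minimal sketch of the intended coordinator-side flow, with hypothetical
+// driver code (r1cs, commons and beaconOutput are assumed to be in scope):
+//
+//	var p Phase2
+//	evals := p.Initialize(r1cs, commons) // deterministic, no randomness involved
+//	// ... apply and verify each participant's contribution to p ...
+//	pk, vk := p.Seal(commons, &evals, beaconOutput)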
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +92,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 + vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bls24-317/mpcsetup/setup_test.go b/backend/groth16/bls24-317/mpcsetup/setup_test.go index 80cf432635..e72874bf88 100644 --- a/backend/groth16/bls24-317/mpcsetup/setup_test.go +++ b/backend/groth16/bls24-317/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" cs "github.com/consensys/gnark/constraint/bls24-317" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,76 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bls24-317/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for 
phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit() - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) - - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs2.clone() + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Initialize(domainSize) + for i := range phase1 { + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + p2.Initialize(ccs, &srsCommons) + for i := range phase2 { + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
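+	// VerifyPhase2 replays Initialize and Seal internally, so the returned keys are
+	// bound to this exact circuit and to the verified contribution transcript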
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +102,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +121,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +135,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +170,49 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) +func assignCircuit() frontend.Circuit { + + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) + + return &Circuit{PreImage: preImage, Hash: hash} - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta +} - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) - return r +func getTestCircuit() *cs.R1CS { + return onceCircuit() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bls24-317/mpcsetup/utils.go b/backend/groth16/bls24-317/mpcsetup/utils.go index 51d4583a4a..2e837140bf 100644 --- a/backend/groth16/bls24-317/mpcsetup/utils.go +++ b/backend/groth16/bls24-317/mpcsetup/utils.go @@ -6,46 +6,15 @@ package mpcsetup import ( - "bytes" - "math/big" - "math/bits" - "runtime" - - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bls24-317" "github.com/consensys/gnark-crypto/ecc/bls24-317/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "sync" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,17 +27,21 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { - result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, N int) []fr.Element { + if N == 0 { + return nil + } + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -79,7 +52,8 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] 
in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -90,70 +64,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) - res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) - if err != nil { - panic(err) - } - return res +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { - nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() - } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +type verificationSettings struct { + wp *gcUtils.WorkerPool } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return -} +type verificationOption func(*verificationSettings) -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) - buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return spG2 } diff --git a/backend/groth16/bls24-317/setup.go b/backend/groth16/bls24-317/setup.go index f1962d9b38..68812bfac6 100644 --- a/backend/groth16/bls24-317/setup.go +++ b/backend/groth16/bls24-317/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range 
commitmentInfo {
-		ckK[i] = make([]fr.Element, len(privateCommitted[i]))
+		ckK[i] = make([]fr.Element, 0, len(privateCommitted[i]))
 	}

 	var t0, t1 fr.Element
@@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error {
 			Add(&t1, &C[i]).
 			Mul(&t1, coeff)
 	}
-	vI := 0 // number of public wires seen so far
-	cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment
-	nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i]
+	vI := 0 // number of public wires seen so far
+	committedIterator := internal.NewMergeIterator(privateCommitted)
+	nbPrivateCommittedSeen := 0 // total number of private committed wires seen so far
 	nbCommitmentsSeen := 0

 	for i := range A {
-		commitment := -1 // index of the commitment that commits to this variable as a private or commitment value
-		var isCommitment, isPublic bool
-		if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic {
+		commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed
+		isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables()
+		if !isPublic {
 			if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i {
 				isCommitment = true
 				nbCommitmentsSeen++
 			}
-
-			for j := range commitmentInfo { // does commitment j commit to i?
-				if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i {
-					commitment = j
-					break // frontend guarantees that no private variable is committed to more than once
-				}
-			}
 		}

-		if isPublic || commitment != -1 || isCommitment {
+		if isPublic || isCommitment || commitmentIndex != -1 {
 			computeK(i, &toxicWaste.gammaInv)

 			if isPublic || isCommitment {
 				vkK[vI] = t1
 				vI++
 			} else { // committed and private
-				ckK[commitment][cI[commitment]] = t1
-				cI[commitment]++
+				ckK[commitmentIndex] = append(ckK[commitmentIndex], t1)
 				nbPrivateCommittedSeen++
 			}
 		} else {
diff --git a/backend/groth16/bls24-317/verify.go b/backend/groth16/bls24-317/verify.go
index e1fafd4549..2623657a68 100644
--- a/backend/groth16/bls24-317/verify.go
+++ b/backend/groth16/bls24-317/verify.go
@@ -16,7 +16,6 @@ import (
 	"github.com/consensys/gnark-crypto/ecc/bls24-317/fr"
 	"github.com/consensys/gnark-crypto/ecc/bls24-317/fr/hash_to_field"
 	"github.com/consensys/gnark-crypto/ecc/bls24-317/fr/pedersen"
-	"github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/backend"
 	"github.com/consensys/gnark/backend/solidity"
 	"github.com/consensys/gnark/constraint"
@@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac
 	maxNbPublicCommitted := 0
 	for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments
-		maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s))
+		maxNbPublicCommitted = max(maxNbPublicCommitted, len(s))
 	}
 	commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes)
 	commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes)
diff --git a/backend/groth16/bn254/mpcsetup/lagrange.go b/backend/groth16/bn254/mpcsetup/lagrange.go
index c694ee35bb..5e994f5c7e 100644
--- a/backend/groth16/bn254/mpcsetup/lagrange.go
+++ b/backend/groth16/bn254/mpcsetup/lagrange.go
@@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) {
 	b.Sub(&t, b)
 }

-// kerDIF8 is a kernel that process a FFT of size 8
+// kerDIF8 is a kernel that processes an FFT of size 8
 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) {
 	butterflyG1(&a[0], &a[4])
 	butterflyG1(&a[1], &a[5])
@@ -103,7
+103,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bn254/mpcsetup/marshal.go b/backend/groth16/bn254/mpcsetup/marshal.go index 94156c9897..c2560b3cb4 100644 --- a/backend/groth16/bn254/mpcsetup/marshal.go +++ b/backend/groth16/bn254/mpcsetup/marshal.go @@ -6,165 +6,245 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/mpcsetup" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } + } + return +} + +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") } + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") + } + + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, err = 
p.Sigmas[i].WriteTo(writer)
+		n += dn
+		if err != nil {
+			return n, err
+		}
+	}
+
+	return n, nil
 }

 // ReadFrom implements io.ReaderFrom
-func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) {
-	toEncode := []interface{}{
-		&phase1.PublicKeys.Tau.SG,
-		&phase1.PublicKeys.Tau.SXG,
-		&phase1.PublicKeys.Tau.XR,
-		&phase1.PublicKeys.Alpha.SG,
-		&phase1.PublicKeys.Alpha.SXG,
-		&phase1.PublicKeys.Alpha.XR,
-		&phase1.PublicKeys.Beta.SG,
-		&phase1.PublicKeys.Beta.SXG,
-		&phase1.PublicKeys.Beta.XR,
-		&phase1.Parameters.G1.Tau,
-		&phase1.Parameters.G1.AlphaTau,
-		&phase1.Parameters.G1.BetaTau,
-		&phase1.Parameters.G2.Tau,
-		&phase1.Parameters.G2.Beta,
+func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) {
+	var nbCommitments uint16
+
+	if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil {
+		return -1, err // binary.Read doesn't return the number of bytes read
 	}
+	n := int64(2) // we've definitely successfully read 2 bytes
+
+	p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments)
+	p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments)
+	p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments)

 	dec := curve.NewDecoder(reader)
-	for _, v := range toEncode {
+	for _, v := range p.refsSlice()[1:] { // nbCommitments already read
 		if err := dec.Decode(v); err != nil {
-			return dec.BytesRead(), err
+			return n + dec.BytesRead(), err
 		}
 	}
-	phase1.Hash = make([]byte, 32)
-	nBytes, err := reader.Read(phase1.Hash)
-	return dec.BytesRead() + int64(nBytes), err
-}
+	n += dec.BytesRead()

-// WriteTo implements io.WriterTo
-func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) {
-	n, err := phase2.writeTo(writer)
+	dn, err := p.Delta.ReadFrom(reader)
+	n += dn
 	if err != nil {
 		return n, err
 	}
-	nBytes, err := writer.Write(phase2.Hash)
-	return int64(nBytes) + n, err
+
+	for i := range p.Sigmas {
+		dn, err = p.Sigmas[i].ReadFrom(reader)
+		n += dn
+		if err != nil {
+			return n, err
+		}
+	}
+
+	return n, nil
 }

-func (c *Phase2) writeTo(writer io.Writer) (int64, error) {
-	enc := curve.NewEncoder(writer)
-	toEncode := []interface{}{
-		&c.PublicKey.SG,
-		&c.PublicKey.SXG,
-		&c.PublicKey.XR,
-		&c.Parameters.G1.Delta,
-		c.Parameters.G1.L,
-		c.Parameters.G1.Z,
-		&c.Parameters.G2.Delta,
+func (c *Phase2Evaluations) refsSlice() []any {
+	N := uint64(len(c.G1.A))
+	expectedLen := 3*N + 4
+	refs := make([]any, 4, expectedLen)
+	refs[0] = N // number of wires, written first so that ReadFrom can allocate the slices below
+	refs[1] = &c.G1.CKK
+	refs[2] = &c.G1.VKK
+	refs[3] = &c.PublicAndCommitmentCommitted
+	refs = utils.AppendRefs(refs, c.G1.A)
+	refs = utils.AppendRefs(refs, c.G1.B)
+	refs = utils.AppendRefs(refs, c.G2.B)
+
+	if uint64(len(refs)) != expectedLen {
+		panic("incorrect length estimate")
 	}
-	for _, v := range toEncode {
+	return refs
+}
+
+// WriteTo implements io.WriterTo
+func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) {
+	enc := curve.NewEncoder(writer)
+
+	for _, v := range c.refsSlice() {
 		if err := enc.Encode(v); err != nil {
 			return enc.BytesWritten(), err
 		}
 	}
 	return enc.BytesWritten(), nil
 }

 // ReadFrom implements io.ReaderFrom
-func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) {
-	dec := curve.NewDecoder(reader)
-	toEncode := []interface{}{
-		&c.PublicKey.SG,
-		&c.PublicKey.SXG,
-		&c.PublicKey.XR,
-		&c.Parameters.G1.Delta,
-		&c.Parameters.G1.L,
-		&c.Parameters.G1.Z,
-		&c.Parameters.G2.Delta,
+func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) {
+	var N uint64
+	if err := binary.Read(reader, binary.BigEndian, &N); err != nil {
+		return -1, err // binary.Read doesn't return the number of
bytes read
+	}
+	n := int64(8)
+
+	c.G1.A = make([]curve.G1Affine, N)
+	c.G1.B = make([]curve.G1Affine, N)
+	c.G2.B = make([]curve.G2Affine, N)

-	for _, v := range toEncode {
+	dec := curve.NewDecoder(reader)
+	for _, v := range c.refsSlice()[1:] {
 		if err := dec.Decode(v); err != nil {
-			return dec.BytesRead(), err
+			return n + dec.BytesRead(), err
 		}
 	}
-	c.Hash = make([]byte, 32)
-	n, err := reader.Read(c.Hash)
-	return int64(n) + dec.BytesRead(), err
-
+	return n + dec.BytesRead(), nil
 }

-// WriteTo implements io.WriterTo
-func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) {
-	enc := curve.NewEncoder(writer)
-	toEncode := []interface{}{
-		c.G1.A,
-		c.G1.B,
-		c.G2.B,
+// refsSlice produces a slice consisting of references to all sub-elements
+// prepended by the size parameter, to be used in WriteTo and ReadFrom functions
+func (c *SrsCommons) refsSlice() []any {
+	N := uint64(len(c.G2.Tau))
+	expectedLen := 5*N - 1
+	// size N                      1
+	// [β]₂                        1
+	// [τⁱ]₁  for 1 ≤ i ≤ 2N-2     2N-2
+	// [τⁱ]₂  for 1 ≤ i ≤ N-1      N-1
+	// [ατⁱ]₁ for 0 ≤ i ≤ N-1      N
+	// [βτⁱ]₁ for 0 ≤ i ≤ N-1      N
+	refs := make([]any, 2, expectedLen)
+	refs[0] = N
+	refs[1] = &c.G2.Beta
+	refs = utils.AppendRefs(refs, c.G1.Tau[1:])
+	refs = utils.AppendRefs(refs, c.G2.Tau[1:])
+	refs = utils.AppendRefs(refs, c.G1.BetaTau)
+	refs = utils.AppendRefs(refs, c.G1.AlphaTau)
+
+	if uint64(len(refs)) != expectedLen {
+		panic("incorrect length estimate")
 	}
-	for _, v := range toEncode {
+	return refs
+}
+
+func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) {
+	enc := curve.NewEncoder(writer)
+	for _, v := range c.refsSlice() {
 		if err := enc.Encode(v); err != nil {
 			return enc.BytesWritten(), err
 		}
 	}
 	return enc.BytesWritten(), nil
 }

 // ReadFrom implements io.ReaderFrom
-func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) {
+func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) {
+	var N uint64
 	dec := curve.NewDecoder(reader)
-	toEncode := []interface{}{
-		&c.G1.A,
-		&c.G1.B,
-		&c.G2.B,
+	if err = dec.Decode(&N); err != nil {
+		return dec.BytesRead(), err
 	}
+
+	c.setZero(N)
+
+	for _, v := range c.refsSlice()[1:] { // we've already decoded N
+		if err = dec.Decode(v); err != nil {
 			return dec.BytesRead(), err
 		}
 	}
 	return dec.BytesRead(), nil
 }
diff --git a/backend/groth16/bn254/mpcsetup/marshal_test.go b/backend/groth16/bn254/mpcsetup/marshal_test.go
deleted file mode 100644
index 0b615cfb90..0000000000
--- a/backend/groth16/bn254/mpcsetup/marshal_test.go
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright 2020-2025 Consensys Software Inc.
-// Licensed under the Apache License, Version 2.0. See the LICENSE file for details.
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bn254" - cs "github.com/consensys/gnark/constraint/bn254" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bn254/mpcsetup/phase1.go b/backend/groth16/bn254/mpcsetup/phase1.go index 2ba641bfe8..d03761007f 100644 --- a/backend/groth16/bn254/mpcsetup/phase1.go +++ b/backend/groth16/bn254/mpcsetup/phase1.go @@ -6,187 +6,260 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" - "math" + "github.com/consensys/gnark-crypto/ecc/bn254/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. +// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { + Tau, Alpha, Beta mpcsetup.UpdateProof } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. -func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
-	for i := 0; i < len(contribs)-1; i++ {
-		if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil {
-			return err
+// setOne instantiates the parameters, and sets all contributions to one
+func (c *SrsCommons) setOne(N uint64) {
+	c.setZero(N)
+	g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0]
+	setG1 := func(s []curve.G1Affine) {
+		for i := range s {
+			s[i].Set(g1)
+		}
+	}
+	setG2 := func(s []curve.G2Affine) {
+		for i := range s {
+			s[i].Set(g2)
 		}
 	}
-	return nil
+
+	setG1(c.G1.Tau[1:])
+	setG2(c.G2.Tau[1:])
+	setG1(c.G1.AlphaTau)
+	setG1(c.G1.BetaTau)
+	c.G2.Beta.Set(g2)
 }

-// verifyPhase1 checks that a contribution is based on a known previous Phase1 state.
-func verifyPhase1(current, contribution *Phase1) error {
-	// Compute R for τ, α, β
-	tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1)
-	alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2)
-	betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3)
+// update applies the given multiplicative updates to the secrets τ, α and β,
+// rescaling every element of the SRS accordingly
+func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) {
+
+	// TODO @gbotrel working with jacobian points here will help with perf.
+
+	tauUpdates := powers(tauUpdate, len(c.G1.Tau))
+
+	scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1
+	scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:])

-	// Check for knowledge of toxic parameters
-	if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) {
-		return errors.New("couldn't verify public key of τ")
+	alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau))
+	alphaUpdates[0].Set(alphaUpdate)
+	for i := range alphaUpdates {
+		// let α₁ = α₀.α', τ₁ = τ₀.τ'
+		// then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ
+		alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate)
 	}
-	if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) {
-		return errors.New("couldn't verify public key of α")
+	scaleG1InPlace(c.G1.AlphaTau, alphaUpdates)
+
+	betaUpdates := make([]fr.Element, len(c.G1.BetaTau))
+	betaUpdates[0].Set(betaUpdate)
+	for i := range betaUpdates {
+		betaUpdates[i].Mul(&tauUpdates[i], betaUpdate)
 	}
-	if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) {
-		return errors.New("couldn't verify public key of β")
+	scaleG1InPlace(c.G1.BetaTau, betaUpdates)
+
+	var betaUpdateI big.Int
+	betaUpdate.BigInt(&betaUpdateI)
+	c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI)
+}
+
+// Seal performs the final contribution and outputs the final parameters.
+// No randomization is performed at this step.
+// A verifier should simply re-run this and check
+// that it produces the same values.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// WARNING: Seal modifies p, just as Contribute does.
+// The result will be an INVALID Phase1 object, since no proof of correctness is produced.
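+//
+// A minimal sketch of the intended flow, with hypothetical driver code
+// (N and beaconOutput are assumed to be in scope):
+//
+//	p := NewPhase1(N)               // N = FFT domain size, a power of 2
+//	p.Contribute()                  // run by each participant in turn, on the deserialized previous state
+//	commons := p.Seal(beaconOutput) // final, deterministic randomization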
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + 
})
+	wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) {
+		subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2)
+	})
+
+	for _, wg := range wg {
+		wg.Wait()
+	}
+	close(subGroupCheckErrors)
+	for err := range subGroupCheckErrors {
+		if err != nil {
+			return err
+		}
+	}
-	return nil
+	// finally, check that the updated powers are mutually consistent
+	return mpcsetup.SameRatioMany(
+		next.parameters.G1.Tau,
+		next.parameters.G2.Tau,
+		next.parameters.G1.AlphaTau,
+		next.parameters.G1.BetaTau,
+	)
 }

-func (phase1 *Phase1) hash() []byte {
+func (p *Phase1) hash() []byte {
 	sha := sha256.New()
-	phase1.writeTo(sha)
+	if _, err := p.WriteTo(sha); err != nil {
+		panic(err)
+	}
+	sha.Write(p.Challenge)
 	return sha.Sum(nil)
 }
+
+// Initialize an empty Phase1 contribution object
+// to be used by the first contributor or the verifier
+// N is the FFT domain size
+func (p *Phase1) Initialize(N uint64) {
+	if ecc.NextPowerOfTwo(N) != N {
+		panic("N must be a power of 2")
+	}
+	p.parameters.setOne(N)
+}
+
+// NewPhase1 creates an empty Phase1 contribution object
+// to be used by the first contributor or the verifier
+// N is the FFT domain size
+func NewPhase1(N uint64) *Phase1 {
+	res := new(Phase1)
+	res.Initialize(N)
+	return res
+}
diff --git a/backend/groth16/bn254/mpcsetup/phase2.go b/backend/groth16/bn254/mpcsetup/phase2.go
index c0f4874dc6..65195b10d0 100644
--- a/backend/groth16/bn254/mpcsetup/phase2.go
+++ b/backend/groth16/bn254/mpcsetup/phase2.go
@@ -6,48 +6,200 @@ package mpcsetup
 import (
+	"bytes"
 	"crypto/sha256"
 	"errors"
-	"math/big"
-
+	"fmt"
 	curve "github.com/consensys/gnark-crypto/ecc/bn254"
 	"github.com/consensys/gnark-crypto/ecc/bn254/fr"
+	"github.com/consensys/gnark-crypto/ecc/bn254/mpcsetup"
+	gcUtils "github.com/consensys/gnark-crypto/utils"
+	"github.com/consensys/gnark/backend/groth16"
+	"github.com/consensys/gnark/backend/groth16/internal"
 	"github.com/consensys/gnark/constraint"
 	cs "github.com/consensys/gnark/constraint/bn254"
+	"github.com/consensys/gnark/internal/utils"
+	"math/big"
+	"slices"
+	"sync"
 )

-type Phase2Evaluations struct {
+// Phase2Evaluations components of the circuit keys
+// not depending on Phase2 randomisations
+type Phase2Evaluations struct { // TODO @Tabaie rename
 	G1 struct {
-		A, B, VKK []curve.G1Affine
+		A   []curve.G1Affine   // A are the left coefficient polynomials for each witness element, evaluated at τ
+		B   []curve.G1Affine   // B are the right coefficient polynomials for each witness element, evaluated at τ
+		VKK []curve.G1Affine   // VKK are the coefficients of the public witness and commitments
+		CKK [][]curve.G1Affine // CKK are the coefficients of the committed values
 	}
 	G2 struct {
-		B []curve.G2Affine
+		B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ
 	}
+	PublicAndCommitmentCommitted [][]int
 }

 type Phase2 struct {
 	Parameters struct {
 		G1 struct {
-			Delta curve.G1Affine
-			L, Z  []curve.G1Affine
+			Delta    curve.G1Affine
+			Z        []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2
+			PKK      []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key.
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := 
point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } + + for i := range sigma { + sigma[i].BigInt(&I) + scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) +} + +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +241,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
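+ // a note on the change of basis (sketch, not the helper's exact mechanics): the SRS
+ // arrives in the monomial basis {[τⁱ]}; lagrangeCoeffsG1/G2 convert it to {[L_{ωʲ}(τ)]}
+ // where L_{ωʲ}(X) = (1/size)·Σᵢ ω⁻ⁱʲ Xⁱ, i.e. an inverse DFT applied to the group
+ // elements, so each constraint's contribution can be accumulated pointwise below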
- internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. + // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +285,96 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) - - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - // Set δ public key - c.PublicKey = newPublicKey(delta, c.Hash, 1) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) + evals.G1.CKK = 
make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) - for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} - -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the 
contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 for circuit described by r1cs +// using parameters from commons +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return pk, vk, nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } + sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bn254/mpcsetup/setup.go b/backend/groth16/bn254/mpcsetup/setup.go index e2621ce159..49f31260d2 100644 --- a/backend/groth16/bn254/mpcsetup/setup.go +++ b/backend/groth16/bn254/mpcsetup/setup.go @@ -8,23 +8,46 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr/fft" - groth16 "github.com/consensys/gnark/backend/groth16/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bn254/mpcsetup" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bn254" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
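+//
+// A typical final-step flow, mirroring the tests in this package (sketch):
+//
+//	commons := p1.Seal(beaconChallenge1)                  // phase 1 output, circuit-independent
+//	evals := p2.Initialize(ccs, &commons)                 // circuit-specific preprocessing
+//	pk, vk := p2.Seal(&commons, &evals, beaconChallenge2) // final keys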
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +92,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 + vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bn254/mpcsetup/setup_test.go b/backend/groth16/bn254/mpcsetup/setup_test.go index 4e01da2c5a..290ffd2e62 100644 --- a/backend/groth16/bn254/mpcsetup/setup_test.go +++ b/backend/groth16/bn254/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bn254" "github.com/consensys/gnark-crypto/ecc/bn254/fr" cs "github.com/consensys/gnark/constraint/bn254" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,71 +25,76 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bn254/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test 
purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) + ccs := getTestCircuit() - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs2.clone() + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Initialize(domainSize) + for i := range phase1 { + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + p2.Initialize(ccs, &srsCommons) + for i := range phase2 { + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
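+ // VerifyPhase2 re-runs the deterministic Initialize and beacon steps itself,
+ // so the keys it returns are reproducible by any verifier given the same inputs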
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -92,13 +102,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -109,8 +121,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -121,17 +135,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -154,32 +170,49 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) +func assignCircuit() frontend.Circuit { - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta + // Build the witness + var preImage, hash fr.Element - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) + + return &Circuit{PreImage: preImage, Hash: hash} - return r } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
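+// onceCircuit compiles the shared test circuit at most once and caches the
+// result, so tests and benchmarks don't repeat the compilation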
+var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) + +func getTestCircuit() *cs.R1CS { + return onceCircuit() +} + +func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bn254/mpcsetup/unit_test.go b/backend/groth16/bn254/mpcsetup/unit_test.go new file mode 100644 index 0000000000..2449423b87 --- /dev/null +++ b/backend/groth16/bn254/mpcsetup/unit_test.go @@ -0,0 +1,286 @@ +package mpcsetup + +import ( + "fmt" + "github.com/consensys/gnark-crypto/ecc" + curve "github.com/consensys/gnark-crypto/ecc/bn254" + "github.com/consensys/gnark-crypto/ecc/bn254/fr" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bn254" + "github.com/consensys/gnark/constraint" + cs "github.com/consensys/gnark/constraint/bn254" + "github.com/consensys/gnark/frontend" + "github.com/consensys/gnark/frontend/cs/r1cs" + "github.com/consensys/gnark/internal/utils/test_utils" + gnarkio "github.com/consensys/gnark/io" + "github.com/stretchr/testify/require" + "math/big" + "slices" + "testing" +) + +// TestSetupBeaconOnly tests the setup/key extraction +// as well as the random beacon contribution +// without any untrusted contributors +func TestSetupBeaconOnly(t *testing.T) { + + // Compile the circuit + ccs := getTestCircuit() + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) + + var ( + p1 Phase1 + p2 Phase2 + ) + p1.Initialize(domainSize) + commons := p1.Seal([]byte("beacon 1")) + + evals := p2.Initialize(ccs, &commons) + pk, vk := p2.Seal(&commons, &evals, []byte("beacon 2")) + + _pk := pk.(*groth16Impl.ProvingKey) + + rpk, rvk, err := groth16.Setup(ccs) + require.NoError(t, err) + _rpk := rpk.(*groth16Impl.ProvingKey) + + // assert everything is of the same size + require.Equal(t, _rpk.Domain.Cardinality, _pk.Domain.Cardinality) + require.Equal(t, len(_rpk.G1.A), len(_pk.G1.A)) + require.Equal(t, len(_rpk.G1.B), len(_pk.G1.B)) + require.Equal(t, len(_rpk.G1.K), len(_pk.G1.K)) + require.Equal(t, len(_rpk.G1.Z), len(_pk.G1.Z)) + require.Equal(t, len(_rpk.G2.B), len(_pk.G2.B)) + require.Equal(t, len(_rpk.CommitmentKeys), len(_pk.CommitmentKeys)) + for i := range _rpk.CommitmentKeys { + require.Equal(t, len(_rpk.CommitmentKeys[i].BasisExpSigma), len(_pk.CommitmentKeys[i].BasisExpSigma)) + require.Equal(t, len(_rpk.CommitmentKeys[i].Basis), len(_pk.CommitmentKeys[i].Basis)) + } + + proveVerifyCircuit(t, rpk, rvk) + fmt.Println("regular proof verified") + proveVerifyCircuit(t, pk, vk) + fmt.Println("mpc proof verified") +} + +// TestNoContributors tests the beacon and some of the serialization +func TestNoContributors(t *testing.T) { + testAll(t, 0, 0) +} + +func TestOnePhase1Contribute(t *testing.T) { + testAll(t, 2, 0) +} + +func commonsSmallValues(N, tau, alpha, beta uint64) SrsCommons { + var ( + res SrsCommons + I big.Int + coeff fr.Element + ) + _, _, g1, g2 := curve.Generators() + tauPowers := powersI(tau, int(2*N-1)) + res.G1.Tau = make([]curve.G1Affine, 
2*N-1) + for i := range res.G1.Tau { + tauPowers[i].BigInt(&I) + res.G1.Tau[i].ScalarMultiplication(&g1, &I) + } + + res.G2.Tau = make([]curve.G2Affine, N) + for i := range res.G2.Tau { + tauPowers[i].BigInt(&I) + res.G2.Tau[i].ScalarMultiplication(&g2, &I) + } + + res.G1.AlphaTau = make([]curve.G1Affine, N) + coeff.SetUint64(alpha) + for i := range res.G1.AlphaTau { + var x fr.Element + x.Mul(&tauPowers[i], &coeff) + x.BigInt(&I) + res.G1.AlphaTau[i].ScalarMultiplication(&g1, &I) + } + + res.G1.BetaTau = make([]curve.G1Affine, N) + coeff.SetUint64(beta) + for i := range res.G1.BetaTau { + var x fr.Element + x.Mul(&tauPowers[i], &coeff) + x.BigInt(&I) + res.G1.BetaTau[i].ScalarMultiplication(&g1, &I) + } + + I.SetUint64(beta) + res.G2.Beta.ScalarMultiplication(&g2, &I) + + return res +} + +func powersI(x uint64, n int) []fr.Element { + var y fr.Element + y.SetUint64(x) + return powers(&y, n) +} + +func TestPowers(t *testing.T) { + var x fr.Element + x.SetUint64(2) + x2 := powers(&x, 10) + for i := range x2 { + require.True(t, x2[i].IsUint64()) + require.Equal(t, x2[i].Uint64(), uint64(1< 65535 { + panic("nbCommitments not fitting in 16 bits") } + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") + } + + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return 
dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, 
c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } diff --git a/backend/groth16/bw6-633/mpcsetup/marshal_test.go b/backend/groth16/bw6-633/mpcsetup/marshal_test.go deleted file mode 100644 index f4e12c3daf..0000000000 --- a/backend/groth16/bw6-633/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2025 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. - -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bw6-633" - cs "github.com/consensys/gnark/constraint/bw6-633" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bw6-633/mpcsetup/phase1.go b/backend/groth16/bw6-633/mpcsetup/phase1.go index 57c4528d3c..6c363dc848 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase1.go +++ b/backend/groth16/bw6-633/mpcsetup/phase1.go @@ -6,187 +6,260 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" - "math" + "github.com/consensys/gnark-crypto/ecc/bw6-633/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. 
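+// They are the "powers of tau": circuit-independent, so a single ceremony
+// can serve any circuit whose domain fits in N.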
+// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { + Tau, Alpha, Beta mpcsetup.UpdateProof } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). -func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. 
-func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) - for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) + } + } + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) } } - return nil + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. 
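+ // mirroring the α update below: if the running value is τ₀ and this contribution
+ // is τ', the new value τ₁ = τ₀τ' satisfies [τ₁ⁱ] = τ'ⁱ·[τ₀ⁱ], so scaling each
+ // element by the matching power of τ' updates the vector in place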
+ + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + + scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. +func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. 
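+// N is the expected FFT domain size (verification fails on a mismatch),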
+// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + // check subgroup 
membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + }) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err } } - return nil + return mpcsetup.SameRatioMany( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bw6-633/mpcsetup/phase2.go b/backend/groth16/bw6-633/mpcsetup/phase2.go index 21e5ab4050..657f699c66 100644 --- a/backend/groth16/bw6-633/mpcsetup/phase2.go +++ b/backend/groth16/bw6-633/mpcsetup/phase2.go @@ -6,48 +6,200 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-633/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bw6-633" + "github.com/consensys/gnark/internal/utils" + "math/big" + "slices" + "sync" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations 
struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: 
&next.Parameters.G2.Sigma[i]}); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } + + for i := range sigma { + sigma[i].BigInt(&I) + scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) +} + +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +241,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... 
+ coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... - internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. + // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -129,125 +285,96 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} - -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) - - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] - // Set δ public key - c.PublicKey = newPublicKey(delta, c.Hash, 1) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - 
c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) + evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) - for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} - -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return 
errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 for circuit described by r1cs +// using parameters from commons +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) { + prev := new(Phase2) + evals := prev.Initialize(r1cs, commons) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return nil, nil, err } + prev = c[i] } - return nil + pk, vk := prev.Seal(commons, &evals, beaconChallenge) + return pk, vk, nil } -func (c *Phase2) hash() []byte { +func (p *Phase2) hash() []byte { sha := sha256.New() - c.writeTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } + sha.Write(p.Challenge) return sha.Sum(nil) } diff --git a/backend/groth16/bw6-633/mpcsetup/setup.go b/backend/groth16/bw6-633/mpcsetup/setup.go index cdb115da1c..e034125ffd 100644 --- a/backend/groth16/bw6-633/mpcsetup/setup.go +++ b/backend/groth16/bw6-633/mpcsetup/setup.go @@ -8,23 +8,46 @@ package mpcsetup import ( curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/fft" - groth16 "github.com/consensys/gnark/backend/groth16/bw6-633" + "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/pedersen" + "github.com/consensys/gnark-crypto/ecc/bw6-633/mpcsetup" + "github.com/consensys/gnark/backend/groth16" + groth16Impl "github.com/consensys/gnark/backend/groth16/bw6-633" ) -func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) { +// Seal performs the final contribution and outputs the proving and verifying keys. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced. 
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -69,18 +92,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 + vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk + return &pk, &vk } diff --git a/backend/groth16/bw6-633/mpcsetup/setup_test.go b/backend/groth16/bw6-633/mpcsetup/setup_test.go index 689e00d487..e551c04ccb 100644 --- a/backend/groth16/bw6-633/mpcsetup/setup_test.go +++ b/backend/groth16/bw6-633/mpcsetup/setup_test.go @@ -6,9 +6,14 @@ package mpcsetup import ( + "bytes" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" cs "github.com/consensys/gnark/constraint/bw6-633" + "io" + "slices" + "sync" "testing" "github.com/consensys/gnark/backend/groth16" @@ -20,74 +25,76 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - if testing.Short() { - t.Skip() - } - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for phase1 - for i := 
1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit() - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) - - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs2.clone() + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Initialize(domainSize) + for i := range phase1 { + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) + assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) + p2.Initialize(ccs, &srsCommons) + for i := range phase2 { + p2.Contribute() + serialized[i] = serialize(&p2) + } - pubWitness, err := witness.Public() - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) 
assert.NoError(err) - err = groth16.Verify(proof, &vk, pubWitness) - assert.NoError(err) + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -95,13 +102,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -112,8 +121,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -124,17 +135,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -157,32 +170,49 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) +func assignCircuit() frontend.Circuit { + + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) + + return &Circuit{PreImage: preImage, Hash: hash} - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta +} - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) +var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) - return r +func getTestCircuit() *cs.R1CS { + return onceCircuit() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) 
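For contributors integrating with a ceremony like the one testAll simulates: each participant receives the current Phase2 blob, deserializes it, contributes, and returns the new bytes to the coordinator. A minimal sketch of that participant-side round trip, using only the ReadFrom/Contribute/WriteTo API introduced in this PR (the transport and error policy are assumptions, and the helper name is ours):

func contributePhase2(current []byte) ([]byte, error) {
	// deserialize the coordinator's latest Phase2 state
	var p Phase2
	if _, err := p.ReadFrom(bytes.NewReader(current)); err != nil {
		return nil, err
	}
	// sample fresh δ and σᵢ updates, with proofs of correctness
	p.Contribute()
	// serialize the updated state to send back
	var out bytes.Buffer
	if _, err := p.WriteTo(&out); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}

The coordinator side is exactly what the test above exercises: collect the serialized contributions, then hand them to VerifyPhase1/VerifyPhase2 together with a beacon challenge.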
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/backend/groth16/bw6-633/mpcsetup/utils.go b/backend/groth16/bw6-633/mpcsetup/utils.go index 479f40f849..622e86d652 100644 --- a/backend/groth16/bw6-633/mpcsetup/utils.go +++ b/backend/groth16/bw6-633/mpcsetup/utils.go @@ -6,46 +6,15 @@ package mpcsetup import ( - "bytes" - "math/big" - "math/bits" - "runtime" - - "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-633" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" + "math/big" + "math/bits" + "sync" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -58,17 +27,21 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { - result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, N int) []fr.Element { + if N == 0 { + return nil + } + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -79,7 +52,8 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] 
in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -90,70 +64,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) - res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) - if err != nil { - panic(err) - } - return res +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { - nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() - } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +type verificationSettings struct { + wp *gcUtils.WorkerPool } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return -} +type verificationOption func(*verificationSettings) -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) - buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return spG2 } diff --git a/backend/groth16/bw6-633/setup.go b/backend/groth16/bw6-633/setup.go index d7ce66cdef..5d16301890 100644 --- a/backend/groth16/bw6-633/setup.go +++ b/backend/groth16/bw6-633/setup.go @@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range 
commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment - nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) + nbPrivateCommittedSeen := 0 // total number of private committed wires seen so far nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to wire i; -1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? - if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { diff --git a/backend/groth16/bw6-633/verify.go b/backend/groth16/bw6-633/verify.go index 745ba7a7d0..399fdf4891 100644 --- a/backend/groth16/bw6-633/verify.go +++ b/backend/groth16/bw6-633/verify.go @@ -16,7 +16,6 @@ import ( "github.com/consensys/gnark-crypto/ecc/bw6-633/fr" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/hash_to_field" "github.com/consensys/gnark-crypto/ecc/bw6-633/fr/pedersen" - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/backend/groth16/bw6-761/mpcsetup/lagrange.go b/backend/groth16/bw6-761/mpcsetup/lagrange.go index ce12bdb796..e0ebb14e7b 100644 --- a/backend/groth16/bw6-761/mpcsetup/lagrange.go +++ b/backend/groth16/bw6-761/mpcsetup/lagrange.go @@ -75,7 +75,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -103,7 +103,7
@@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/backend/groth16/bw6-761/mpcsetup/marshal.go b/backend/groth16/bw6-761/mpcsetup/marshal.go index 811e290f68..5e2c6cb7c3 100644 --- a/backend/groth16/bw6-761/mpcsetup/marshal.go +++ b/backend/groth16/bw6-761/mpcsetup/marshal.go @@ -6,165 +6,245 @@ package mpcsetup import ( + "encoding/binary" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" + "github.com/consensys/gnark-crypto/ecc/bw6-761/mpcsetup" + "github.com/consensys/gnark/internal/utils" "io" ) // WriteTo implements io.WriterTo -func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) { - n, err := phase1.writeTo(writer) - if err != nil { - return n, err +// It does not write the Challenge from the previous contribution +func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) { + var dn int64 + for _, v := range []io.WriterTo{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.WriteTo(writer) + n += dn + if err != nil { + return + } + } + return +} + +// ReadFrom implements io.ReaderFrom +// It does not read the Challenge from the previous contribution +func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) { + var dn int64 + for _, v := range []io.ReaderFrom{ + &p.proofs.Tau, + &p.proofs.Alpha, + &p.proofs.Beta, + &p.parameters, + } { + dn, err = v.ReadFrom(reader) + n += dn + if err != nil { + return + } } - nBytes, err := writer.Write(phase1.Hash) - return int64(nBytes) + n, err + return } -func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - phase1.Parameters.G1.Tau, - phase1.Parameters.G1.AlphaTau, - phase1.Parameters.G1.BetaTau, - phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +// slice of references for the parameters of p +func (p *Phase2) refsSlice() []any { + nbCommitments := len(p.Parameters.G2.Sigma) + if nbCommitments > 65535 { + panic("nbCommitments not fitting in 16 bits") } + expectedLen := 2*nbCommitments + 5 + refs := make([]any, 5, expectedLen) + refs[0] = uint16(nbCommitments) + refs[1] = &p.Parameters.G1.Delta + refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to + refs[3] = &p.Parameters.G1.Z // unique size: N-1 + refs[4] = &p.Parameters.G2.Delta + + refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK) + refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma) + + if len(refs) != expectedLen { + panic("incorrect length estimate") + } + + return refs +} + +// WriteTo implements io.WriterTo +func (p *Phase2) WriteTo(writer io.Writer) (int64, error) { + + // write the parameters enc := curve.NewEncoder(writer) - for _, v := range toEncode { + for _, v := range p.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil + + //write the proofs + dn, err := p.Delta.WriteTo(writer) + n := enc.BytesWritten() + dn + if err != nil { + return n, err + } + + for i := range p.Sigmas { + dn, 
err = p.Sigmas[i].WriteTo(writer) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } // ReadFrom implements io.ReaderFrom -func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) { - toEncode := []interface{}{ - &phase1.PublicKeys.Tau.SG, - &phase1.PublicKeys.Tau.SXG, - &phase1.PublicKeys.Tau.XR, - &phase1.PublicKeys.Alpha.SG, - &phase1.PublicKeys.Alpha.SXG, - &phase1.PublicKeys.Alpha.XR, - &phase1.PublicKeys.Beta.SG, - &phase1.PublicKeys.Beta.SXG, - &phase1.PublicKeys.Beta.XR, - &phase1.Parameters.G1.Tau, - &phase1.Parameters.G1.AlphaTau, - &phase1.Parameters.G1.BetaTau, - &phase1.Parameters.G2.Tau, - &phase1.Parameters.G2.Beta, +func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) { + var nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the 
number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil } diff --git a/backend/groth16/bw6-761/mpcsetup/marshal_test.go b/backend/groth16/bw6-761/mpcsetup/marshal_test.go deleted file mode 100644 index 0979c6e085..0000000000 --- a/backend/groth16/bw6-761/mpcsetup/marshal_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2020-2025 Consensys Software Inc. -// Licensed under the Apache License, Version 2.0. See the LICENSE file for details. 
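The round-trip coverage of the marshal_test.go deleted here moves into the new integration test's serialize/deserialize helpers. For reference, a minimal equivalent check against the new WriteTo/ReadFrom pair could look like the following sketch (the Phase2 value is assumed to come from Initialize and Contribute, as in the new tests; the helper name is ours):

func roundTripPhase2(t *testing.T, p *Phase2) {
	var bb bytes.Buffer
	if _, err := p.WriteTo(&bb); err != nil {
		t.Fatal(err)
	}
	var q Phase2
	n, err := q.ReadFrom(bytes.NewReader(bb.Bytes()))
	if err != nil {
		t.Fatal(err)
	}
	// ReadFrom should consume exactly the bytes WriteTo produced
	if int(n) != bb.Len() {
		t.Fatalf("read %d of %d bytes", n, bb.Len())
	}
}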
- -// Code generated by gnark DO NOT EDIT - -package mpcsetup - -import ( - "testing" - - curve "github.com/consensys/gnark-crypto/ecc/bw6-761" - cs "github.com/consensys/gnark/constraint/bw6-761" - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - gnarkio "github.com/consensys/gnark/io" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} diff --git a/backend/groth16/bw6-761/mpcsetup/phase1.go b/backend/groth16/bw6-761/mpcsetup/phase1.go index 72689cc556..b60149235e 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase1.go +++ b/backend/groth16/bw6-761/mpcsetup/phase1.go @@ -6,187 +6,260 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" + "fmt" + "github.com/consensys/gnark-crypto/ecc" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" - "math" + "github.com/consensys/gnark-crypto/ecc/bw6-761/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "sync" ) -// Phase1 represents the Phase1 of the MPC described in +// SrsCommons are the circuit-independent components of the Groth16 SRS, +// computed by the first phase. +// in all that follows, N is the domain size +type SrsCommons struct { + G1 struct { + Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁} + AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁} + BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁} + } + G2 struct { + Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂} + Beta curve.G2Affine // [β]₂ + } +} + +// Phase1 in line with Phase1 of the MPC described in // https://eprint.iacr.org/2017/1050.pdf // // Also known as "Powers of Tau" type Phase1 struct { - Parameters struct { - G1 struct { - Tau []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁} - AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁} - BetaTau []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁} - } - G2 struct { - Tau []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂} - Beta curve.G2Affine // [β]₂ - } + proofs struct { + Tau, Alpha, Beta mpcsetup.UpdateProof } - PublicKeys struct { - Tau, Alpha, Beta PublicKey - } - Hash []byte // sha256 hash + parameters SrsCommons + Challenge []byte // Hash of the transcript PRIOR to this participant } -// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before -// any randomness contribution is made (see Contribute()). 
-func InitPhase1(power int) (phase1 Phase1) { - N := int(math.Pow(2, float64(power))) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetOne() - alpha.SetOne() - beta.SetOne() - phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2) - phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3) - - // First contribution use generators - _, _, g1, g2 := curve.Generators() - phase1.Parameters.G2.Beta.Set(&g2) - phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1) - phase1.Parameters.G2.Tau = make([]curve.G2Affine, N) - phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N) - phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N) - for i := 0; i < len(phase1.Parameters.G1.Tau); i++ { - phase1.Parameters.G1.Tau[i].Set(&g1) - } - for i := 0; i < len(phase1.Parameters.G2.Tau); i++ { - phase1.Parameters.G2.Tau[i].Set(&g2) - phase1.Parameters.G1.AlphaTau[i].Set(&g1) - phase1.Parameters.G1.BetaTau[i].Set(&g1) - } - - phase1.Parameters.G2.Beta.Set(&g2) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() - - return +// Contribute contributes randomness to the Phase1 object. This mutates Phase1. +// p is trusted to be well-formed. The ReadFrom function performs such basic sanity checks. +func (p *Phase1) Contribute() { + p.Challenge = p.hash() + + // Generate main value updates + var ( + tauContrib, alphaContrib, betaContrib fr.Element + ) + + p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1) + p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2) + p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3) + + p.parameters.update(&tauContrib, &alphaContrib, &betaContrib) } -// Contribute contributes randomness to the phase1 object. This mutates phase1. -func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) + } + } + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) } } - return nil + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// update applies the given τ, α, and β contributions to the running parameters +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { + + // TODO @gbotrel working with jacobian points here will help with perf. + + tauUpdates := powers(tauUpdate, len(c.G1.Tau)) + + scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1 + scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:]) - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau)) + alphaUpdates[0].Set(alphaUpdate) + for i := range alphaUpdates { + // let α₁ = α₀.α', τ₁ = τ₀.τ' + // then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ + alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate) } - if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) { - return errors.New("couldn't verify public key of α") + scaleG1InPlace(c.G1.AlphaTau, alphaUpdates) + + betaUpdates := make([]fr.Element, len(c.G1.BetaTau)) + betaUpdates[0].Set(betaUpdate) + for i := range betaUpdates { + betaUpdates[i].Mul(&tauUpdates[i], betaUpdate) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) { - return errors.New("couldn't verify public key of β") + scaleG1InPlace(c.G1.BetaTau, betaUpdates) + + var betaUpdateI big.Int + betaUpdate.BigInt(&betaUpdateI) + c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI) +} + +// Seal performs the final contribution and outputs the final parameters. +// No randomization is performed at this step. +// A verifier should simply re-run this and check +// that it produces the same values. +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// WARNING: Seal modifies p, just as Contribute does. +// The result will be an INVALID Phase1 object, since no proof of correctness is produced.
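An aside on the algebra inside update above: with running secrets (τ₀, α₀) and a contribution (τ', α'), the new encoded values must equal α₁τ₁ⁱ = (α₀τ₀ⁱ)·α'τ'ⁱ, where α₁ = α₀α' and τ₁ = τ₀τ', which is why scaling each ατⁱ term by α'τ'ⁱ suffices. A scalar sanity check of that identity, as a sketch using only fr arithmetic (the helper name is ours, not part of this PR):

func checkAlphaTauUpdate(tau0, alpha0, tauContrib, alphaContrib fr.Element, i int64) bool {
	e := big.NewInt(i)
	var tau1, alpha1, lhs, rhs, scale fr.Element
	tau1.Mul(&tau0, &tauContrib)       // τ₁ = τ₀τ'
	alpha1.Mul(&alpha0, &alphaContrib) // α₁ = α₀α'
	lhs.Exp(tau1, e)
	lhs.Mul(&lhs, &alpha1) // α₁τ₁ⁱ
	rhs.Exp(tau0, e)
	rhs.Mul(&rhs, &alpha0) // α₀τ₀ⁱ
	scale.Exp(tauContrib, e)
	scale.Mul(&scale, &alphaContrib) // α'τ'ⁱ
	rhs.Mul(&rhs, &scale)
	return lhs.Equal(&rhs)
}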
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons { + newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3) + p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2]) + return p.parameters +} + +// VerifyPhase1 and return the SRS parameters usable for any circuit of domain size N +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. +// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. 
Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") - } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + 
}) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err } } - return nil + return mpcsetup.SameRatioMany( + next.parameters.G1.Tau, + next.parameters.G2.Tau, + next.parameters.G1.AlphaTau, + next.parameters.G1.BetaTau, + ) } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} diff --git a/backend/groth16/bw6-761/mpcsetup/phase2.go b/backend/groth16/bw6-761/mpcsetup/phase2.go index 5ed0b3abef..d5d629c201 100644 --- a/backend/groth16/bw6-761/mpcsetup/phase2.go +++ b/backend/groth16/bw6-761/mpcsetup/phase2.go @@ -6,48 +6,200 @@ package mpcsetup import ( + "bytes" "crypto/sha256" "errors" - "math/big" - + "fmt" curve "github.com/consensys/gnark-crypto/ecc/bw6-761" "github.com/consensys/gnark-crypto/ecc/bw6-761/fr" + "github.com/consensys/gnark-crypto/ecc/bw6-761/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" cs "github.com/consensys/gnark/constraint/bw6-761" + "github.com/consensys/gnark/internal/utils" + "math/big" + "slices" + "sync" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial, 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key.
They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) + } + } + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { // match the first commitment basis elem against the contribution commitment + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) } } - PublicKey PublicKey - Hash []byte + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := 
point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } + + for i := range sigma { + sigma[i].BigInt(&I) + scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) +} + +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +// Initialize is to be run by the coordinator +// It involves no coin tosses. A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -89,26 +241,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... 
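// The lagrangeCoeffs* calls above turn power tables [P, τP, τ²P, ...] into Lagrange-basis
// tables [L_{ω⁰}(τ)P, L_{ω¹}(τ)P, ...] via an inverse FFT over the evaluation domain.
// The following is an editorial scalar-field sketch of that transform (an illustration of
// the underlying identity L_{ωⁱ}(X) = (1/n)·Σⱼ ω⁻ⁱʲXʲ — it is not the library's
// lagrangeCoeffsG1, and the stand-in value for τ is of course never known in a real ceremony):
package main

import (
	"fmt"

	"github.com/consensys/gnark-crypto/ecc/bw6-761/fr"
	"github.com/consensys/gnark-crypto/ecc/bw6-761/fr/fft"
)

func main() {
	const n = 8
	var tau fr.Element
	tau.SetUint64(5) // hypothetical stand-in for the secret τ

	// powers [1, τ, ..., τⁿ⁻¹], the scalar analogue of commons.G1.Tau
	a := make([]fr.Element, n)
	a[0].SetOne()
	for i := 1; i < n; i++ {
		a[i].Mul(&a[i-1], &tau)
	}

	// inverse DFT of the powers yields the Lagrange evaluations [L_{ωⁱ}(τ)]ᵢ
	d := fft.NewDomain(n)
	d.FFTInverse(a, fft.DIF) // DIF decimation leaves the output in bit-reversed order
	fft.BitReverse(a)        // restore natural order: a[i] = L_{ωⁱ}(τ)

	// sanity check: the Lagrange basis is a partition of unity, Σᵢ L_{ωⁱ}(τ) = 1
	var sum fr.Element
	for i := range a {
		sum.Add(&sum, &a[i])
	}
	fmt.Println(sum.IsOne()) // expect true
}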
-	internal, secret, public := r1cs.GetNbVariables()
-	nWires := internal + secret + public
+	nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables()
+	nWires := nbInternal + nbSecret + nbPublic
 	var evals Phase2Evaluations
-	evals.G1.A = make([]curve.G1Affine, nWires)
-	evals.G1.B = make([]curve.G1Affine, nWires)
-	evals.G2.B = make([]curve.G2Affine, nWires)
+	commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments)
+	evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic)
+	evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance
+	evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance
+	evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings
 	bA := make([]curve.G1Affine, nWires)
 	aB := make([]curve.G1Affine, nWires)
 	C := make([]curve.G1Affine, nWires)

-	// TODO @gbotrel use constraint iterator when available.
-	i := 0
 	it := r1cs.GetR1CIterator()
 	for c := it.Next(); c != nil; c = it.Next() {
+		// each constraint is sparse, i.e. involves a small portion of all variables.
+		// so we iterate over the variables involved and add the constraint's contribution
+		// to every variable's A, B, and C values
+
 		// A
 		for _, t := range c.L {
 			accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i])
@@ -129,125 +285,96 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) {

 	// Prepare default contribution
 	_, _, g1, g2 := curve.Generators()
-	c2.Parameters.G1.Delta = g1
-	c2.Parameters.G2.Delta = g2
+	p.Parameters.G1.Delta = g1
+	p.Parameters.G2.Delta = g2

 	// Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2]
 	// τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2]
-	n := len(srs.G1.AlphaTau)
-	c2.Parameters.G1.Z = make([]curve.G1Affine, n)
-	for i := 0; i < n-1; i++ {
-		c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i])
-	}
-	bitReverse(c2.Parameters.G1.Z)
-	c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1]
-
-	// Evaluate L
-	nPrivate := internal + secret
-	c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate)
-	evals.G1.VKK = make([]curve.G1Affine, public)
-	offset := public
-	for i := 0; i < nWires; i++ {
-		var tmp curve.G1Affine
-		tmp.Add(&bA[i], &aB[i])
-		tmp.Add(&tmp, &C[i])
-		if i < public {
-			evals.G1.VKK[i].Set(&tmp)
-		} else {
-			c2.Parameters.G1.L[i-offset].Set(&tmp)
-		}
+	n := len(commons.G1.AlphaTau)
+	p.Parameters.G1.Z = make([]curve.G1Affine, n)
+	for i := range n - 1 {
+		p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i])
 	}
-	// Set δ public key
-	var delta fr.Element
-	delta.SetOne()
-	c2.PublicKey = newPublicKey(delta, nil, 1)
-
-	// Hash initial contribution
-	c2.Hash = c2.hash()
-	return c2, evals
-}
-
-func (c *Phase2) Contribute() {
-	// Sample toxic δ
-	var delta, deltaInv fr.Element
-	var deltaBI, deltaInvBI big.Int
-	delta.SetRandom()
-	deltaInv.Inverse(&delta)
-
-	delta.BigInt(&deltaBI)
-	deltaInv.BigInt(&deltaInvBI)
+	bitReverse(p.Parameters.G1.Z)
+	p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1]

-	// Set δ public key
-	c.PublicKey = newPublicKey(delta, c.Hash, 1)
+	commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments)

-	// Update δ
-	c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI)
-	c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI)
+	evals.G1.CKK = make([][]curve.G1Affine, len(commitments))
+	p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments))
+	p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments))
+	p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments))

-	// Update Z using δ⁻¹
-	for i := 0; i < len(c.Parameters.G1.Z); i++ {
-		c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI)
+	for j := range commitments {
+		evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted))
+		p.Parameters.G2.Sigma[j] = g2
 	}

-	// Update L using δ⁻¹
-	for i := 0; i < len(c.Parameters.G1.L); i++ {
-		c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI)
-	}
+	nbCommitted := internal.NbElements(commitments.GetPrivateCommitted())

-	// 4. Hash contribution
-	c.Hash = c.hash()
-}
+	// Evaluate PKK

-func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error {
-	contribs := append([]*Phase2{c0, c1}, c...)
-	for i := 0; i < len(contribs)-1; i++ {
-		if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil {
-			return err
+	p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments))
+	evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments))
+	committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted())
+	nbCommitmentsSeen := 0
+	for j := 0; j < nWires; j++ {
+		// since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C
+		var tmp curve.G1Affine
+		tmp.Add(&bA[j], &aB[j])
+		tmp.Add(&tmp, &C[j])
+		commitmentIndex := committedIterator.IndexIfNext(j)
+		isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j
+		if commitmentIndex != -1 {
+			evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp)
+		} else if j < nbPublic || isCommitment {
+			evals.G1.VKK = append(evals.G1.VKK, tmp)
+		} else {
+			p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp)
+		}
+		if isCommitment {
+			nbCommitmentsSeen++
 		}
 	}
-	return nil
-}
-
-func verifyPhase2(current, contribution *Phase2) error {
-	// Compute R for δ
-	deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1)
-	// Check for knowledge of δ
-	if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) {
-		return errors.New("couldn't verify knowledge of δ")
+	for j := range commitments {
+		p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j])
 	}

-	// Check for valid updates using previous parameters
-	if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) {
-		return errors.New("couldn't verify that [δ]₁ is based on previous contribution")
-	}
-	if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) {
-		return errors.New("couldn't verify that [δ]₂ is based on previous contribution")
-	}
+	p.Challenge = nil

-	// Check for valid updates of L and Z using
-	L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L)
-	if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) {
-		return errors.New("couldn't verify valid updates of L using δ⁻¹")
-	}
-	Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z)
-	if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) {
-		return errors.New("couldn't verify valid updates of L using δ⁻¹")
-	}
+	return evals
+}

-	// Check hash of the contribution
-	h := contribution.hash()
-	for i := 0; i < len(h); i++ {
-		if h[i] != contribution.Hash[i] {
-			return errors.New("couldn't verify hash of contribution")
+// VerifyPhase2 for circuit described by r1cs
+// using parameters from commons
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// c are the outputs from the contributors
+// WARNING: the last contribution object will be modified
+func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) {
+	prev := new(Phase2)
+	evals := prev.Initialize(r1cs, commons)
+	wp := gcUtils.NewWorkerPool()
+	defer wp.Stop()
+	for i := range c {
+		if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil {
+			return nil, nil, err
 		}
+		prev = c[i]
 	}
-	return nil
+	pk, vk := prev.Seal(commons, &evals, beaconChallenge)
+	return pk, vk, nil
 }

-func (c *Phase2) hash() []byte {
+func (p *Phase2) hash() []byte {
 	sha := sha256.New()
-	c.writeTo(sha)
+	if _, err := p.WriteTo(sha); err != nil {
+		panic(err)
+	}
+	sha.Write(p.Challenge)
 	return sha.Sum(nil)
 }
diff --git a/backend/groth16/bw6-761/mpcsetup/setup.go b/backend/groth16/bw6-761/mpcsetup/setup.go
index 97886b2b99..c94333e007 100644
--- a/backend/groth16/bw6-761/mpcsetup/setup.go
+++ b/backend/groth16/bw6-761/mpcsetup/setup.go
@@ -8,23 +8,46 @@ package mpcsetup

 import (
 	curve "github.com/consensys/gnark-crypto/ecc/bw6-761"
 	"github.com/consensys/gnark-crypto/ecc/bw6-761/fr/fft"
-	groth16 "github.com/consensys/gnark/backend/groth16/bw6-761"
+	"github.com/consensys/gnark-crypto/ecc/bw6-761/fr/pedersen"
+	"github.com/consensys/gnark-crypto/ecc/bw6-761/mpcsetup"
+	"github.com/consensys/gnark/backend/groth16"
+	groth16Impl "github.com/consensys/gnark/backend/groth16/bw6-761"
 )

-func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) {
+// Seal performs the final contribution and outputs the proving and verifying keys.
+// No randomization is performed at this step.
+// A verifier should simply re-run this and check
+// that it produces the same values.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// WARNING: Seal modifies p, just as Contribute does.
+// The result will be an INVALID Phase2 object, since no proof of correctness is produced.
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) {
+
+	// final contributions
+	contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas))
+	p.update(&contributions[0], contributions[1:])
+
 	_, _, _, g2 := curve.Generators()
+
+	var (
+		pk groth16Impl.ProvingKey
+		vk groth16Impl.VerifyingKey
+	)
+
 	// Initialize PK
-	pk.Domain = *fft.NewDomain(uint64(nConstraints))
-	pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0])
-	pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0])
-	pk.G1.Delta.Set(&srs2.Parameters.G1.Delta)
-	pk.G1.Z = srs2.Parameters.G1.Z
+	pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau)))
+	pk.G1.Alpha.Set(&commons.G1.AlphaTau[0])
+	pk.G1.Beta.Set(&commons.G1.BetaTau[0])
+	pk.G1.Delta.Set(&p.Parameters.G1.Delta)
+	pk.G1.Z = p.Parameters.G1.Z
 	bitReverse(pk.G1.Z)
-	pk.G1.K = srs2.Parameters.G1.L
-	pk.G2.Beta.Set(&srs1.Parameters.G2.Beta)
-	pk.G2.Delta.Set(&srs2.Parameters.G2.Delta)
+	pk.G1.K = p.Parameters.G1.PKK
+	pk.G2.Beta.Set(&commons.G2.Beta)
+	pk.G2.Delta.Set(&p.Parameters.G2.Delta)

 	// Filter out infinity points
 	nWires := len(evals.G1.A)
@@ -69,18 +92,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai
 	pk.G2.B = B2[:j]

 	// Initialize VK
-	vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0])
-	vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0])
-	vk.G1.Delta.Set(&srs2.Parameters.G1.Delta)
-	vk.G2.Beta.Set(&srs1.Parameters.G2.Beta)
-	vk.G2.Delta.Set(&srs2.Parameters.G2.Delta)
+	vk.G1.Alpha.Set(&commons.G1.AlphaTau[0])
+	vk.G1.Beta.Set(&commons.G1.BetaTau[0])
+	vk.G1.Delta.Set(&p.Parameters.G1.Delta)
+	vk.G2.Beta.Set(&commons.G2.Beta)
+	vk.G2.Delta.Set(&p.Parameters.G2.Delta)
 	vk.G2.Gamma.Set(&g2)
 	vk.G1.K = evals.G1.VKK

+	vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK))
+	pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK))
+	for i := range vk.CommitmentKeys {
+		vk.CommitmentKeys[i].G = g2
+		vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i])
+
+		pk.CommitmentKeys[i].Basis = evals.G1.CKK[i]
+		pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i]
+	}
+	vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted
+
 	// sets e, -[δ]2, -[γ]2
 	if err := vk.Precompute(); err != nil {
 		panic(err)
 	}

-	return pk, vk
+	return &pk, &vk
 }
diff --git a/backend/groth16/bw6-761/mpcsetup/setup_test.go b/backend/groth16/bw6-761/mpcsetup/setup_test.go
index dcbb22c31b..506ca5cd9f 100644
--- a/backend/groth16/bw6-761/mpcsetup/setup_test.go
+++ b/backend/groth16/bw6-761/mpcsetup/setup_test.go
@@ -6,9 +6,14 @@ package mpcsetup

 import (
+	"bytes"
+	"github.com/consensys/gnark-crypto/ecc"
 	curve "github.com/consensys/gnark-crypto/ecc/bw6-761"
 	"github.com/consensys/gnark-crypto/ecc/bw6-761/fr"
 	cs "github.com/consensys/gnark/constraint/bw6-761"
+	"io"
+	"slices"
+	"sync"
 	"testing"

 	"github.com/consensys/gnark/backend/groth16"
@@ -20,74 +25,76 @@ import (
 	native_mimc "github.com/consensys/gnark-crypto/ecc/bw6-761/fr/mimc"
 )

-func TestSetupCircuit(t *testing.T) {
-	if testing.Short() {
-		t.Skip()
-	}
-	const (
-		nContributionsPhase1 = 3
-		nContributionsPhase2 = 3
-		power                = 9
-	)
+// TestAll is a full integration test of the MPC setup
+func TestAll(t *testing.T) {
+	testAll(t, 3, 3)
+}

+func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) {
 	assert := require.New(t)

-	srs1 := InitPhase1(power)
-
-	// Make and verify contributions for phase1
-	for i := 1; i < nContributionsPhase1; i++ {
-		// we clone test purposes; but in practice, participant will receive a []byte, deserialize it,
-		// add his contribution and send back to coordinator.
-		prev := srs1.clone()
-
-		srs1.Contribute()
-		assert.NoError(VerifyPhase1(&prev, &srs1))
-	}
-
 	// Compile the circuit
-	var myCircuit Circuit
-	ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit)
-	assert.NoError(err)
+	ccs := getTestCircuit()

-	var evals Phase2Evaluations
-	r1cs := ccs.(*cs.R1CS)
-
-	// Prepare for phase-2
-	srs2, evals := InitPhase2(r1cs, &srs1)
+	domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints()))

-	// Make and verify contributions for phase1
-	for i := 1; i < nContributionsPhase2; i++ {
-		// we clone for test purposes; but in practice, participant will receive a []byte, deserialize it,
-		// add his contribution and send back to coordinator.
-		prev := srs2.clone()
+	var (
+		bb bytes.Buffer // simulating network communications
+		p1 Phase1
+		p2 Phase2
+	)
+	serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2))
+	phase1 := make([]*Phase1, nbContributionsPhase1)
+	phase2 := make([]*Phase2, nbContributionsPhase2)
+
+	serialize := func(v io.WriterTo) []byte {
+		bb.Reset()
+		_, err := v.WriteTo(&bb)
+		assert.NoError(err)
+		return slices.Clone(bb.Bytes())
+	}
+	deserialize := func(v io.ReaderFrom, b []byte) {
+		n, err := v.ReadFrom(bytes.NewReader(b))
+		assert.NoError(err)
+		assert.Equal(len(b), int(n))
+	}

-		srs2.Contribute()
-		assert.NoError(VerifyPhase2(&prev, &srs2))
+	p1.Initialize(domainSize)
+	for i := range phase1 {
+		p1.Contribute()
+		serialized[i] = serialize(&p1)
 	}

-	// Extract the proving and verifying keys
-	pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints())
+	// read all Phase1 objects
+	for i := range phase1 {
+		phase1[i] = new(Phase1)
+		deserialize(phase1[i], serialized[i])
+	}

-	// Build the witness
-	var preImage, hash fr.Element
+	// Verify contributions for phase 1 and generate non-circuit-specific parameters
+	srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...)
+	assert.NoError(err)
 	{
-		m := native_mimc.NewMiMC()
-		m.Write(preImage.Marshal())
-		hash.SetBytes(m.Sum(nil))
+		var commonsRead SrsCommons
+		deserialize(&commonsRead, serialize(&srsCommons))
+		srsCommons = commonsRead
 	}

-	witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField())
-	assert.NoError(err)
+	p2.Initialize(ccs, &srsCommons)
+	for i := range phase2 {
+		p2.Contribute()
+		serialized[i] = serialize(&p2)
+	}

-	pubWitness, err := witness.Public()
-	assert.NoError(err)
+	for i := range phase2 {
+		phase2[i] = new(Phase2)
+		deserialize(phase2[i], serialized[i])
+	}

-	// groth16: ensure proof is verified
-	proof, err := groth16.Prove(ccs, &pk, witness)
+	pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...)
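	// Note: VerifyPhase2 not only checks the chain of contributions; it also applies the
	// deterministic beacon "contribution" via Seal, so pk and vk are the final, usable keys.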
 	assert.NoError(err)

-	err = groth16.Verify(proof, &vk, pubWitness)
-	assert.NoError(err)
+	proveVerifyCircuit(t, pk, vk)
 }

 func BenchmarkPhase1(b *testing.B) {
@@ -95,13 +102,15 @@ func BenchmarkPhase1(b *testing.B) {

 	b.Run("init", func(b *testing.B) {
 		b.ResetTimer()
+		var srs1 Phase1
 		for i := 0; i < b.N; i++ {
-			_ = InitPhase1(power)
+			srs1.Initialize(1 << power)
 		}
 	})

 	b.Run("contrib", func(b *testing.B) {
-		srs1 := InitPhase1(power)
+		var srs1 Phase1
+		srs1.Initialize(1 << power)
 		b.ResetTimer()
 		for i := 0; i < b.N; i++ {
 			srs1.Contribute()
@@ -112,8 +121,10 @@ func BenchmarkPhase1(b *testing.B) {

 func BenchmarkPhase2(b *testing.B) {
 	const power = 14
-	srs1 := InitPhase1(power)
-	srs1.Contribute()
+	var p1 Phase1
+	p1.Initialize(1 << power)
+	p1.Contribute()
+	commons := p1.Seal([]byte("testing"))

 	var myCircuit Circuit
 	ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit)
@@ -124,17 +135,19 @@ func BenchmarkPhase2(b *testing.B) {
 	r1cs := ccs.(*cs.R1CS)

 	b.Run("init", func(b *testing.B) {
+		var p Phase2
 		b.ResetTimer()
 		for i := 0; i < b.N; i++ {
-			_, _ = InitPhase2(r1cs, &srs1)
+			p.Initialize(r1cs, &commons)
 		}
 	})

 	b.Run("contrib", func(b *testing.B) {
-		srs2, _ := InitPhase2(r1cs, &srs1)
+		var p Phase2
+		p.Initialize(r1cs, &commons)
 		b.ResetTimer()
 		for i := 0; i < b.N; i++ {
-			srs2.Contribute()
+			p.Contribute()
 		}
 	})

@@ -157,32 +170,49 @@ func (circuit *Circuit) Define(api frontend.API) error {
 	mimc.Write(circuit.PreImage)

 	api.AssertIsEqual(circuit.Hash, mimc.Sum())

-	return nil
+	c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash)
+	api.AssertIsDifferent(c, 0)
+
+	return err
 }

-func (phase1 *Phase1) clone() Phase1 {
-	r := Phase1{}
-	r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...)
-	r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...)
-	r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...)
+func assignCircuit() frontend.Circuit {
+
+	// Build the witness
+	var preImage, hash fr.Element
+
+	m := native_mimc.NewMiMC()
+	m.Write(preImage.Marshal())
+	hash.SetBytes(m.Sum(nil))
+
+	return &Circuit{PreImage: preImage, Hash: hash}

-	r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...)
-	r.Parameters.G2.Beta = phase1.Parameters.G2.Beta
+}

-	r.PublicKeys = phase1.PublicKeys
-	r.Hash = append(r.Hash, phase1.Hash...)
+var onceCircuit = sync.OnceValue(func() *cs.R1CS {
+	ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{})
+	if err != nil {
+		panic(err)
+	}
+	return ccs.(*cs.R1CS)
+})

-	return r
+func getTestCircuit() *cs.R1CS {
+	return onceCircuit()
 }

-func (phase2 *Phase2) clone() Phase2 {
-	r := Phase2{}
-	r.Parameters.G1.Delta = phase2.Parameters.G1.Delta
-	r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...)
-	r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...)
-	r.Parameters.G2.Delta = phase2.Parameters.G2.Delta
-	r.PublicKey = phase2.PublicKey
-	r.Hash = append(r.Hash, phase2.Hash...)
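// For orientation, the ceremony flow exercised by testAll above, reduced to a single
// participant (an editorial sketch, not part of the PR; error handling is elided and a
// compiled r1cs value is assumed to be in scope):
//
//	var p1 Phase1
//	p1.Initialize(8) // FFT domain size; must be a power of 2
//	p1.Contribute()  // in practice each participant does this in turn
//	commons, _ := VerifyPhase1(8, []byte("phase1 beacon"), &p1)
//
//	var p2 Phase2
//	p2.Initialize(r1cs, &commons) // also returns the Phase2Evaluations, discarded here
//	p2.Contribute()
//	pk, vk, _ := VerifyPhase2(r1cs, &commons, []byte("phase2 beacon"), &p2)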
+func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) {
+
+	witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField())
+	require.NoError(t, err)
+
+	pubWitness, err := witness.Public()
+	require.NoError(t, err)
+
+	// groth16: ensure proof is verified
+	proof, err := groth16.Prove(getTestCircuit(), pk, witness)
+	require.NoError(t, err)

-	return r
+	err = groth16.Verify(proof, vk, pubWitness)
+	require.NoError(t, err)
 }
diff --git a/backend/groth16/bw6-761/mpcsetup/utils.go b/backend/groth16/bw6-761/mpcsetup/utils.go
index f7c1a2c819..3162f45d10 100644
--- a/backend/groth16/bw6-761/mpcsetup/utils.go
+++ b/backend/groth16/bw6-761/mpcsetup/utils.go
@@ -6,46 +6,15 @@ package mpcsetup

 import (
-	"bytes"
-	"math/big"
-	"math/bits"
-	"runtime"
-
-	"github.com/consensys/gnark-crypto/ecc"
 	curve "github.com/consensys/gnark-crypto/ecc/bw6-761"
 	"github.com/consensys/gnark-crypto/ecc/bw6-761/fr"
+	gcUtils "github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/internal/utils"
+	"math/big"
+	"math/bits"
+	"sync"
 )

-type PublicKey struct {
-	SG  curve.G1Affine
-	SXG curve.G1Affine
-	XR  curve.G2Affine
-}
-
-func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey {
-	var pk PublicKey
-	_, _, g1, _ := curve.Generators()
-
-	var s fr.Element
-	var sBi big.Int
-	s.SetRandom()
-	s.BigInt(&sBi)
-	pk.SG.ScalarMultiplication(&g1, &sBi)
-
-	// compute x*sG1
-	var xBi big.Int
-	x.BigInt(&xBi)
-	pk.SXG.ScalarMultiplication(&pk.SG, &xBi)
-
-	// generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta)
-	R := genR(pk.SG, pk.SXG, challenge, dst)
-
-	// compute x*spG2
-	pk.XR.ScalarMultiplication(&R, &xBi)
-	return pk
-}
-
 func bitReverse[T any](a []T) {
 	n := uint64(len(a))
 	nn := uint64(64 - bits.TrailingZeros64(n))
@@ -58,17 +27,21 @@ func bitReverse[T any](a []T) {
 	}
 }

-// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form
-func powers(a fr.Element, n int) []fr.Element {
-	result := make([]fr.Element, n)
-	result[0] = fr.NewElement(1)
-	for i := 1; i < n; i++ {
-		result[i].Mul(&result[i-1], &a)
+// Returns [1, a, a², ..., aᴺ⁻¹ ]
+func powers(a *fr.Element, N int) []fr.Element {
+	if N == 0 {
+		return nil
+	}
+	result := make([]fr.Element, N)
+	result[0].SetOne()
+	for i := 1; i < N; i++ {
+		result[i].Mul(&result[i-1], a)
 	}
 	return result
 }

-// Returns [aᵢAᵢ, ...] in G1
+// Returns [aᵢAᵢ, ...]∈𝔾₁
+// it assumes len(A) ≤ len(a)
 func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) {
 	utils.Parallelize(len(A), func(start, end int) {
 		var tmp big.Int
@@ -79,7 +52,8 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) {
 	})
 }

-// Returns [aᵢAᵢ, ...] in G2
+// Returns [aᵢAᵢ, ...]∈𝔾₂
+// it assumes len(A) ≤ len(a)
 func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) {
 	utils.Parallelize(len(A), func(start, end int) {
 		var tmp big.Int
@@ -90,70 +64,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) {
 	})
 }

-// Check e(a₁, a₂) = e(b₁, b₂)
-func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool {
-	if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() {
-		panic("invalid point not in subgroup")
-	}
-	var na2 curve.G2Affine
-	na2.Neg(&a2)
-	res, err := curve.PairingCheck(
-		[]curve.G1Affine{a1, b1},
-		[]curve.G2Affine{na2, b2})
-	if err != nil {
-		panic(err)
-	}
-	return res
+// TODO @Tabaie replace with batch subgroup check, when available
+func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup {
+	return wp.Submit(len(s), func(start, end int) {
+		for i := start; i < end; i++ {
+			if !s[i].IsInSubGroup() {
+				errorReporter(i)
+				break
+			}
+		}
+	}, 1024) // TODO @Tabaie experimentally optimize minBlock
 }

-// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ
-func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) {
-	nc := runtime.NumCPU()
-	r := make([]fr.Element, len(A))
-	for i := 0; i < len(A); i++ {
-		r[i].SetRandom()
-	}
-	a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2})
-	b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2})
-	return
+// TODO @Tabaie replace with batch subgroup check, when available
+func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup {
+	return wp.Submit(len(s), func(start, end int) {
+		for i := start; i < end; i++ {
+			if !s[i].IsInSubGroup() {
+				errorReporter(i)
+				break
+			}
+		}
+	}, 1024) // TODO @Tabaie experimentally optimize minBlock
 }

-// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1
-func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) {
-	nc := runtime.NumCPU()
-	n := len(A)
-	r := make([]fr.Element, n-1)
-	for i := 0; i < n-1; i++ {
-		r[i].SetRandom()
-	}
-	L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2})
-	L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2})
-	return
+type verificationSettings struct {
+	wp *gcUtils.WorkerPool
 }

-// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2
-func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) {
-	nc := runtime.NumCPU()
-	n := len(A)
-	r := make([]fr.Element, n-1)
-	for i := 0; i < n-1; i++ {
-		r[i].SetRandom()
-	}
-	L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2})
-	L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2})
-	return
-}
+type verificationOption func(*verificationSettings)

-// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst)
-func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine {
-	var buf bytes.Buffer
-	buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2)
-	buf.Write(sG1.Marshal())
-	buf.Write(sxG1.Marshal())
-	buf.Write(challenge)
-	spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst})
-	if err != nil {
-		panic(err)
+func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption {
+	return func(s *verificationSettings) {
+		s.wp = wp
 	}
-	return spG2
 }
diff --git a/backend/groth16/bw6-761/setup.go b/backend/groth16/bw6-761/setup.go
index fea8d92c7f..e8c0ed394a 100644
--- a/backend/groth16/bw6-761/setup.go
+++ b/backend/groth16/bw6-761/setup.go
@@ -133,7 +133,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error {
 	vkK := make([]fr.Element, nbPublicWires)
 	ckK := make([][]fr.Element, len(commitmentInfo))
 	for i := range commitmentInfo {
-		ckK[i] = make([]fr.Element, len(privateCommitted[i]))
+		ckK[i] = make([]fr.Element, 0, len(privateCommitted[i]))
 	}

 	var t0, t1 fr.Element
@@ -145,37 +145,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error {
 			Add(&t1, &C[i]).
 			Mul(&t1, coeff)
 	}
-	vI := 0                                // number of public wires seen so far
-	cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment
-	nbPrivateCommittedSeen := 0            // = ∑ᵢ cI[i]
+	vI := 0 // number of public wires seen so far
+	committedIterator := internal.NewMergeIterator(privateCommitted)
+	nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i]
 	nbCommitmentsSeen := 0

 	for i := range A {
-		commitment := -1 // index of the commitment that commits to this variable as a private or commitment value
-		var isCommitment, isPublic bool
-		if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic {
+		commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed
+		isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables()
+		if !isPublic {
 			if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i {
 				isCommitment = true
 				nbCommitmentsSeen++
 			}
-
-			for j := range commitmentInfo { // does commitment j commit to i?
-				if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i {
-					commitment = j
-					break // frontend guarantees that no private variable is committed to more than once
-				}
-			}
 		}

-		if isPublic || commitment != -1 || isCommitment {
+		if isPublic || isCommitment || commitmentIndex != -1 {
 			computeK(i, &toxicWaste.gammaInv)

 			if isPublic || isCommitment {
 				vkK[vI] = t1
 				vI++
 			} else { // committed and private
-				ckK[commitment][cI[commitment]] = t1
-				cI[commitment]++
+				ckK[commitmentIndex] = append(ckK[commitmentIndex], t1)
 				nbPrivateCommittedSeen++
 			}
 		} else {
diff --git a/backend/groth16/bw6-761/verify.go b/backend/groth16/bw6-761/verify.go
index df89379e87..e1ba4a95a4 100644
--- a/backend/groth16/bw6-761/verify.go
+++ b/backend/groth16/bw6-761/verify.go
@@ -16,7 +16,6 @@ import (
 	"github.com/consensys/gnark-crypto/ecc/bw6-761/fr"
 	"github.com/consensys/gnark-crypto/ecc/bw6-761/fr/hash_to_field"
 	"github.com/consensys/gnark-crypto/ecc/bw6-761/fr/pedersen"
-	"github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/backend"
 	"github.com/consensys/gnark/backend/solidity"
 	"github.com/consensys/gnark/constraint"
@@ -64,7 +63,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac
 	maxNbPublicCommitted := 0
 	for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments
-		maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s))
+		maxNbPublicCommitted = max(maxNbPublicCommitted, len(s))
 	}
 	commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes)
 	commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes)
diff --git a/backend/groth16/internal/utils.go b/backend/groth16/internal/utils.go
index 6062ef57ce..0694b18497 100644
--- a/backend/groth16/internal/utils.go
+++ b/backend/groth16/internal/utils.go
@@ -1,5 +1,10 @@
 package internal

+import (
+	"math"
+	"slices"
+)
+
 func ConcatAll(slices ...[]int) []int { // copyright note: written by GitHub Copilot
 	totalLen := 0
 	for _, s := range slices {
@@ -20,3 +25,61 @@ func NbElements(slices [][]int) int { // copyright note: written by GitHub Copil
 	}
 	return totalLen
 }
+
+// NewMergeIterator assumes that all slices in s are sorted
+func NewMergeIterator(s [][]int) *MergeIterator {
+	res := &MergeIterator{slices: slices.Clone(s)}
+	res.findLeast()
+	return res
+}
+
+// MergeIterator iterates through a merging of multiple sorted slices
+type MergeIterator struct {
+	slices     [][]int
+	leastIndex int
+}
+
+func (i *MergeIterator) findLeast() {
+	value := math.MaxInt
+	i.leastIndex = -1
+	for j := range i.slices {
+		if len(i.slices[j]) == 0 {
+			continue
+		}
+		if v := i.slices[j][0]; v < value {
+			value = v
+			i.leastIndex = j
+		}
+	}
+}
+
+// Peek returns the next smallest value and the index of the slice it came from
+// If the iterator is empty, Peek returns (math.MaxInt, -1)
+func (i *MergeIterator) Peek() (value, index int) {
+	if i.leastIndex == -1 {
+		return math.MaxInt, -1
+	}
+	return i.slices[i.leastIndex][0], i.leastIndex
+}
+
+// Next returns the next smallest value and the index of the slice it came from, and advances the iterator
+// If the iterator is empty, Next returns (math.MaxInt, -1)
+func (i *MergeIterator) Next() (value, index int) {
+	value, index = i.Peek()
+	if index == -1 { // empty iterator: nothing to advance past
+		return
+	}
+	i.slices[i.leastIndex] = i.slices[i.leastIndex][1:]
+	i.findLeast()
+	return
+}
+
+// IndexIfNext returns the index of the slice and advances the iterator if the next value is value, otherwise returns -1
+// If the iterator is empty, IndexIfNext returns -1
+func (i *MergeIterator) IndexIfNext(value int) int {
+	if v, index := i.Peek(); v == value {
+		i.Next()
+		return index
+	}
+	return -1
+}
diff --git a/constraint/bls12-377/gkr.go b/constraint/bls12-377/gkr.go
index 3bb47e8ce9..948ed1510d 100644
--- a/constraint/bls12-377/gkr.go
+++ b/constraint/bls12-377/gkr.go
@@ -14,7 +14,7 @@ import (
 	"github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/constraint"
 	hint "github.com/consensys/gnark/constraint/solver"
-	"github.com/consensys/gnark/internal/algo_utils"
+	algo_utils "github.com/consensys/gnark/internal/utils"
 	"hash"
 	"math/big"
 	"sync"
diff --git a/constraint/bls12-381/gkr.go b/constraint/bls12-381/gkr.go
index 3d028efdfd..acf57d9d88 100644
--- a/constraint/bls12-381/gkr.go
+++ b/constraint/bls12-381/gkr.go
@@ -14,7 +14,7 @@ import (
 	"github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/constraint"
 	hint "github.com/consensys/gnark/constraint/solver"
-	"github.com/consensys/gnark/internal/algo_utils"
+	algo_utils "github.com/consensys/gnark/internal/utils"
 	"hash"
 	"math/big"
 	"sync"
diff --git a/constraint/bls24-315/gkr.go b/constraint/bls24-315/gkr.go
index 0397c58171..e39d7447c7 100644
--- a/constraint/bls24-315/gkr.go
+++ b/constraint/bls24-315/gkr.go
@@ -14,7 +14,7 @@ import (
 	"github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/constraint"
 	hint "github.com/consensys/gnark/constraint/solver"
-	"github.com/consensys/gnark/internal/algo_utils"
+	algo_utils "github.com/consensys/gnark/internal/utils"
 	"hash"
 	"math/big"
 	"sync"
diff --git a/constraint/bls24-317/gkr.go b/constraint/bls24-317/gkr.go
index 1a4e8ad137..76d080a489 100644
--- a/constraint/bls24-317/gkr.go
+++ b/constraint/bls24-317/gkr.go
@@ -14,7 +14,7 @@ import (
 	"github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/constraint"
 	hint "github.com/consensys/gnark/constraint/solver"
-	"github.com/consensys/gnark/internal/algo_utils"
+	algo_utils "github.com/consensys/gnark/internal/utils"
 	"hash"
 	"math/big"
 	"sync"
diff --git a/constraint/bn254/gkr.go b/constraint/bn254/gkr.go
index 2497c7c209..88dd7905ab 100644
--- a/constraint/bn254/gkr.go
+++ b/constraint/bn254/gkr.go
@@ -14,7 +14,7 @@ import (
 	"github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/constraint"
 	hint "github.com/consensys/gnark/constraint/solver"
-	"github.com/consensys/gnark/internal/algo_utils"
+	algo_utils "github.com/consensys/gnark/internal/utils"
 	"hash"
 	"math/big"
 	"sync"
diff --git a/constraint/bw6-633/gkr.go b/constraint/bw6-633/gkr.go
index 8d8f53bb81..81fbe4c52c 100644
--- a/constraint/bw6-633/gkr.go
+++ b/constraint/bw6-633/gkr.go
@@ -14,7 +14,7 @@ import (
 	"github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/constraint"
 	hint "github.com/consensys/gnark/constraint/solver"
-	"github.com/consensys/gnark/internal/algo_utils"
+	algo_utils "github.com/consensys/gnark/internal/utils"
 	"hash"
 	"math/big"
 	"sync"
diff --git a/constraint/bw6-761/gkr.go b/constraint/bw6-761/gkr.go
index 4375c8057f..0066d302c7 100644
--- a/constraint/bw6-761/gkr.go
+++ b/constraint/bw6-761/gkr.go
@@ -14,7 +14,7 @@ import (
 	"github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/constraint"
 	hint "github.com/consensys/gnark/constraint/solver"
-	"github.com/consensys/gnark/internal/algo_utils"
+	algo_utils "github.com/consensys/gnark/internal/utils"
 	"hash"
 	"math/big"
 	"sync"
diff --git a/constraint/gkr.go b/constraint/gkr.go
index e1337cb0b9..f9d8727ca3 100644
--- a/constraint/gkr.go
+++ b/constraint/gkr.go
@@ -4,9 +4,8 @@ import (
 	"fmt"
 	"sort"

-	"github.com/consensys/gnark-crypto/utils"
 	"github.com/consensys/gnark/constraint/solver"
-	"github.com/consensys/gnark/internal/algo_utils"
+	"github.com/consensys/gnark/internal/utils"
 )

 type GkrVariable int // Just an alias to hide implementation details. May be more trouble than worth
@@ -83,26 +82,26 @@ func (d *GkrInfo) Compile(nbInstances int) (GkrPermutations, error) {
 		}
 	}

-	p.SortedInstances, _ = algo_utils.TopologicalSort(instanceDeps)
-	p.InstancesPermutation = algo_utils.InvertPermutation(p.SortedInstances)
+	p.SortedInstances, _ = utils.TopologicalSort(instanceDeps)
+	p.InstancesPermutation = utils.InvertPermutation(p.SortedInstances)

 	// this whole circuit sorting is a bit of a charade. if things are built using an api, there's no way it could NOT already be topologically sorted
 	// worth keeping for future-proofing?
-	inputs := algo_utils.Map(d.Circuit, func(w GkrWire) []int {
+	inputs := utils.Map(d.Circuit, func(w GkrWire) []int {
 		return w.Inputs
 	})

 	var uniqueOuts [][]int
-	p.SortedWires, uniqueOuts = algo_utils.TopologicalSort(inputs)
-	p.WiresPermutation = algo_utils.InvertPermutation(p.SortedWires)
-	wirePermutationAt := algo_utils.SliceAt(p.WiresPermutation)
+	p.SortedWires, uniqueOuts = utils.TopologicalSort(inputs)
+	p.WiresPermutation = utils.InvertPermutation(p.SortedWires)
+	wirePermutationAt := utils.SliceAt(p.WiresPermutation)
 	sorted := make([]GkrWire, len(d.Circuit)) // TODO: Directly manipulate d.Circuit instead
 	for newI, oldI := range p.SortedWires {
 		oldW := d.Circuit[oldI]

 		if !oldW.IsInput() {
-			d.MaxNIns = utils.Max(d.MaxNIns, len(oldW.Inputs))
+			d.MaxNIns = max(d.MaxNIns, len(oldW.Inputs))
 		}

 		for j := range oldW.Dependencies {
@@ -122,7 +121,7 @@ func (d *GkrInfo) Compile(nbInstances int) (GkrPermutations, error) {

 		sorted[newI] = GkrWire{
 			Gate:            oldW.Gate,
-			Inputs:          algo_utils.Map(oldW.Inputs, wirePermutationAt),
+			Inputs:          utils.Map(oldW.Inputs, wirePermutationAt),
 			Dependencies:    oldW.Dependencies,
 			NbUniqueOutputs: len(uniqueOuts[oldI]),
 		}
diff --git a/go.mod b/go.mod
index caa4a8e03a..dbeef78306 100644
--- a/go.mod
+++ b/go.mod
@@ -5,11 +5,11 @@ go 1.22

 toolchain go1.22.6

 require (
-	github.com/bits-and-blooms/bitset v1.14.2
+	github.com/bits-and-blooms/bitset v1.20.0
 	github.com/blang/semver/v4 v4.0.0
-	github.com/consensys/bavard v0.1.25
+	github.com/consensys/bavard v0.1.27
 	github.com/consensys/compress v0.2.5
-	github.com/consensys/gnark-crypto v0.14.1-0.20250116204316-e7fd38b0a0a6
+	github.com/consensys/gnark-crypto v0.15.0
 	github.com/fxamacker/cbor/v2 v2.7.0
 	github.com/google/go-cmp v0.6.0
 	github.com/google/pprof v0.0.0-20240727154555-813a5fbdbec8
@@ -18,10 +18,10 @@ require (
 	github.com/leanovate/gopter v0.2.11
 	github.com/ronanh/intcomp v1.1.0
 	github.com/rs/zerolog v1.33.0
-	github.com/stretchr/testify v1.9.0
-	golang.org/x/crypto v0.31.0
+	github.com/stretchr/testify v1.10.0
+	golang.org/x/crypto v0.32.0
 	golang.org/x/exp v0.0.0-20240823005443-9b4947da3948
-	golang.org/x/sync v0.8.0
+	golang.org/x/sync v0.10.0
 )

 require (
@@ -31,7 +31,7 @@ require (
 	github.com/mmcloughlin/addchain v0.4.0 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
 	github.com/x448/float16 v0.8.4 // indirect
-	golang.org/x/sys v0.28.0 // indirect
+	golang.org/x/sys v0.29.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 	rsc.io/tmplfunc v0.0.3 // indirect
 )
diff --git a/go.sum b/go.sum
index e20d4967aa..5f24bbd3a5 100644
--- a/go.sum
+++ b/go.sum
@@ -44,8 +44,8 @@ github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hC
 github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
 github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
 github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
-github.com/bits-and-blooms/bitset v1.14.2 h1:YXVoyPndbdvcEVcseEovVfp0qjJp7S+i5+xgp/Nfbdc=
-github.com/bits-and-blooms/bitset v1.14.2/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
+github.com/bits-and-blooms/bitset v1.20.0 h1:2F+rfL86jE2d/bmw7OhqUg2Sj/1rURkBn3MdfoPyRVU=
+github.com/bits-and-blooms/bitset v1.20.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
 github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
 github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
 github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
@@ -57,12 +57,12 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk
 github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
 github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
 github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
-github.com/consensys/bavard v0.1.25 h1:5YcSBnp03/HvfpKaIQLr/ecspTp2k8YNR5rQLOWvUyc=
-github.com/consensys/bavard v0.1.25/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs=
+github.com/consensys/bavard v0.1.27 h1:j6hKUrGAy/H+gpNrpLU3I26n1yc+VMGmd6ID5+gAhOs=
+github.com/consensys/bavard v0.1.27/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs=
 github.com/consensys/compress v0.2.5 h1:gJr1hKzbOD36JFsF1AN8lfXz1yevnJi1YolffY19Ntk=
 github.com/consensys/compress v0.2.5/go.mod h1:pyM+ZXiNUh7/0+AUjUf9RKUM6vSH7T/fsn5LLS0j1Tk=
-github.com/consensys/gnark-crypto v0.14.1-0.20250116204316-e7fd38b0a0a6 h1:P4DeR8HYfQGl4Vj6KEv0Eszcokroit/U1dRrUsgt+js=
-github.com/consensys/gnark-crypto v0.14.1-0.20250116204316-e7fd38b0a0a6/go.mod h1:q9s22Y0WIHd9UCBfD+xGeW8wDJ7WAGZZpMrLFqzBzrQ=
+github.com/consensys/gnark-crypto v0.15.0 h1:OXsWnhheHV59eXIzhL5OIexa/vqTK8wtRYQCtwfMDtY=
+github.com/consensys/gnark-crypto v0.15.0/go.mod h1:Ke3j06ndtPTVvo++PhGNgvm+lgpLvzbcE2MqljY7diU=
 github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
 github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
 github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
@@ -271,8 +271,8 @@ github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81P
 github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
-github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
 github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
 github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
 github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
@@ -304,8 +304,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
 golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
-golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
+golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
+golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -410,8 +410,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
-golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
+golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -462,8 +462,8 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
-golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
+golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
diff --git a/internal/generator/backend/main.go b/internal/generator/backend/main.go
index 30e8e21b87..ed5fa977ea 100644
--- a/internal/generator/backend/main.go
+++ b/internal/generator/backend/main.go
@@ -177,7 +177,6 @@ func main() {
 		entries = []bavard.Entry{
 			{File: filepath.Join(groth16MpcSetupDir, "lagrange.go"), Templates: []string{"groth16/mpcsetup/lagrange.go.tmpl", importCurve}},
 			{File: filepath.Join(groth16MpcSetupDir, "marshal.go"), Templates: []string{"groth16/mpcsetup/marshal.go.tmpl", importCurve}},
-			{File: filepath.Join(groth16MpcSetupDir, "marshal_test.go"), Templates: []string{"groth16/mpcsetup/marshal_test.go.tmpl", importCurve}},
 			{File: filepath.Join(groth16MpcSetupDir, "phase1.go"), Templates: []string{"groth16/mpcsetup/phase1.go.tmpl", importCurve}},
 			{File: filepath.Join(groth16MpcSetupDir, "phase2.go"), Templates: []string{"groth16/mpcsetup/phase2.go.tmpl", importCurve}},
 			{File: filepath.Join(groth16MpcSetupDir, "setup.go"), Templates: []string{"groth16/mpcsetup/setup.go.tmpl", importCurve}},
diff --git a/internal/generator/backend/template/representations/gkr.go.tmpl b/internal/generator/backend/template/representations/gkr.go.tmpl
index 32ff5da874..9030cc43a7 100644
--- a/internal/generator/backend/template/representations/gkr.go.tmpl +++ b/internal/generator/backend/template/representations/gkr.go.tmpl @@ -7,7 +7,7 @@ import ( "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/constraint" hint "github.com/consensys/gnark/constraint/solver" - "github.com/consensys/gnark/internal/algo_utils" + algo_utils "github.com/consensys/gnark/internal/utils" "hash" "math/big" "sync" diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl index c184bb2829..712e5be68c 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.setup.go.tmpl @@ -59,7 +59,7 @@ type VerifyingKey struct { // e(α, β) e curve.GT // not serialized - CommitmentKeys []pedersen.VerifyingKey + CommitmentKeys []pedersen.VerifyingKey PublicAndCommitmentCommitted [][]int // indexes of public/commitment committed variables } @@ -126,7 +126,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vkK := make([]fr.Element, nbPublicWires) ckK := make([][]fr.Element, len(commitmentInfo)) for i := range commitmentInfo { - ckK[i] = make([]fr.Element, len(privateCommitted[i])) + ckK[i] = make([]fr.Element, 0, len(privateCommitted[i])) } var t0, t1 fr.Element @@ -138,37 +138,29 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { Add(&t1, &C[i]). Mul(&t1, coeff) } - vI := 0 // number of public wires seen so far - cI := make([]int, len(commitmentInfo)) // number of private committed wires seen so far for each commitment + vI := 0 // number of public wires seen so far + committedIterator := internal.NewMergeIterator(privateCommitted) nbPrivateCommittedSeen := 0 // = ∑ᵢ cI[i] nbCommitmentsSeen := 0 for i := range A { - commitment := -1 // index of the commitment that commits to this variable as a private or commitment value - var isCommitment, isPublic bool - if isPublic = i < r1cs.GetNbPublicVariables(); !isPublic { + commitmentIndex := committedIterator.IndexIfNext(i) // the index of the commitment that commits to the wire i. -1 if i is not committed + isCommitment, isPublic := false, i < r1cs.GetNbPublicVariables() + if !isPublic { if nbCommitmentsSeen < len(commitmentWires) && commitmentWires[nbCommitmentsSeen] == i { isCommitment = true nbCommitmentsSeen++ } - - for j := range commitmentInfo { // does commitment j commit to i? 
- if cI[j] < len(privateCommitted[j]) && privateCommitted[j][cI[j]] == i { - commitment = j - break // frontend guarantees that no private variable is committed to more than once - } - } } - if isPublic || commitment != -1 || isCommitment { + if isPublic || isCommitment || commitmentIndex != -1 { computeK(i, &toxicWaste.gammaInv) if isPublic || isCommitment { vkK[vI] = t1 vI++ } else { // committed and private - ckK[commitment][cI[commitment]] = t1 - cI[commitment]++ + ckK[commitmentIndex] = append(ckK[commitmentIndex], t1) nbPrivateCommittedSeen++ } } else { @@ -245,7 +237,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { offset += len(B) bitReverse(g1PointsAff[offset : offset+int(domain.Cardinality)]) - sizeZ := int(domain.Cardinality)-1 // deg(H)=deg(A*B-C/X^n-1)=(n-1)+(n-1)-n=n-2 + sizeZ := int(domain.Cardinality) - 1 // deg(H)=deg(A*B-C/X^n-1)=(n-1)+(n-1)-n=n-2 pk.G1.Z = g1PointsAff[offset : offset+sizeZ] offset += int(domain.Cardinality) @@ -321,7 +313,7 @@ func Setup(r1cs *cs.R1CS, pk *ProvingKey, vk *VerifyingKey) error { vk.G1.Delta = pk.G1.Delta if err := vk.Precompute(); err != nil { - return err + return err } // set domain @@ -404,7 +396,7 @@ func setupABC(r1cs *cs.R1CS, domain *fft.Domain, toxicWaste toxicWaste) (A []fr. j := 0 it := r1cs.GetR1CIterator() - for c := it.Next(); c!=nil; c = it.Next() { + for c := it.Next(); c != nil; c = it.Next() { for _, t := range c.L { accumulate(&A[t.WireID()], t, &L) } @@ -419,7 +411,7 @@ func setupABC(r1cs *cs.R1CS, domain *fft.Domain, toxicWaste toxicWaste) (A []fr. L.Mul(&L, &w) L.Mul(&L, &t[j]) L.Mul(&L, &tInv[j+1]) - + j++ } @@ -557,7 +549,7 @@ func DummySetup(r1cs *cs.R1CS, pk *ProvingKey) error { } } - pk.CommitmentKeys,_, err = pedersen.Setup(commitmentBases) + pk.CommitmentKeys, _, err = pedersen.Setup(commitmentBases) if err != nil { return err } @@ -575,7 +567,7 @@ func dummyInfinityCount(r1cs *cs.R1CS) (nbZeroesA, nbZeroesB int) { B := make([]bool, nbWires) it := r1cs.GetR1CIterator() - for c := it.Next(); c!=nil; c = it.Next() { + for c := it.Next(); c != nil; c = it.Next() { for _, t := range c.L { A[t.WireID()] = true } @@ -584,7 +576,6 @@ func dummyInfinityCount(r1cs *cs.R1CS) (nbZeroesA, nbZeroesB int) { } } - for i := 0; i < nbWires; i++ { if !A[i] { nbZeroesA++ diff --git a/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl index 06abd155e7..24682c2b74 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/groth16.verify.go.tmpl @@ -17,7 +17,6 @@ import ( {{- template "import_fr" . }} {{- template "import_pedersen" .}} {{- template "import_hash_to_field" . 
}} - "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/backend" "github.com/consensys/gnark/backend/solidity" "github.com/consensys/gnark/constraint" @@ -65,7 +64,7 @@ func Verify(proof *Proof, vk *VerifyingKey, publicWitness fr.Vector, opts ...bac maxNbPublicCommitted := 0 for _, s := range vk.PublicAndCommitmentCommitted { // iterate over commitments - maxNbPublicCommitted = utils.Max(maxNbPublicCommitted, len(s)) + maxNbPublicCommitted = max(maxNbPublicCommitted, len(s)) } commitmentsSerialized := make([]byte, len(vk.PublicAndCommitmentCommitted)*fr.Bytes) commitmentPrehashSerialized := make([]byte, curve.SizeOfG1AffineUncompressed+maxNbPublicCommitted*fr.Bytes) diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/lagrange.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/lagrange.go.tmpl index 348c567418..c96f439aa8 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/lagrange.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/lagrange.go.tmpl @@ -70,7 +70,7 @@ func butterflyG2(a *curve.G2Affine, b *curve.G2Affine) { b.Sub(&t, b) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[0], &a[4]) butterflyG1(&a[1], &a[5]) @@ -98,7 +98,7 @@ func kerDIF8G1(a []curve.G1Affine, twiddles [][]fr.Element, stage int) { butterflyG1(&a[6], &a[7]) } -// kerDIF8 is a kernel that process a FFT of size 8 +// kerDIF8 is a kernel that processes an FFT of size 8 func kerDIF8G2(a []curve.G2Affine, twiddles [][]fr.Element, stage int) { butterflyG2(&a[0], &a[4]) butterflyG2(&a[1], &a[5]) diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl index 3af994ae04..a76381102d 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal.go.tmpl @@ -1,165 +1,245 @@ import ( "io" - + "encoding/binary" + "github.com/consensys/gnark/internal/utils" {{- template "import_curve" . 
}}
+	"github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/mpcsetup"
 )
+
 // WriteTo implements io.WriterTo
-func (phase1 *Phase1) WriteTo(writer io.Writer) (int64, error) {
-	n, err := phase1.writeTo(writer)
-	if err != nil {
-		return n, err
+// It does not write the Challenge from the previous contribution
+func (p *Phase1) WriteTo(writer io.Writer) (n int64, err error) {
+	var dn int64
+	for _, v := range []io.WriterTo{
+		&p.proofs.Tau,
+		&p.proofs.Alpha,
+		&p.proofs.Beta,
+		&p.parameters,
+	} {
+		dn, err = v.WriteTo(writer)
+		n += dn
+		if err != nil {
+			return
+		}
+	}
+	return
+}
+
+// ReadFrom implements io.ReaderFrom
+// It does not read the Challenge from the previous contribution
+func (p *Phase1) ReadFrom(reader io.Reader) (n int64, err error) {
+	var dn int64
+	for _, v := range []io.ReaderFrom{
+		&p.proofs.Tau,
+		&p.proofs.Alpha,
+		&p.proofs.Beta,
+		&p.parameters,
+	} {
+		dn, err = v.ReadFrom(reader)
+		n += dn
+		if err != nil {
+			return
+		}
 	}
-	nBytes, err := writer.Write(phase1.Hash)
-	return int64(nBytes) + n, err
+	return
 }

-func (phase1 *Phase1) writeTo(writer io.Writer) (int64, error) {
-	toEncode := []interface{}{
-		&phase1.PublicKeys.Tau.SG,
-		&phase1.PublicKeys.Tau.SXG,
-		&phase1.PublicKeys.Tau.XR,
-		&phase1.PublicKeys.Alpha.SG,
-		&phase1.PublicKeys.Alpha.SXG,
-		&phase1.PublicKeys.Alpha.XR,
-		&phase1.PublicKeys.Beta.SG,
-		&phase1.PublicKeys.Beta.SXG,
-		&phase1.PublicKeys.Beta.XR,
-		phase1.Parameters.G1.Tau,
-		phase1.Parameters.G1.AlphaTau,
-		phase1.Parameters.G1.BetaTau,
-		phase1.Parameters.G2.Tau,
-		&phase1.Parameters.G2.Beta,
+// refsSlice returns a slice of references to the parameters of p
+func (p *Phase2) refsSlice() []any {
+	nbCommitments := len(p.Parameters.G2.Sigma)
+	if nbCommitments > 65535 {
+		panic("nbCommitments not fitting in 16 bits")
+	}
+
+	expectedLen := 2*nbCommitments + 5
+	refs := make([]any, 5, expectedLen)
+	refs[0] = uint16(nbCommitments)
+	refs[1] = &p.Parameters.G1.Delta
+	refs[2] = &p.Parameters.G1.PKK // unique size: private input size, excluding those committed to
+	refs[3] = &p.Parameters.G1.Z   // unique size: N-1
+	refs[4] = &p.Parameters.G2.Delta
+
+	refs = utils.AppendRefs(refs, p.Parameters.G1.SigmaCKK)
+	refs = utils.AppendRefs(refs, p.Parameters.G2.Sigma)
+
+	if len(refs) != expectedLen {
+		panic("incorrect length estimate")
 	}
+	return refs
+}
+
+// WriteTo implements io.WriterTo
+func (p *Phase2) WriteTo(writer io.Writer) (int64, error) {
+
+	// write the parameters
 	enc := curve.NewEncoder(writer)
-	for _, v := range toEncode {
+	for _, v := range p.refsSlice() {
 		if err := enc.Encode(v); err != nil {
 			return enc.BytesWritten(), err
 		}
 	}
-	return enc.BytesWritten(), nil
+
+	// write the proofs
+	dn, err := p.Delta.WriteTo(writer)
+	n := enc.BytesWritten() + dn
+	if err != nil {
+		return n, err
+	}
+
+	for i := range p.Sigmas {
+		dn, err = p.Sigmas[i].WriteTo(writer)
+		n += dn
+		if err != nil {
+			return n, err
+		}
+	}
+
+	return n, nil
 }

 // ReadFrom implements io.ReaderFrom
-func (phase1 *Phase1) ReadFrom(reader io.Reader) (int64, error) {
-	toEncode := []interface{}{
-		&phase1.PublicKeys.Tau.SG,
-		&phase1.PublicKeys.Tau.SXG,
-		&phase1.PublicKeys.Tau.XR,
-		&phase1.PublicKeys.Alpha.SG,
-		&phase1.PublicKeys.Alpha.SXG,
-		&phase1.PublicKeys.Alpha.XR,
-		&phase1.PublicKeys.Beta.SG,
-		&phase1.PublicKeys.Beta.SXG,
-		&phase1.PublicKeys.Beta.XR,
-		&phase1.Parameters.G1.Tau,
-		&phase1.Parameters.G1.AlphaTau,
-		&phase1.Parameters.G1.BetaTau,
-		&phase1.Parameters.G2.Tau,
-		&phase1.Parameters.G2.Beta,
+func (p *Phase2) ReadFrom(reader io.Reader) (int64, error) {
+	var 
nbCommitments uint16 + + if err := binary.Read(reader, binary.BigEndian, &nbCommitments); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(2) // we've definitely successfully read 2 bytes + + p.Sigmas = make([]mpcsetup.UpdateProof, nbCommitments) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, nbCommitments) + p.Parameters.G2.Sigma = make([]curve.G2Affine, nbCommitments) dec := curve.NewDecoder(reader) - for _, v := range toEncode { + for _, v := range p.refsSlice()[1:] { // nbCommitments already read if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - phase1.Hash = make([]byte, 32) - nBytes, err := reader.Read(phase1.Hash) - return dec.BytesRead() + int64(nBytes), err -} + n += dec.BytesRead() -// WriteTo implements io.WriterTo -func (phase2 *Phase2) WriteTo(writer io.Writer) (int64, error) { - n, err := phase2.writeTo(writer) + dn, err := p.Delta.ReadFrom(reader) + n += dn if err != nil { return n, err } - nBytes, err := writer.Write(phase2.Hash) - return int64(nBytes) + n, err + + for i := range p.Sigmas { + dn, err = p.Sigmas[i].ReadFrom(reader) + n += dn + if err != nil { + return n, err + } + } + + return n, nil } -func (c *Phase2) writeTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - c.Parameters.G1.L, - c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) refsSlice() []any { + N := uint64(len(c.G1.A)) + expectedLen := 3*N + 3 + refs := make([]any, 3, expectedLen) + refs[0] = &c.G1.CKK + refs[1] = &c.G1.VKK + refs[2] = &c.PublicAndCommitmentCommitted + refs = utils.AppendRefs(refs, c.G1.A) + refs = utils.AppendRefs(refs, c.G1.B) + refs = utils.AppendRefs(refs, c.G2.B) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +// WriteTo implements io.WriterTo +func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2) ReadFrom(reader io.Reader) (int64, error) { - dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.PublicKey.SG, - &c.PublicKey.SXG, - &c.PublicKey.XR, - &c.Parameters.G1.Delta, - &c.Parameters.G1.L, - &c.Parameters.G1.Z, - &c.Parameters.G2.Delta, +func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { + var N uint64 + if err := binary.Read(reader, binary.BigEndian, &N); err != nil { + return -1, err // binary.Read doesn't return the number of bytes read } + n := int64(8) + + c.G1.A = make([]curve.G1Affine, N) + c.G1.B = make([]curve.G1Affine, N) + c.G2.B = make([]curve.G2Affine, N) - for _, v := range toEncode { + dec := curve.NewDecoder(reader) + for _, v := range c.refsSlice()[1:] { if err := dec.Decode(v); err != nil { - return dec.BytesRead(), err + return n + dec.BytesRead(), err } } - c.Hash = make([]byte, 32) - n, err := reader.Read(c.Hash) - return int64(n) + dec.BytesRead(), err - + return n + dec.BytesRead(), nil } -// WriteTo implements io.WriterTo -func (c *Phase2Evaluations) WriteTo(writer io.Writer) (int64, error) { - enc := curve.NewEncoder(writer) - toEncode := []interface{}{ - c.G1.A, - c.G1.B, - c.G2.B, +// refsSlice produces a slice 
consisting of references to all sub-elements +// prepended by the size parameter, to be used in WriteTo and ReadFrom functions +func (c *SrsCommons) refsSlice() []any { + N := uint64(len(c.G2.Tau)) + expectedLen := 5*N - 1 + // size N 1 + // [β]₂ 1 + // [τⁱ]₁ for 1 ≤ i ≤ 2N-2 2N-2 + // [τⁱ]₂ for 1 ≤ i ≤ N-1 N-1 + // [ατⁱ]₁ for 0 ≤ i ≤ N-1 N + // [βτⁱ]₁ for 0 ≤ i ≤ N-1 N + refs := make([]any, 2, expectedLen) + refs[0] = N + refs[1] = &c.G2.Beta + refs = utils.AppendRefs(refs, c.G1.Tau[1:]) + refs = utils.AppendRefs(refs, c.G2.Tau[1:]) + refs = utils.AppendRefs(refs, c.G1.BetaTau) + refs = utils.AppendRefs(refs, c.G1.AlphaTau) + + if uint64(len(refs)) != expectedLen { + panic("incorrect length estimate") } - for _, v := range toEncode { + return refs +} + +func (c *SrsCommons) WriteTo(writer io.Writer) (int64, error) { + enc := curve.NewEncoder(writer) + for _, v := range c.refsSlice() { if err := enc.Encode(v); err != nil { return enc.BytesWritten(), err } } - return enc.BytesWritten(), nil } // ReadFrom implements io.ReaderFrom -func (c *Phase2Evaluations) ReadFrom(reader io.Reader) (int64, error) { +func (c *SrsCommons) ReadFrom(reader io.Reader) (n int64, err error) { + var N uint64 dec := curve.NewDecoder(reader) - toEncode := []interface{}{ - &c.G1.A, - &c.G1.B, - &c.G2.B, + if err = dec.Decode(&N); err != nil { + return dec.BytesRead(), err } - for _, v := range toEncode { - if err := dec.Decode(v); err != nil { + c.setZero(N) + + for _, v := range c.refsSlice()[1:] { // we've already decoded N + if err = dec.Decode(v); err != nil { return dec.BytesRead(), err } } - return dec.BytesRead(), nil -} +} \ No newline at end of file diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal_test.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal_test.go.tmpl deleted file mode 100644 index eaf6293777..0000000000 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/marshal_test.go.tmpl +++ /dev/null @@ -1,37 +0,0 @@ -import ( - "testing" - - gnarkio "github.com/consensys/gnark/io" - - {{- template "import_curve" . }} - {{- template "import_backend_cs" . }} - "github.com/consensys/gnark/frontend" - "github.com/consensys/gnark/frontend/cs/r1cs" - "github.com/stretchr/testify/require" -) - -func TestContributionSerialization(t *testing.T) { - if testing.Short() { - t.Skip("skipping test in short mode.") - } - assert := require.New(t) - - // Phase 1 - srs1 := InitPhase1(9) - srs1.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs1, func() interface{} { return new(Phase1) })) - - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) - - r1cs := ccs.(*cs.R1CS) - - // Phase 2 - srs2, _ := InitPhase2(r1cs, &srs1) - srs2.Contribute() - - assert.NoError(gnarkio.RoundTripCheck(&srs2, func() interface{} { return new(Phase2) })) -} - diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl index 7c8c6fa540..241c553b04 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase1.go.tmpl @@ -1,187 +1,260 @@ import ( "crypto/sha256" "errors" - "math" + "fmt" + "github.com/consensys/gnark-crypto/ecc" + gcUtils "github.com/consensys/gnark-crypto/utils" "math/big" + "bytes" + "sync" {{- template "import_fr" . 
}}
 {{- template "import_curve" . }}
+	"github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/mpcsetup"
 )

-// Phase1 represents the Phase1 of the MPC described in
+// SrsCommons are the circuit-independent components of the Groth16 SRS,
+// computed by the first phase.
+// In all that follows, N is the domain size.
+type SrsCommons struct {
+	G1 struct {
+		Tau      []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ᴺ⁻²]₁}
+		AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τᴺ⁻¹]₁}
+		BetaTau  []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τᴺ⁻¹]₁}
+	}
+	G2 struct {
+		Tau  []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τᴺ⁻¹]₂}
+		Beta curve.G2Affine   // [β]₂
+	}
+}
+
+// Phase1 represents a contribution in line with Phase 1 of the MPC described in
 // https://eprint.iacr.org/2017/1050.pdf
 //
 // Also known as "Powers of Tau"
 type Phase1 struct {
-	Parameters struct {
-		G1 struct {
-			Tau      []curve.G1Affine // {[τ⁰]₁, [τ¹]₁, [τ²]₁, …, [τ²ⁿ⁻²]₁}
-			AlphaTau []curve.G1Affine // {α[τ⁰]₁, α[τ¹]₁, α[τ²]₁, …, α[τⁿ⁻¹]₁}
-			BetaTau  []curve.G1Affine // {β[τ⁰]₁, β[τ¹]₁, β[τ²]₁, …, β[τⁿ⁻¹]₁}
-		}
-		G2 struct {
-			Tau  []curve.G2Affine // {[τ⁰]₂, [τ¹]₂, [τ²]₂, …, [τⁿ⁻¹]₂}
-			Beta curve.G2Affine   // [β]₂
-		}
-	}
-	PublicKeys struct {
-		Tau, Alpha, Beta PublicKey
+	proofs struct {
+		Tau, Alpha, Beta mpcsetup.UpdateProof
 	}
-	Hash []byte // sha256 hash
+	parameters SrsCommons
+	Challenge  []byte // Hash of the transcript PRIOR to this participant
 }

-// InitPhase1 initialize phase 1 of the MPC. This is called once by the coordinator before
-// any randomness contribution is made (see Contribute()).
-func InitPhase1(power int) (phase1 Phase1) {
-	N := int(math.Pow(2, float64(power)))
-
-	// Generate key pairs
-	var tau, alpha, beta fr.Element
-	tau.SetOne()
-	alpha.SetOne()
-	beta.SetOne()
-	phase1.PublicKeys.Tau = newPublicKey(tau, nil, 1)
-	phase1.PublicKeys.Alpha = newPublicKey(alpha, nil, 2)
-	phase1.PublicKeys.Beta = newPublicKey(beta, nil, 3)
-
-	// First contribution use generators
-	_, _, g1, g2 := curve.Generators()
-	phase1.Parameters.G2.Beta.Set(&g2)
-	phase1.Parameters.G1.Tau = make([]curve.G1Affine, 2*N-1)
-	phase1.Parameters.G2.Tau = make([]curve.G2Affine, N)
-	phase1.Parameters.G1.AlphaTau = make([]curve.G1Affine, N)
-	phase1.Parameters.G1.BetaTau = make([]curve.G1Affine, N)
-	for i := 0; i < len(phase1.Parameters.G1.Tau); i++ {
-		phase1.Parameters.G1.Tau[i].Set(&g1)
-	}
-	for i := 0; i < len(phase1.Parameters.G2.Tau); i++ {
-		phase1.Parameters.G2.Tau[i].Set(&g2)
-		phase1.Parameters.G1.AlphaTau[i].Set(&g1)
-		phase1.Parameters.G1.BetaTau[i].Set(&g1)
-	}
-
-	phase1.Parameters.G2.Beta.Set(&g2)
-
-	// Compute hash of Contribution
-	phase1.Hash = phase1.hash()
-
-	return
+// Contribute contributes randomness to the Phase1 object. This mutates Phase1.
+// p is trusted to be well-formed here; the ReadFrom function performs the
+// corresponding basic sanity checks.
+func (p *Phase1) Contribute() {
+	p.Challenge = p.hash()
+
+	// Generate main value updates
+	var (
+		tauContrib, alphaContrib, betaContrib fr.Element
+	)
+
+	p.proofs.Tau = mpcsetup.UpdateValues(&tauContrib, p.Challenge, 1)
+	p.proofs.Alpha = mpcsetup.UpdateValues(&alphaContrib, p.Challenge, 2)
+	p.proofs.Beta = mpcsetup.UpdateValues(&betaContrib, p.Challenge, 3)
+
+	p.parameters.update(&tauContrib, &alphaContrib, &betaContrib)
 }

-// Contribute contributes randomness to the phase1 object. This mutates phase1. 
-func (phase1 *Phase1) Contribute() { - N := len(phase1.Parameters.G2.Tau) - - // Generate key pairs - var tau, alpha, beta fr.Element - tau.SetRandom() - alpha.SetRandom() - beta.SetRandom() - phase1.PublicKeys.Tau = newPublicKey(tau, phase1.Hash[:], 1) - phase1.PublicKeys.Alpha = newPublicKey(alpha, phase1.Hash[:], 2) - phase1.PublicKeys.Beta = newPublicKey(beta, phase1.Hash[:], 3) - - // Compute powers of τ, ατ, and βτ - taus := powers(tau, 2*N-1) - alphaTau := make([]fr.Element, N) - betaTau := make([]fr.Element, N) - for i := 0; i < N; i++ { - alphaTau[i].Mul(&taus[i], &alpha) - betaTau[i].Mul(&taus[i], &beta) - } - - // Update using previous parameters - // TODO @gbotrel working with jacobian points here will help with perf. - scaleG1InPlace(phase1.Parameters.G1.Tau, taus) - scaleG2InPlace(phase1.Parameters.G2.Tau, taus[0:N]) - scaleG1InPlace(phase1.Parameters.G1.AlphaTau, alphaTau) - scaleG1InPlace(phase1.Parameters.G1.BetaTau, betaTau) - var betaBI big.Int - beta.BigInt(&betaBI) - phase1.Parameters.G2.Beta.ScalarMultiplication(&phase1.Parameters.G2.Beta, &betaBI) - - // Compute hash of Contribution - phase1.Hash = phase1.hash() +// setZero instantiates the parameters, and sets all contributions to zero +func (c *SrsCommons) setZero(N uint64) { + c.G1.Tau = make([]curve.G1Affine, 2*N-1) + c.G2.Tau = make([]curve.G2Affine, N) + c.G1.AlphaTau = make([]curve.G1Affine, N) + c.G1.BetaTau = make([]curve.G1Affine, N) + _, _, c.G1.Tau[0], c.G2.Tau[0] = curve.Generators() } -func VerifyPhase1(c0, c1 *Phase1, c ...*Phase1) error { - contribs := append([]*Phase1{c0, c1}, c...) - for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase1(contribs[i], contribs[i+1]); err != nil { - return err +// setOne instantiates the parameters, and sets all contributions to one +func (c *SrsCommons) setOne(N uint64) { + c.setZero(N) + g1, g2 := &c.G1.Tau[0], &c.G2.Tau[0] + setG1 := func(s []curve.G1Affine) { + for i := range s { + s[i].Set(g1) } } - return nil + setG2 := func(s []curve.G2Affine) { + for i := range s { + s[i].Set(g2) + } + } + + setG1(c.G1.Tau[1:]) + setG2(c.G2.Tau[1:]) + setG1(c.G1.AlphaTau) + setG1(c.G1.BetaTau) + c.G2.Beta.Set(g2) } -// verifyPhase1 checks that a contribution is based on a known previous Phase1 state. -func verifyPhase1(current, contribution *Phase1) error { - // Compute R for τ, α, β - tauR := genR(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, current.Hash[:], 1) - alphaR := genR(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, current.Hash[:], 2) - betaR := genR(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, current.Hash[:], 3) +// from the fourth argument on this just gives an opportunity to avoid recomputing some scalar multiplications +func (c *SrsCommons) update(tauUpdate, alphaUpdate, betaUpdate *fr.Element) { - // Check for knowledge of toxic parameters - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.PublicKeys.Tau.XR, tauR) { - return errors.New("couldn't verify public key of τ") + // TODO @gbotrel working with jacobian points here will help with perf. 
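+	// Why in-place scaling suffices: the previous transcript holds [τ₀ⁱ]₁ for
+	// every i, and this participant contributes a fresh τ'. The combined secret
+	// is τ₀τ', and [(τ₀τ')ⁱ]₁ = τ'ⁱ·[τ₀ⁱ]₁, so multiplying the iᵗʰ entry by τ'ⁱ
+	// updates the SRS without anyone ever knowing τ₀. The same reasoning
+	// applies to the α and β vectors below.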
+
+	tauUpdates := powers(tauUpdate, len(c.G1.Tau))
+
+	scaleG1InPlace(c.G1.Tau[1:], tauUpdates[1:]) // first element remains 1
+	scaleG2InPlace(c.G2.Tau[1:], tauUpdates[1:])
+
+	alphaUpdates := make([]fr.Element, len(c.G1.AlphaTau))
+	alphaUpdates[0].Set(alphaUpdate)
+	for i := range alphaUpdates {
+		// let α₁ = α₀.α', τ₁ = τ₀.τ'
+		// then α₁τ₁ⁱ = (α₀τ₀ⁱ)α'τ'ⁱ
+		alphaUpdates[i].Mul(&tauUpdates[i], alphaUpdate)
 	}
-	if !sameRatio(contribution.PublicKeys.Alpha.SG, contribution.PublicKeys.Alpha.SXG, contribution.PublicKeys.Alpha.XR, alphaR) {
-		return errors.New("couldn't verify public key of α")
+	scaleG1InPlace(c.G1.AlphaTau, alphaUpdates)
+
+	betaUpdates := make([]fr.Element, len(c.G1.BetaTau))
+	betaUpdates[0].Set(betaUpdate)
+	for i := range betaUpdates {
+		betaUpdates[i].Mul(&tauUpdates[i], betaUpdate)
 	}
-	if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.PublicKeys.Beta.XR, betaR) {
-		return errors.New("couldn't verify public key of β")
+	scaleG1InPlace(c.G1.BetaTau, betaUpdates)
+
+	var betaUpdateI big.Int
+	betaUpdate.BigInt(&betaUpdateI)
+	c.G2.Beta.ScalarMultiplication(&c.G2.Beta, &betaUpdateI)
+}
+
+// Seal performs the final contribution and outputs the final parameters.
+// No randomization is performed at this step.
+// A verifier should simply re-run this and check
+// that it produces the same values.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// WARNING: Seal modifies p, just as Contribute does.
+// The result will be an INVALID Phase1 object, since no proof of correctness is produced.
+func (p *Phase1) Seal(beaconChallenge []byte) SrsCommons {
+	newContribs := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase 1"), beaconChallenge, 3)
+	p.parameters.update(&newContribs[0], &newContribs[1], &newContribs[2])
+	return p.parameters
+}
+
+// VerifyPhase1 verifies a sequence of contributions and returns the SRS parameters usable for any circuit of domain size N.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. 
+// and c are the output from the contributors +// WARNING: the last contribution object will be modified +func VerifyPhase1(N uint64, beaconChallenge []byte, c ...*Phase1) (SrsCommons, error) { + prev := NewPhase1(N) + wp := gcUtils.NewWorkerPool() + defer wp.Stop() + for i := range c { + if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil { + return SrsCommons{}, err + } + prev = c[i] } + return prev.Seal(beaconChallenge), nil +} + +// Verify assumes previous is correct +func (p *Phase1) Verify(next *Phase1, options ...verificationOption) error { - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Tau[1], current.Parameters.G1.Tau[1], tauR, contribution.PublicKeys.Tau.XR) { - return errors.New("couldn't verify that [τ]₁ is based on previous contribution") + challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") } - if !sameRatio(contribution.Parameters.G1.AlphaTau[0], current.Parameters.G1.AlphaTau[0], alphaR, contribution.PublicKeys.Alpha.XR) { - return errors.New("couldn't verify that [α]₁ is based on previous contribution") + next.Challenge = challenge + + // the internal consistency of the vector sizes in next is assumed + // so is its well-formedness i.e. Tau[0] = 1 + // it remains to check it is consistent with p + N := len(next.parameters.G2.Tau) + if N != len(p.parameters.G2.Tau) { + return errors.New("domain size mismatch") } - if !sameRatio(contribution.Parameters.G1.BetaTau[0], current.Parameters.G1.BetaTau[0], betaR, contribution.PublicKeys.Beta.XR) { - return errors.New("couldn't verify that [β]₁ is based on previous contribution") + + // verify updates to τ, α, β + if err := next.proofs.Tau.Verify(challenge, 1, mpcsetup.ValueUpdate{Previous: &p.parameters.G1.Tau[1], Next: &next.parameters.G1.Tau[1]}); err != nil { + return fmt.Errorf("failed to verify contribution to τ: %w", err) } - if !sameRatio(contribution.PublicKeys.Tau.SG, contribution.PublicKeys.Tau.SXG, contribution.Parameters.G2.Tau[1], current.Parameters.G2.Tau[1]) { - return errors.New("couldn't verify that [τ]₂ is based on previous contribution") + if err := next.proofs.Alpha.Verify(challenge, 2, mpcsetup.ValueUpdate{Previous: p.parameters.G1.AlphaTau[0], Next: next.parameters.G1.AlphaTau[0]}); err != nil { + return fmt.Errorf("failed to verify contribution to α: %w", err) } - if !sameRatio(contribution.PublicKeys.Beta.SG, contribution.PublicKeys.Beta.SXG, contribution.Parameters.G2.Beta, current.Parameters.G2.Beta) { - return errors.New("couldn't verify that [β]₂ is based on previous contribution") + if err := next.proofs.Beta.Verify(challenge, 3, []mpcsetup.ValueUpdate{ + {Previous: &p.parameters.G1.BetaTau[0], Next: &next.parameters.G1.BetaTau[0]}, + {Previous: &p.parameters.G2.Beta, Next: &next.parameters.G2.Beta}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to β: %w", err) } - // Check for valid updates using powers of τ - _, _, g1, g2 := curve.Generators() - tauL1, tauL2 := linearCombinationG1(contribution.Parameters.G1.Tau) - if !sameRatio(tauL1, tauL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of τ in G₁") + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) } - alphaL1, alphaL2 := linearCombinationG1(contribution.Parameters.G1.AlphaTau) - if !sameRatio(alphaL1, alphaL2, 
contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - betaL1, betaL2 := linearCombinationG1(contribution.Parameters.G1.BetaTau) - if !sameRatio(betaL1, betaL2, contribution.Parameters.G2.Tau[1], g2) { - return errors.New("couldn't verify valid powers of α(τ) in G₁") - } - tau2L1, tau2L2 := linearCombinationG2(contribution.Parameters.G2.Tau) - if !sameRatio(contribution.Parameters.G1.Tau[1], g1, tau2L1, tau2L2) { - return errors.New("couldn't verify valid powers of τ in G₂") + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() } - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") + var wg [4]*sync.WaitGroup + subGroupCheckErrors := make(chan error, 4) + wg[0] = areInSubGroupG1(wp, next.parameters.G1.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₁ representation not in subgroup", i+2) + }) + wg[1] = areInSubGroupG1(wp, next.parameters.G1.AlphaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[ατ^%d]₁ representation not in subgroup", i+1) + }) + wg[2] = areInSubGroupG1(wp, next.parameters.G1.BetaTau[1:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[βτ^%d]₁ representation not in subgroup", i+1) + }) + wg[3] = areInSubGroupG2(wp, next.parameters.G2.Tau[2:], func(i int) { + subGroupCheckErrors <- fmt.Errorf("[τ^%d]₂ representation not in subgroup", i+2) + }) + + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err } } - return nil + return mpcsetup.SameRatioMany( + p.parameters.G1.Tau, + p.parameters.G2.Tau, + p.parameters.G1.AlphaTau, + p.parameters.G1.BetaTau, + ) } -func (phase1 *Phase1) hash() []byte { +func (p *Phase1) hash() []byte { sha := sha256.New() - phase1.writeTo(sha) + if _, err := p.WriteTo(sha); err != nil { + panic(err) + } + sha.Write(p.Challenge) return sha.Sum(nil) } + +// Initialize an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func (p *Phase1) Initialize(N uint64) { + if ecc.NextPowerOfTwo(N) != N { + panic("N must be a power of 2") + } + p.parameters.setOne(N) +} + +// NewPhase1 creates an empty Phase1 contribution object +// to be used by the first contributor or the verifier +// N is the FFT domain size +func NewPhase1(N uint64) *Phase1 { + res := new(Phase1) + res.Initialize(N) + return res +} \ No newline at end of file diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl index 0afb32db79..f6def165bc 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/phase2.go.tmpl @@ -1,48 +1,199 @@ import ( + "bytes" "crypto/sha256" "errors" + "github.com/consensys/gnark/internal/utils" "math/big" - + "fmt" + "slices" + "sync" + "github.com/consensys/gnark/backend/groth16" + "github.com/consensys/gnark/backend/groth16/internal" "github.com/consensys/gnark/constraint" - {{- template "import_fr" . }} {{- template "import_curve" . }} {{- template "import_backend_cs" . 
}} + "github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/mpcsetup" + gcUtils "github.com/consensys/gnark-crypto/utils" ) -type Phase2Evaluations struct { +// Phase2Evaluations components of the circuit keys +// not depending on Phase2 randomisations +type Phase2Evaluations struct { // TODO @Tabaie rename G1 struct { - A, B, VKK []curve.G1Affine + A []curve.G1Affine // A are the left coefficient polynomials for each witness element, evaluated at τ + B []curve.G1Affine // B are the right coefficient polynomials for each witness element, evaluated at τ + VKK []curve.G1Affine // VKK are the coefficients of the public witness and commitments + CKK [][]curve.G1Affine // CKK are the coefficients of the committed values } G2 struct { - B []curve.G2Affine + B []curve.G2Affine // B are the right coefficient polynomials for each witness element, evaluated at τ } + PublicAndCommitmentCommitted [][]int } type Phase2 struct { Parameters struct { G1 struct { - Delta curve.G1Affine - L, Z []curve.G1Affine + Delta curve.G1Affine + Z []curve.G1Affine // Z[i] = xⁱt(x)/δ where t is the domain vanishing polynomial 0 ≤ i ≤ N-2 + PKK []curve.G1Affine // PKK are the coefficients of the private witness, needed for the proving key. They have a denominator of δ + SigmaCKK [][]curve.G1Affine // Commitment proof bases: SigmaCKK[i][j] = σᵢCᵢⱼ where Cᵢⱼ is the commitment basis for the jᵗʰ committed element from the iᵗʰ commitment } G2 struct { Delta curve.G2Affine + Sigma []curve.G2Affine // the secret σ value for each commitment + } + } + + // Proofs of update correctness + Sigmas []mpcsetup.UpdateProof + Delta mpcsetup.UpdateProof + + // Challenge is the hash of the PREVIOUS contribution + Challenge []byte +} + +func (p *Phase2) Verify(next *Phase2, options ...verificationOption) error { +challenge := p.hash() + if len(next.Challenge) != 0 && !bytes.Equal(next.Challenge, challenge) { + return errors.New("the challenge does not match the previous contribution's hash") + } + next.Challenge = challenge + + if len(next.Parameters.G1.Z) != len(p.Parameters.G1.Z) || + len(next.Parameters.G1.PKK) != len(p.Parameters.G1.PKK) || + len(next.Parameters.G1.SigmaCKK) != len(p.Parameters.G1.SigmaCKK) || + len(next.Parameters.G2.Sigma) != len(p.Parameters.G2.Sigma) { + return errors.New("contribution size mismatch") + } + + // check subgroup membership + var settings verificationSettings + for _, opt := range options { + opt(&settings) + } + wp := settings.wp + if wp == nil { + wp = gcUtils.NewWorkerPool() + defer wp.Stop() + } + + subGroupCheckErrors := make(chan error, 2+len(p.Sigmas)) + subGroupErrorReporterNoOffset := func(format string) func(int) { + return func(i int) { + subGroupCheckErrors <- fmt.Errorf(format+" representation not in subgroup", i) } } - PublicKey PublicKey - Hash []byte + + wg := make([]*sync.WaitGroup, 2+len(p.Sigmas)) + wg[0] = areInSubGroupG1(wp, next.Parameters.G1.Z, subGroupErrorReporterNoOffset("[Z[%d]]₁")) + wg[1] = areInSubGroupG1(wp, next.Parameters.G1.PKK, subGroupErrorReporterNoOffset("[PKK[%d]]₁")) + for i := range p.Sigmas { + wg[2+i] = areInSubGroupG1(wp, next.Parameters.G1.SigmaCKK[i], subGroupErrorReporterNoOffset("[σCKK[%d]]₁ (commitment proving key)")) + } + for _, wg := range wg { + wg.Wait() + } + close(subGroupCheckErrors) + for err := range subGroupCheckErrors { + if err != nil { + return err + } + } + + // verify proof of knowledge of contributions to the σᵢ + // and the correctness of updates to Parameters.G2.Sigma[i] and the Parameters.G1.SigmaCKK[i] + for i := range p.Sigmas { 
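+		// domain-separation detail: tag 1 is used for the δ update (verified
+		// below), and the iᵗʰ commitment uses tag 2+i; this is why Contribute
+		// caps the number of commitments at 255 to avoid DST collisions.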
// match the first commitment basis elem against the contribution commitment + if err := next.Sigmas[i].Verify(challenge, 2+byte(i), + mpcsetup.ValueUpdate{Previous: p.Parameters.G1.SigmaCKK[i], Next: next.Parameters.G1.SigmaCKK[i]}, + mpcsetup.ValueUpdate{Previous: &p.Parameters.G2.Sigma[i], Next: &next.Parameters.G2.Sigma[i]}); err != nil { + return fmt.Errorf("failed to verify contribution to σ[%d]: %w", i, err) + } + } + + // verify proof of knowledge of contribution to δ + // and the correctness of updates to Parameters.Gi.Delta, PKK[i], and Z[i] + if err := next.Delta.Verify(challenge, 1, []mpcsetup.ValueUpdate{ + {Previous: &p.Parameters.G1.Delta, Next: &next.Parameters.G1.Delta}, + {Previous: &p.Parameters.G2.Delta, Next: &next.Parameters.G2.Delta}, + {Previous: next.Parameters.G1.Z, Next: p.Parameters.G1.Z}, // since these have δ in their denominator, we will do it "backwards" + {Previous: next.Parameters.G1.PKK, Next: p.Parameters.G1.PKK}, + }...); err != nil { + return fmt.Errorf("failed to verify contribution to δ: %w", err) + } + + return nil +} + +// update modifies delta +func (p *Phase2) update(delta *fr.Element, sigma []fr.Element) { + var I big.Int + + scale := func(point any) { + switch p := point.(type) { + case *curve.G1Affine: + p.ScalarMultiplication(p, &I) + case *curve.G2Affine: + p.ScalarMultiplication(p, &I) + default: + panic("unknown type") + } + } + scaleG1Slice := func(s []curve.G1Affine) { + utils.Parallelize(len(s), func(start, end int) { + for i := start; i < end; i++ { + s[i].ScalarMultiplication(&s[i], &I) + } + }) + } + + for i := range sigma { + sigma[i].BigInt(&I) + scale(&p.Parameters.G2.Sigma[i]) + scaleG1Slice(p.Parameters.G1.SigmaCKK[i]) + } + + delta.BigInt(&I) + scale(&p.Parameters.G2.Delta) + scale(&p.Parameters.G1.Delta) + + delta.Inverse(delta) + delta.BigInt(&I) + scaleG1Slice(p.Parameters.G1.Z) + scaleG1Slice(p.Parameters.G1.PKK) } -func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { - srs := srs1.Parameters - size := len(srs.G1.AlphaTau) +func (p *Phase2) Contribute() { + p.Challenge = p.hash() + + // sample value contributions and provide correctness proofs + var delta fr.Element + p.Delta = mpcsetup.UpdateValues(&delta, p.Challenge, 1) + + sigma := make([]fr.Element, len(p.Parameters.G1.SigmaCKK)) + if len(sigma) > 255 { + panic("too many commitments") // DST collision + } + for i := range sigma { + p.Sigmas[i] = mpcsetup.UpdateValues(&sigma[i], p.Challenge, byte(2+i)) + } + + p.update(&delta, sigma) +} + +// Initialize is to be run by the coordinator +// It involves no coin tosses. 
A verifier should +// simply rerun all the steps +// TODO @Tabaie option to only compute the phase 2 info and not the evaluations, for a contributor +func (p *Phase2) Initialize(r1cs *cs.R1CS, commons *SrsCommons) Phase2Evaluations { + + size := len(commons.G1.AlphaTau) if size < r1cs.GetNbConstraints() { panic("Number of constraints is larger than expected") } - c2 := Phase2{} - accumulateG1 := func(res *curve.G1Affine, t constraint.Term, value *curve.G1Affine) { cID := t.CoeffID() switch cID { @@ -84,26 +235,30 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } // Prepare Lagrange coefficients of [τ...]₁, [τ...]₂, [ατ...]₁, [βτ...]₁ - coeffTau1 := lagrangeCoeffsG1(srs.G1.Tau, size) - coeffTau2 := lagrangeCoeffsG2(srs.G2.Tau, size) - coeffAlphaTau1 := lagrangeCoeffsG1(srs.G1.AlphaTau, size) - coeffBetaTau1 := lagrangeCoeffsG1(srs.G1.BetaTau, size) + coeffTau1 := lagrangeCoeffsG1(commons.G1.Tau, size) // [L_{ω⁰}(τ)]₁, [L_{ω¹}(τ)]₁, ... where ω is a primitive sizeᵗʰ root of unity + coeffTau2 := lagrangeCoeffsG2(commons.G2.Tau, size) // [L_{ω⁰}(τ)]₂, [L_{ω¹}(τ)]₂, ... + coeffAlphaTau1 := lagrangeCoeffsG1(commons.G1.AlphaTau, size) // [L_{ω⁰}(ατ)]₁, [L_{ω¹}(ατ)]₁, ... + coeffBetaTau1 := lagrangeCoeffsG1(commons.G1.BetaTau, size) // [L_{ω⁰}(βτ)]₁, [L_{ω¹}(βτ)]₁, ... - internal, secret, public := r1cs.GetNbVariables() - nWires := internal + secret + public + nbInternal, nbSecret, nbPublic := r1cs.GetNbVariables() + nWires := nbInternal + nbSecret + nbPublic var evals Phase2Evaluations - evals.G1.A = make([]curve.G1Affine, nWires) - evals.G1.B = make([]curve.G1Affine, nWires) - evals.G2.B = make([]curve.G2Affine, nWires) + commitmentInfo := r1cs.CommitmentInfo.(constraint.Groth16Commitments) + evals.PublicAndCommitmentCommitted = commitmentInfo.GetPublicAndCommitmentCommitted(commitmentInfo.CommitmentIndexes(), nbPublic) + evals.G1.A = make([]curve.G1Affine, nWires) // recall: A are the left coefficients in DIZK parlance + evals.G1.B = make([]curve.G1Affine, nWires) // recall: B are the right coefficients in DIZK parlance + evals.G2.B = make([]curve.G2Affine, nWires) // recall: A only appears in 𝔾₁ elements in the proof, but B needs to appear in a 𝔾₂ element so the verifier can compute something resembling (A.x).(B.x) via pairings bA := make([]curve.G1Affine, nWires) aB := make([]curve.G1Affine, nWires) C := make([]curve.G1Affine, nWires) - // TODO @gbotrel use constraint iterator when available. - i := 0 it := r1cs.GetR1CIterator() - for c := it.Next(); c!=nil; c = it.Next() { + for c := it.Next(); c != nil; c = it.Next() { + // each constraint is sparse, i.e. involves a small portion of all variables. 
+ // so we iterate over the variables involved and add the constraint's contribution + // to every variable's A, B, and C values + // A for _, t := range c.L { accumulateG1(&evals.G1.A[t.WireID()], t, &coeffTau1[i]) @@ -121,128 +276,99 @@ func InitPhase2(r1cs *cs.R1CS, srs1 *Phase1) (Phase2, Phase2Evaluations) { } i++ } - + // Prepare default contribution _, _, g1, g2 := curve.Generators() - c2.Parameters.G1.Delta = g1 - c2.Parameters.G2.Delta = g2 + p.Parameters.G1.Delta = g1 + p.Parameters.G2.Delta = g2 // Build Z in PK as τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] // τⁱ(τⁿ - 1) = τ⁽ⁱ⁺ⁿ⁾ - τⁱ for i ∈ [0, n-2] - n := len(srs.G1.AlphaTau) - c2.Parameters.G1.Z = make([]curve.G1Affine, n) - for i := 0; i < n-1; i++ { - c2.Parameters.G1.Z[i].Sub(&srs.G1.Tau[i+n], &srs.G1.Tau[i]) - } - bitReverse(c2.Parameters.G1.Z) - c2.Parameters.G1.Z = c2.Parameters.G1.Z[:n-1] - - // Evaluate L - nPrivate := internal + secret - c2.Parameters.G1.L = make([]curve.G1Affine, nPrivate) - evals.G1.VKK = make([]curve.G1Affine, public) - offset := public - for i := 0; i < nWires; i++ { - var tmp curve.G1Affine - tmp.Add(&bA[i], &aB[i]) - tmp.Add(&tmp, &C[i]) - if i < public { - evals.G1.VKK[i].Set(&tmp) - } else { - c2.Parameters.G1.L[i-offset].Set(&tmp) - } + n := len(commons.G1.AlphaTau) + p.Parameters.G1.Z = make([]curve.G1Affine, n) + for i := range n - 1 { + p.Parameters.G1.Z[i].Sub(&commons.G1.Tau[i+n], &commons.G1.Tau[i]) } - // Set δ public key - var delta fr.Element - delta.SetOne() - c2.PublicKey = newPublicKey(delta, nil, 1) - - // Hash initial contribution - c2.Hash = c2.hash() - return c2, evals -} + bitReverse(p.Parameters.G1.Z) + p.Parameters.G1.Z = p.Parameters.G1.Z[:n-1] -func (c *Phase2) Contribute() { - // Sample toxic δ - var delta, deltaInv fr.Element - var deltaBI, deltaInvBI big.Int - delta.SetRandom() - deltaInv.Inverse(&delta) + commitments := r1cs.CommitmentInfo.(constraint.Groth16Commitments) - delta.BigInt(&deltaBI) - deltaInv.BigInt(&deltaInvBI) + evals.G1.CKK = make([][]curve.G1Affine, len(commitments)) + p.Sigmas = make([]mpcsetup.UpdateProof, len(commitments)) + p.Parameters.G1.SigmaCKK = make([][]curve.G1Affine, len(commitments)) + p.Parameters.G2.Sigma = make([]curve.G2Affine, len(commitments)) - // Set δ public key - c.PublicKey = newPublicKey(delta, c.Hash, 1) - - // Update δ - c.Parameters.G1.Delta.ScalarMultiplication(&c.Parameters.G1.Delta, &deltaBI) - c.Parameters.G2.Delta.ScalarMultiplication(&c.Parameters.G2.Delta, &deltaBI) - - // Update Z using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.Z); i++ { - c.Parameters.G1.Z[i].ScalarMultiplication(&c.Parameters.G1.Z[i], &deltaInvBI) + for j := range commitments { + evals.G1.CKK[j] = make([]curve.G1Affine, 0, len(commitments[j].PrivateCommitted)) + p.Parameters.G2.Sigma[j] = g2 } - // Update L using δ⁻¹ - for i := 0; i < len(c.Parameters.G1.L); i++ { - c.Parameters.G1.L[i].ScalarMultiplication(&c.Parameters.G1.L[i], &deltaInvBI) - } + nbCommitted := internal.NbElements(commitments.GetPrivateCommitted()) - // 4. Hash contribution - c.Hash = c.hash() -} + // Evaluate PKK -func VerifyPhase2(c0, c1 *Phase2, c ...*Phase2) error { - contribs := append([]*Phase2{c0, c1}, c...) 
- for i := 0; i < len(contribs)-1; i++ { - if err := verifyPhase2(contribs[i], contribs[i+1]); err != nil { - return err + p.Parameters.G1.PKK = make([]curve.G1Affine, 0, nbInternal+nbSecret-nbCommitted-len(commitments)) + evals.G1.VKK = make([]curve.G1Affine, 0, nbPublic+len(commitments)) + committedIterator := internal.NewMergeIterator(commitments.GetPrivateCommitted()) + nbCommitmentsSeen := 0 + for j := 0; j < nWires; j++ { + // since as yet δ, γ = 1, the VKK and PKK are computed identically, as βA + αB + C + var tmp curve.G1Affine + tmp.Add(&bA[j], &aB[j]) + tmp.Add(&tmp, &C[j]) + commitmentIndex := committedIterator.IndexIfNext(j) + isCommitment := nbCommitmentsSeen < len(commitments) && commitments[nbCommitmentsSeen].CommitmentIndex == j + if commitmentIndex != -1 { + evals.G1.CKK[commitmentIndex] = append(evals.G1.CKK[commitmentIndex], tmp) + } else if j < nbPublic || isCommitment { + evals.G1.VKK = append(evals.G1.VKK, tmp) + } else { + p.Parameters.G1.PKK = append(p.Parameters.G1.PKK, tmp) + } + if isCommitment { + nbCommitmentsSeen++ } } - return nil -} -func verifyPhase2(current, contribution *Phase2) error { - // Compute R for δ - deltaR := genR(contribution.PublicKey.SG, contribution.PublicKey.SXG, current.Hash[:], 1) - - // Check for knowledge of δ - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.PublicKey.XR, deltaR) { - return errors.New("couldn't verify knowledge of δ") + for j := range commitments { + p.Parameters.G1.SigmaCKK[j] = slices.Clone(evals.G1.CKK[j]) } - // Check for valid updates using previous parameters - if !sameRatio(contribution.Parameters.G1.Delta, current.Parameters.G1.Delta, deltaR, contribution.PublicKey.XR) { - return errors.New("couldn't verify that [δ]₁ is based on previous contribution") - } - if !sameRatio(contribution.PublicKey.SG, contribution.PublicKey.SXG, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify that [δ]₂ is based on previous contribution") - } + p.Challenge = nil - // Check for valid updates of L and Z using - L, prevL := merge(contribution.Parameters.G1.L, current.Parameters.G1.L) - if !sameRatio(L, prevL, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } - Z, prevZ := merge(contribution.Parameters.G1.Z, current.Parameters.G1.Z) - if !sameRatio(Z, prevZ, contribution.Parameters.G2.Delta, current.Parameters.G2.Delta) { - return errors.New("couldn't verify valid updates of L using δ⁻¹") - } + return evals +} - // Check hash of the contribution - h := contribution.hash() - for i := 0; i < len(h); i++ { - if h[i] != contribution.Hash[i] { - return errors.New("couldn't verify hash of contribution") +// VerifyPhase2 for circuit described by r1cs +// using parameters from commons +// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution. +// It seeds a final "contribution" to the protocol, reproducible by any verifier. +// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/ +// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT. 
+// and c are the outputs of the contributors
+// WARNING: the last contribution object will be modified
+func VerifyPhase2(r1cs *cs.R1CS, commons *SrsCommons, beaconChallenge []byte, c ...*Phase2) (groth16.ProvingKey, groth16.VerifyingKey, error) {
+	prev := new(Phase2)
+	evals := prev.Initialize(r1cs, commons)
+	wp := gcUtils.NewWorkerPool()
+	defer wp.Stop()
+	for i := range c {
+		if err := prev.Verify(c[i], WithWorkerPool(wp)); err != nil {
+			return nil, nil, err
+		}
+		prev = c[i]
+	}
-	return nil
+	pk, vk := prev.Seal(commons, &evals, beaconChallenge)
+	return pk, vk, nil
 }

-func (c *Phase2) hash() []byte {
+func (p *Phase2) hash() []byte {
 	sha := sha256.New()
-	c.writeTo(sha)
+	if _, err := p.WriteTo(sha); err != nil {
+		panic(err)
+	}
+	sha.Write(p.Challenge)
 	return sha.Sum(nil)
-}
+}
\ No newline at end of file
diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl
index e60410b467..a1367e6c9b 100644
--- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl
+++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup.go.tmpl
@@ -1,24 +1,47 @@
 import (
-	groth16 "github.com/consensys/gnark/backend/groth16/{{toLower .Curve}}"
+	groth16Impl "github.com/consensys/gnark/backend/groth16/{{toLower .Curve}}"
+	"github.com/consensys/gnark/backend/groth16"
+	"github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/fr/pedersen"
 	{{- template "import_curve" . }}
 	{{- template "import_fft" . }}
+	"github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/mpcsetup"
 )

-func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstraints int) (pk groth16.ProvingKey, vk groth16.VerifyingKey) {
+// Seal performs the final contribution and outputs the proving and verifying keys.
+// No randomization is performed at this step.
+// A verifier should simply re-run this and check
+// that it produces the same values.
+// beaconChallenge is a random beacon of moderate entropy evaluated at a time later than the latest contribution.
+// It seeds a final "contribution" to the protocol, reproducible by any verifier.
+// For more information on random beacons, refer to https://a16zcrypto.com/posts/article/public-randomness-and-randomness-beacons/
+// Organizations such as the League of Entropy (https://leagueofentropy.com/) provide such beacons. THIS IS NOT A RECOMMENDATION OR ENDORSEMENT.
+// WARNING: Seal modifies p, just as Contribute does.
+// The result will be an INVALID Phase2 object, since no proof of correctness is produced. 
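+//
+// A sketch of coordinator-side usage (identifiers here are illustrative, not part of this API):
+//
+//	// after verifying every contribution, e.g. as in VerifyPhase2:
+//	pk, vk := lastPhase2.Seal(&commons, &evals, beaconChallenge)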
+func (p *Phase2) Seal(commons *SrsCommons, evals *Phase2Evaluations, beaconChallenge []byte) (groth16.ProvingKey, groth16.VerifyingKey) { + + // final contributions + contributions := mpcsetup.BeaconContributions(p.hash(), []byte("Groth16 MPC Setup - Phase2"), beaconChallenge, 1+len(p.Sigmas)) + p.update(&contributions[0], contributions[1:]) + _, _, _, g2 := curve.Generators() + var ( + pk groth16Impl.ProvingKey + vk groth16Impl.VerifyingKey + ) + // Initialize PK - pk.Domain = *fft.NewDomain(uint64(nConstraints)) - pk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - pk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - pk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - pk.G1.Z = srs2.Parameters.G1.Z + pk.Domain = *fft.NewDomain(uint64(len(commons.G1.AlphaTau))) + pk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + pk.G1.Beta.Set(&commons.G1.BetaTau[0]) + pk.G1.Delta.Set(&p.Parameters.G1.Delta) + pk.G1.Z = p.Parameters.G1.Z bitReverse(pk.G1.Z) - pk.G1.K = srs2.Parameters.G1.L - pk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - pk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + pk.G1.K = p.Parameters.G1.PKK + pk.G2.Beta.Set(&commons.G2.Beta) + pk.G2.Delta.Set(&p.Parameters.G2.Delta) // Filter out infinity points nWires := len(evals.G1.A) @@ -63,18 +86,29 @@ func ExtractKeys(srs1 *Phase1, srs2 *Phase2, evals *Phase2Evaluations, nConstrai pk.G2.B = B2[:j] // Initialize VK - vk.G1.Alpha.Set(&srs1.Parameters.G1.AlphaTau[0]) - vk.G1.Beta.Set(&srs1.Parameters.G1.BetaTau[0]) - vk.G1.Delta.Set(&srs2.Parameters.G1.Delta) - vk.G2.Beta.Set(&srs1.Parameters.G2.Beta) - vk.G2.Delta.Set(&srs2.Parameters.G2.Delta) + vk.G1.Alpha.Set(&commons.G1.AlphaTau[0]) + vk.G1.Beta.Set(&commons.G1.BetaTau[0]) + vk.G1.Delta.Set(&p.Parameters.G1.Delta) + vk.G2.Beta.Set(&commons.G2.Beta) + vk.G2.Delta.Set(&p.Parameters.G2.Delta) vk.G2.Gamma.Set(&g2) vk.G1.K = evals.G1.VKK + vk.CommitmentKeys = make([]pedersen.VerifyingKey, len(evals.G1.CKK)) + pk.CommitmentKeys = make([]pedersen.ProvingKey, len(evals.G1.CKK)) + for i := range vk.CommitmentKeys { + vk.CommitmentKeys[i].G = g2 + vk.CommitmentKeys[i].GSigmaNeg.Neg(&p.Parameters.G2.Sigma[i]) + + pk.CommitmentKeys[i].Basis = evals.G1.CKK[i] + pk.CommitmentKeys[i].BasisExpSigma = p.Parameters.G1.SigmaCKK[i] + } + vk.PublicAndCommitmentCommitted = evals.PublicAndCommitmentCommitted + // sets e, -[δ]2, -[γ]2 if err := vk.Precompute(); err != nil { panic(err) } - return pk, vk -} + return &pk, &vk +} \ No newline at end of file diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl index a36c0c1b9d..bfd0ff50f2 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/setup_test.go.tmpl @@ -1,5 +1,10 @@ import ( "testing" + "bytes" + "github.com/consensys/gnark-crypto/ecc" + "io" + "slices" + "sync" {{- template "import_fr" . }} {{- template "import_curve" . 
}} @@ -14,76 +19,76 @@ import ( native_mimc "github.com/consensys/gnark-crypto/ecc/{{toLower .Curve}}/fr/mimc" ) -func TestSetupCircuit(t *testing.T) { - {{- if ne (toLower .Curve) "bn254" }} - if testing.Short() { - t.Skip() - } - {{- end}} - const ( - nContributionsPhase1 = 3 - nContributionsPhase2 = 3 - power = 9 - ) +// TestAll a full integration test of the MPC setup +func TestAll(t *testing.T) { + testAll(t, 3, 3) +} +func testAll(t *testing.T, nbContributionsPhase1, nbContributionsPhase2 int) { assert := require.New(t) - srs1 := InitPhase1(power) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase1; i++ { - // we clone test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs1.clone() - - srs1.Contribute() - assert.NoError(VerifyPhase1(&prev, &srs1)) - } - // Compile the circuit - var myCircuit Circuit - ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) - assert.NoError(err) + ccs := getTestCircuit() - var evals Phase2Evaluations - r1cs := ccs.(*cs.R1CS) + domainSize := ecc.NextPowerOfTwo(uint64(ccs.GetNbConstraints())) - // Prepare for phase-2 - srs2, evals := InitPhase2(r1cs, &srs1) - - // Make and verify contributions for phase1 - for i := 1; i < nContributionsPhase2; i++ { - // we clone for test purposes; but in practice, participant will receive a []byte, deserialize it, - // add his contribution and send back to coordinator. - prev := srs2.clone() + var ( + bb bytes.Buffer // simulating network communications + p1 Phase1 + p2 Phase2 + ) + serialized := make([][]byte, max(nbContributionsPhase1, nbContributionsPhase2)) + phase1 := make([]*Phase1, nbContributionsPhase1) + phase2 := make([]*Phase2, nbContributionsPhase2) + + serialize := func(v io.WriterTo) []byte { + bb.Reset() + _, err := v.WriteTo(&bb) + assert.NoError(err) + return slices.Clone(bb.Bytes()) + } + deserialize := func(v io.ReaderFrom, b []byte) { + n, err := v.ReadFrom(bytes.NewReader(b)) + assert.NoError(err) + assert.Equal(len(b), int(n)) + } - srs2.Contribute() - assert.NoError(VerifyPhase2(&prev, &srs2)) + p1.Initialize(domainSize) + for i := range phase1 { + p1.Contribute() + serialized[i] = serialize(&p1) } - // Extract the proving and verifying keys - pk, vk := ExtractKeys(&srs1, &srs2, &evals, ccs.GetNbConstraints()) + // read all Phase1 objects + for i := range phase1 { + phase1[i] = new(Phase1) + deserialize(phase1[i], serialized[i]) + } - // Build the witness - var preImage, hash fr.Element + // Verify contributions for phase 1 and generate non-circuit-specific parameters + srsCommons, err := VerifyPhase1(domainSize, []byte("testing phase1"), phase1[:]...) 
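+	// VerifyPhase1 checks each contribution against its predecessor and then
+	// applies the beacon-seeded final contribution (Seal), so on success
+	// srsCommons is ready for phase 2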
+ assert.NoError(err) { - m := native_mimc.NewMiMC() - m.Write(preImage.Marshal()) - hash.SetBytes(m.Sum(nil)) + var commonsRead SrsCommons + deserialize(&commonsRead, serialize(&srsCommons)) + srsCommons = commonsRead } - - witness, err := frontend.NewWitness(&Circuit{PreImage: preImage, Hash: hash}, curve.ID.ScalarField()) - assert.NoError(err) - pubWitness, err := witness.Public() - assert.NoError(err) + p2.Initialize(ccs, &srsCommons) + for i := range phase2 { + p2.Contribute() + serialized[i] = serialize(&p2) + } - // groth16: ensure proof is verified - proof, err := groth16.Prove(ccs, &pk, witness) - assert.NoError(err) + for i := range phase2 { + phase2[i] = new(Phase2) + deserialize(phase2[i], serialized[i]) + } - err = groth16.Verify(proof, &vk, pubWitness) + pk, vk, err := VerifyPhase2(ccs, &srsCommons, []byte("testing phase2"), phase2[:]...) assert.NoError(err) + + proveVerifyCircuit(t, pk, vk) } func BenchmarkPhase1(b *testing.B) { @@ -91,13 +96,15 @@ func BenchmarkPhase1(b *testing.B) { b.Run("init", func(b *testing.B) { b.ResetTimer() + var srs1 Phase1 for i := 0; i < b.N; i++ { - _ = InitPhase1(power) + srs1.Initialize(1 << power) } }) b.Run("contrib", func(b *testing.B) { - srs1 := InitPhase1(power) + var srs1 Phase1 + srs1.Initialize(1 << power) b.ResetTimer() for i := 0; i < b.N; i++ { srs1.Contribute() @@ -108,8 +115,10 @@ func BenchmarkPhase1(b *testing.B) { func BenchmarkPhase2(b *testing.B) { const power = 14 - srs1 := InitPhase1(power) - srs1.Contribute() + var p1 Phase1 + p1.Initialize(1 << power) + p1.Contribute() + commons := p1.Seal([]byte("testing")) var myCircuit Circuit ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &myCircuit) @@ -120,17 +129,19 @@ func BenchmarkPhase2(b *testing.B) { r1cs := ccs.(*cs.R1CS) b.Run("init", func(b *testing.B) { + var p Phase2 b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = InitPhase2(r1cs, &srs1) + p.Initialize(r1cs, &commons) } }) b.Run("contrib", func(b *testing.B) { - srs2, _ := InitPhase2(r1cs, &srs1) + var p Phase2 + p.Initialize(r1cs, &commons) b.ResetTimer() for i := 0; i < b.N; i++ { - srs2.Contribute() + p.Contribute() } }) @@ -153,32 +164,49 @@ func (circuit *Circuit) Define(api frontend.API) error { mimc.Write(circuit.PreImage) api.AssertIsEqual(circuit.Hash, mimc.Sum()) - return nil + c, err := api.(frontend.Committer).Commit(circuit.PreImage, circuit.Hash) + api.AssertIsDifferent(c, 0) + + return err } -func (phase1 *Phase1) clone() Phase1 { - r := Phase1{} - r.Parameters.G1.Tau = append(r.Parameters.G1.Tau, phase1.Parameters.G1.Tau...) - r.Parameters.G1.AlphaTau = append(r.Parameters.G1.AlphaTau, phase1.Parameters.G1.AlphaTau...) - r.Parameters.G1.BetaTau = append(r.Parameters.G1.BetaTau, phase1.Parameters.G1.BetaTau...) +func assignCircuit() frontend.Circuit { + + // Build the witness + var preImage, hash fr.Element + + m := native_mimc.NewMiMC() + m.Write(preImage.Marshal()) + hash.SetBytes(m.Sum(nil)) + + return &Circuit{PreImage: preImage, Hash: hash} - r.Parameters.G2.Tau = append(r.Parameters.G2.Tau, phase1.Parameters.G2.Tau...) - r.Parameters.G2.Beta = phase1.Parameters.G2.Beta +} - r.PublicKeys = phase1.PublicKeys - r.Hash = append(r.Hash, phase1.Hash...) 
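+// onceCircuit compiles the test circuit at most once: sync.OnceValue memoizes
+// the closure's *cs.R1CS result, so every test and benchmark that calls
+// getTestCircuit shares a single compilation.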
+var onceCircuit = sync.OnceValue(func() *cs.R1CS { + ccs, err := frontend.Compile(curve.ID.ScalarField(), r1cs.NewBuilder, &Circuit{}) + if err != nil { + panic(err) + } + return ccs.(*cs.R1CS) +}) - return r +func getTestCircuit() *cs.R1CS { + return onceCircuit() } -func (phase2 *Phase2) clone() Phase2 { - r := Phase2{} - r.Parameters.G1.Delta = phase2.Parameters.G1.Delta - r.Parameters.G1.L = append(r.Parameters.G1.L, phase2.Parameters.G1.L...) - r.Parameters.G1.Z = append(r.Parameters.G1.Z, phase2.Parameters.G1.Z...) - r.Parameters.G2.Delta = phase2.Parameters.G2.Delta - r.PublicKey = phase2.PublicKey - r.Hash = append(r.Hash, phase2.Hash...) +func proveVerifyCircuit(t *testing.T, pk groth16.ProvingKey, vk groth16.VerifyingKey) { + + witness, err := frontend.NewWitness(assignCircuit(), curve.ID.ScalarField()) + require.NoError(t, err) + + pubWitness, err := witness.Public() + require.NoError(t, err) + + // groth16: ensure proof is verified + proof, err := groth16.Prove(getTestCircuit(), pk, witness) + require.NoError(t, err) - return r + err = groth16.Verify(proof, vk, pubWitness) + require.NoError(t, err) } diff --git a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl index f7c67c036d..43247ff5d9 100644 --- a/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl +++ b/internal/generator/backend/template/zkpschemes/groth16/mpcsetup/utils.go.tmpl @@ -1,45 +1,13 @@ import ( - "bytes" "math/big" "math/bits" - "runtime" - - "github.com/consensys/gnark-crypto/ecc" - {{- template "import_fr" . }} {{- template "import_curve" . }} + gcUtils "github.com/consensys/gnark-crypto/utils" "github.com/consensys/gnark/internal/utils" + "sync" ) -type PublicKey struct { - SG curve.G1Affine - SXG curve.G1Affine - XR curve.G2Affine -} - -func newPublicKey(x fr.Element, challenge []byte, dst byte) PublicKey { - var pk PublicKey - _, _, g1, _ := curve.Generators() - - var s fr.Element - var sBi big.Int - s.SetRandom() - s.BigInt(&sBi) - pk.SG.ScalarMultiplication(&g1, &sBi) - - // compute x*sG1 - var xBi big.Int - x.BigInt(&xBi) - pk.SXG.ScalarMultiplication(&pk.SG, &xBi) - - // generate R based on sG1, sxG1, challenge, and domain separation tag (tau, alpha or beta) - R := genR(pk.SG, pk.SXG, challenge, dst) - - // compute x*spG2 - pk.XR.ScalarMultiplication(&R, &xBi) - return pk -} - func bitReverse[T any](a []T) { n := uint64(len(a)) nn := uint64(64 - bits.TrailingZeros64(n)) @@ -52,17 +20,21 @@ func bitReverse[T any](a []T) { } } -// Returns [1, a, a², ..., aⁿ⁻¹ ] in Montgomery form -func powers(a fr.Element, n int) []fr.Element { - result := make([]fr.Element, n) - result[0] = fr.NewElement(1) - for i := 1; i < n; i++ { - result[i].Mul(&result[i-1], &a) +// Returns [1, a, a², ..., aᴺ⁻¹ ] +func powers(a *fr.Element, N int) []fr.Element { + if N == 0 { + return nil + } + result := make([]fr.Element, N) + result[0].SetOne() + for i := 1; i < N; i++ { + result[i].Mul(&result[i-1], a) } return result } -// Returns [aᵢAᵢ, ...] in G1 +// Returns [aᵢAᵢ, ...]∈𝔾₁ +// it assumes len(A) ≤ len(a) func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -73,7 +45,8 @@ func scaleG1InPlace(A []curve.G1Affine, a []fr.Element) { }) } -// Returns [aᵢAᵢ, ...] 
in G2 +// Returns [aᵢAᵢ, ...]∈𝔾₂ +// it assumes len(A) ≤ len(a) func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { utils.Parallelize(len(A), func(start, end int) { var tmp big.Int @@ -84,70 +57,38 @@ func scaleG2InPlace(A []curve.G2Affine, a []fr.Element) { }) } -// Check e(a₁, a₂) = e(b₁, b₂) -func sameRatio(a1, b1 curve.G1Affine, a2, b2 curve.G2Affine) bool { - if !a1.IsInSubGroup() || !b1.IsInSubGroup() || !a2.IsInSubGroup() || !b2.IsInSubGroup() { - panic("invalid point not in subgroup") - } - var na2 curve.G2Affine - na2.Neg(&a2) - res, err := curve.PairingCheck( - []curve.G1Affine{a1, b1}, - []curve.G2Affine{na2, b2}) - if err != nil { - panic(err) - } - return res +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG1(wp *gcUtils.WorkerPool, s []curve.G1Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// returns a = ∑ rᵢAᵢ, b = ∑ rᵢBᵢ -func merge(A, B []curve.G1Affine) (a, b curve.G1Affine) { - nc := runtime.NumCPU() - r := make([]fr.Element, len(A)) - for i := 0; i < len(A); i++ { - r[i].SetRandom() - } - a.MultiExp(A, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - b.MultiExp(B, r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +// TODO @Tabaie replace with batch subgroup check, when available +func areInSubGroupG2(wp *gcUtils.WorkerPool, s []curve.G2Affine, errorReporter func(int)) *sync.WaitGroup { + return wp.Submit(len(s), func(start, end int) { + for i := start; i < end; i++ { + if !s[i].IsInSubGroup() { + errorReporter(i) + break + } + } + }, 1024) // TODO @Tabaie experimentally optimize minBlock } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G1 -func linearCombinationG1(A []curve.G1Affine) (L1, L2 curve.G1Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return +type verificationSettings struct { + wp *gcUtils.WorkerPool } -// L1 = ∑ rᵢAᵢ, L2 = ∑ rᵢAᵢ₊₁ in G2 -func linearCombinationG2(A []curve.G2Affine) (L1, L2 curve.G2Affine) { - nc := runtime.NumCPU() - n := len(A) - r := make([]fr.Element, n-1) - for i := 0; i < n-1; i++ { - r[i].SetRandom() - } - L1.MultiExp(A[:n-1], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - L2.MultiExp(A[1:], r, ecc.MultiExpConfig{NbTasks: nc / 2}) - return -} +type verificationOption func(*verificationSettings) -// Generate R in G₂ as Hash(gˢ, gˢˣ, challenge, dst) -func genR(sG1, sxG1 curve.G1Affine, challenge []byte, dst byte) curve.G2Affine { - var buf bytes.Buffer - buf.Grow(len(challenge) + curve.SizeOfG1AffineUncompressed*2) - buf.Write(sG1.Marshal()) - buf.Write(sxG1.Marshal()) - buf.Write(challenge) - spG2, err := curve.HashToG2(buf.Bytes(), []byte{dst}) - if err != nil { - panic(err) +func WithWorkerPool(wp *gcUtils.WorkerPool) verificationOption { + return func(s *verificationSettings) { + s.wp = wp } - return spG2 -} +} \ No newline at end of file diff --git a/internal/algo_utils/algo_utils.go b/internal/utils/algo_utils.go similarity index 99% rename from internal/algo_utils/algo_utils.go rename to internal/utils/algo_utils.go index 37dbc8c464..f836625370 100644 --- a/internal/algo_utils/algo_utils.go +++ b/internal/utils/algo_utils.go @@ -1,4 +1,4 @@ -package algo_utils +package utils import 
"github.com/bits-and-blooms/bitset" diff --git a/internal/algo_utils/algo_utils_test.go b/internal/utils/algo_utils_test.go similarity index 98% rename from internal/algo_utils/algo_utils_test.go rename to internal/utils/algo_utils_test.go index 85ab4bf294..2925bd6692 100644 --- a/internal/algo_utils/algo_utils_test.go +++ b/internal/utils/algo_utils_test.go @@ -1,4 +1,4 @@ -package algo_utils +package utils import ( "github.com/stretchr/testify/assert" diff --git a/internal/utils/slices.go b/internal/utils/slices.go new file mode 100644 index 0000000000..9c50ed9b01 --- /dev/null +++ b/internal/utils/slices.go @@ -0,0 +1,9 @@ +package utils + +// AppendRefs returns append(s, &v[0], &v[1], ...). +func AppendRefs[T any](s []any, v []T) []any { + for i := range v { + s = append(s, &v[i]) + } + return s +} diff --git a/internal/utils/test_utils/test_utils.go b/internal/utils/test_utils/test_utils.go new file mode 100644 index 0000000000..51f0a9d085 --- /dev/null +++ b/internal/utils/test_utils/test_utils.go @@ -0,0 +1,39 @@ +package test_utils + +import ( + "bytes" + "github.com/stretchr/testify/require" + "io" + "testing" +) + +// Range (n, startingPoints...) = [startingPoints[0], startingPoints[0]+1, ..., startingPoints[0]+n-1, startingPoints[1], startingPoints[1]+1, ...,] +// or [0, 1, ..., n-1] if startingPoints is empty +func Range(n int, startingPoints ...int) []int { + if len(startingPoints) == 0 { + startingPoints = []int{0} + } + res := make([]int, n*len(startingPoints)) + + for i := range startingPoints { + for j := range n { + res[i*n+j] = startingPoints[i] + j + } + } + + return res +} + +func CopyThruSerialization(t *testing.T, dst, src interface { + io.ReaderFrom + io.WriterTo +}) { + var bb bytes.Buffer + + n, err := src.WriteTo(&bb) + require.NoError(t, err) + require.Equal(t, int64(bb.Len()), n) + n, err = dst.ReadFrom(bytes.NewReader(bb.Bytes())) + require.NoError(t, err) + require.Equal(t, int64(bb.Len()), n) +} diff --git a/io/roundtrip.go b/io/roundtrip.go index ecac78afaf..c598806a87 100644 --- a/io/roundtrip.go +++ b/io/roundtrip.go @@ -3,6 +3,7 @@ package io import ( "bytes" "errors" + "fmt" "io" "reflect" ) @@ -21,8 +22,8 @@ func RoundTripCheck(from any, to func() any) error { if err != nil { return err } - if !reflect.DeepEqual(from, r) { - return errors.New("reconstructed object don't match original (ReadFrom)") + if err = equal(from, r); err != nil { + return fmt.Errorf("ReadFrom: %w", err) } if written != read { return errors.New("bytes written / read don't match") @@ -35,8 +36,8 @@ func RoundTripCheck(from any, to func() any) error { if err != nil { return err } - if !reflect.DeepEqual(from, r) { - return errors.New("reconstructed object don't match original (UnsafeReadFrom)") + if err = equal(from, r); err != nil { + return fmt.Errorf("UnsafeReadFrom: %w", err) } if written != read { return errors.New("bytes written / read don't match") @@ -85,8 +86,28 @@ func DumpRoundTripCheck(from any, to func() any) error { if err := r.ReadDump(bytes.NewReader(buf.Bytes())); err != nil { return err } - if !reflect.DeepEqual(from, r) { - return errors.New("reconstructed object don't match original (ReadDump)") + if err := equal(from, r); err != nil { + return fmt.Errorf("ReadDump: %w", err) } return nil } + +func equal(a, b any) error { + // check for a custom Equal method + aV := reflect.ValueOf(a) + eq := aV.MethodByName("Equal") + if eq.IsValid() { + res := eq.Call([]reflect.Value{reflect.ValueOf(b)}) + if len(res) != 1 { + return errors.New("`Equal` method must return a 
single bool") + } + if res[0].Bool() { + return nil + } + return errors.New("reconstructed object does not match the original (custom Equal)") + } + if reflect.DeepEqual(a, b) { + return nil + } + return errors.New("reconstructed object does not match the original (reflect.DeepEqual)") +} diff --git a/std/gkr/api.go b/std/gkr/api.go index b46cfad8e5..03d8d8f71a 100644 --- a/std/gkr/api.go +++ b/std/gkr/api.go @@ -2,7 +2,7 @@ package gkr import ( "github.com/consensys/gnark/constraint" - "github.com/consensys/gnark/internal/algo_utils" + "github.com/consensys/gnark/internal/utils" ) func frontendVarToInt(a constraint.GkrVariable) int { @@ -12,7 +12,7 @@ func frontendVarToInt(a constraint.GkrVariable) int { func (api *API) NamedGate(gate string, in ...constraint.GkrVariable) constraint.GkrVariable { api.toStore.Circuit = append(api.toStore.Circuit, constraint.GkrWire{ Gate: gate, - Inputs: algo_utils.Map(in, frontendVarToInt), + Inputs: utils.Map(in, frontendVarToInt), }) api.assignments = append(api.assignments, nil) return constraint.GkrVariable(len(api.toStore.Circuit) - 1) diff --git a/std/gkr/api_test.go b/std/gkr/api_test.go index 25fc3c3c1e..1dec782f71 100644 --- a/std/gkr/api_test.go +++ b/std/gkr/api_test.go @@ -268,7 +268,6 @@ func TestApiMul(t *testing.T) { func BenchmarkMiMCMerkleTree(b *testing.B) { depth := 14 - //fmt.Println("start") bottom := make([]frontend.Variable, 1<